From af1a266670d040d2f4083ff309d732d648afba2a Mon Sep 17 00:00:00 2001
From: Angelos Mouzakitis
Date: Tue, 10 Oct 2023 14:33:42 +0000
Subject: Add submodule dependency files

Change-Id: Iaf8d18082d3991dec7c0ebbea540f092188eb4ec
---
 meson/.editorconfig | 27 + meson/.flake8 | 31 + meson/.gitattributes | 5 + meson/.github/ISSUE_TEMPLATE/bug_report.md | 25 + meson/.github/codecov.yml | 8 + meson/.github/workflows/cygwin.yml | 96 + meson/.github/workflows/file_format.yml | 17 + meson/.github/workflows/images.yml | 62 + meson/.github/workflows/lint_mypy.yml | 47 + meson/.github/workflows/macos.yml | 98 + meson/.github/workflows/msys2.yml | 104 + meson/.github/workflows/nonative.yml | 38 + meson/.github/workflows/os_comp.yml | 129 + .../.github/workflows/unusedargs_missingreturn.yml | 74 + meson/.github/workflows/website.yml | 47 + meson/.gitignore | 34 + meson/.lgtm.yml | 4 + meson/.mailmap | 10 + meson/.mypy.ini | 23 + meson/.pylintrc | 26 + meson/CODEOWNERS | 9 + meson/COPYING | 202 + meson/MANIFEST.in | 20 + meson/README.md | 93 + meson/azure-pipelines.yml | 94 + meson/ci/azure-steps.yml | 23 + meson/ci/ciimage/.gitignore | 3 + meson/ci/ciimage/arch/image.json | 7 + meson/ci/ciimage/arch/install.sh | 54 + meson/ci/ciimage/bionic/image.json | 8 + meson/ci/ciimage/bionic/install.sh | 60 + meson/ci/ciimage/build.py | 241 + meson/ci/ciimage/common.sh | 47 + meson/ci/ciimage/cuda/image.json | 8 + meson/ci/ciimage/cuda/install.sh | 21 + meson/ci/ciimage/fedora/image.json | 8 + meson/ci/ciimage/fedora/install.sh | 29 + meson/ci/ciimage/opensuse/image.json | 9 + meson/ci/ciimage/opensuse/install.sh | 48 + meson/ci/ciimage/ubuntu-rolling/image.json | 8 + meson/ci/ciimage/ubuntu-rolling/install.sh | 54 + meson/ci/ciimage/ubuntu-rolling/test.sh | 12 + meson/ci/run.ps1 | 106 + meson/ci/upload_cov.sh | 13 + meson/ci/usercustomize.py | 19 + meson/contributing.md | 8 + meson/cross/arm64cl.txt | 17 + meson/cross/armcc.txt | 20 + meson/cross/armclang-linux.txt | 34 + meson/cross/armclang.txt | 20 + meson/cross/c2000.txt | 28 + meson/cross/ccomp-armv7a.txt | 13 + meson/cross/ccrx.txt | 22 + meson/cross/iphone.txt | 27 + meson/cross/linux-mingw-w64-32bit.json | 7 + meson/cross/linux-mingw-w64-32bit.txt | 31 + meson/cross/linux-mingw-w64-64bit.json | 7 + meson/cross/linux-mingw-w64-64bit.txt | 30 + meson/cross/none.txt | 18 + meson/cross/ownstdlib.txt | 13 + meson/cross/tvos.txt | 28 + meson/cross/ubuntu-armhf.json | 5 + meson/cross/ubuntu-armhf.txt | 29 + meson/cross/ubuntu-faketarget.txt | 13 + meson/cross/wasm.txt | 18 + meson/cross/xc16.txt | 26 + meson/data/.coveragerc.in | 25 + meson/data/com.mesonbuild.install.policy | 22 + meson/data/macros.meson | 45 + meson/data/schema.xsd | 96 + meson/data/shell-completions/bash/meson | 416 + meson/data/shell-completions/zsh/_meson | 425 + meson/data/syntax-highlighting/emacs/meson.el | 31 + meson/data/syntax-highlighting/vim/README | 4 + .../syntax-highlighting/vim/ftdetect/meson.vim | 3 + .../syntax-highlighting/vim/ftplugin/meson.vim | 20 + .../data/syntax-highlighting/vim/indent/meson.vim | 181 + .../data/syntax-highlighting/vim/syntax/meson.vim | 154 + meson/data/test.schema.json | 179 + meson/docs/.editorconfig | 2 + meson/docs/README.md | 40 + meson/docs/genrelnotes.py | 73 + meson/docs/markdown/ARM-performance-test.md | 81 + meson/docs/markdown/Adding-arguments.md | 72 + .../docs/markdown/Adding-new-projects-to-wrapdb.md | 267 + meson/docs/markdown/Additional.md | 8 + meson/docs/markdown/Build-options.md | 229 + meson/docs/markdown/Build-system-converters.md | 
27 + meson/docs/markdown/Build-targets.md | 100 + meson/docs/markdown/Builtin-options.md | 262 + meson/docs/markdown/CMake-module.md | 289 + meson/docs/markdown/Code-formatting.md | 58 + meson/docs/markdown/Commands.md | 312 + meson/docs/markdown/Comparisons.md | 78 + meson/docs/markdown/Compiler-properties.md | 230 + meson/docs/markdown/Conference-presentations.md | 31 + meson/docs/markdown/Configuration.md | 198 + .../docs/markdown/Configuring-a-build-directory.md | 120 + meson/docs/markdown/Contact-information.md | 14 + meson/docs/markdown/Continuous-Integration.md | 279 + meson/docs/markdown/Contributing.md | 568 + meson/docs/markdown/Creating-Linux-binaries.md | 133 + meson/docs/markdown/Creating-OSX-packages.md | 158 + meson/docs/markdown/Creating-releases.md | 80 + meson/docs/markdown/Cross-compilation.md | 348 + meson/docs/markdown/Cuda-module.md | 186 + meson/docs/markdown/Custom-build-targets.md | 46 + meson/docs/markdown/Cython.md | 33 + meson/docs/markdown/D.md | 127 + meson/docs/markdown/Dependencies.md | 714 ++ meson/docs/markdown/Design-rationale.md | 261 + meson/docs/markdown/Disabler.md | 68 + meson/docs/markdown/Dlang-module.md | 44 + meson/docs/markdown/External-Project-module.md | 128 + meson/docs/markdown/External-commands.md | 52 + meson/docs/markdown/FAQ.md | 631 ++ meson/docs/markdown/Feature-autodetection.md | 39 + meson/docs/markdown/Fs-module.md | 217 + meson/docs/markdown/Generating-sources.md | 206 + meson/docs/markdown/Getting-meson.md | 96 + meson/docs/markdown/Getting-meson_ptbr.md | 93 + meson/docs/markdown/Getting-meson_zh.md | 56 + meson/docs/markdown/Gnome-module.md | 378 + meson/docs/markdown/Hotdoc-module.md | 79 + meson/docs/markdown/IDE-integration.md | 365 + meson/docs/markdown/Icestorm-module.md | 27 + meson/docs/markdown/In-the-press.md | 8 + meson/docs/markdown/Include-directories.md | 30 + meson/docs/markdown/IndepthTutorial.md | 150 + meson/docs/markdown/Installing.md | 149 + meson/docs/markdown/Java.md | 24 + meson/docs/markdown/Keyval-module.md | 58 + meson/docs/markdown/Localisation.md | 86 + meson/docs/markdown/Machine-files.md | 359 + meson/docs/markdown/Manual.md | 9 + meson/docs/markdown/Meson-sample.md | 80 + meson/docs/markdown/MesonCI.md | 43 + meson/docs/markdown/Mixing-build-systems.md | 55 + meson/docs/markdown/Modules.md | 25 + meson/docs/markdown/Native-environments.md | 50 + meson/docs/markdown/Overview.md | 58 + meson/docs/markdown/Performance-comparison.md | 7 + meson/docs/markdown/Pkg-config-files.md | 30 + meson/docs/markdown/Pkgconfig-module.md | 127 + meson/docs/markdown/Playground.md | 35 + meson/docs/markdown/Porting-from-autotools.md | 700 ++ meson/docs/markdown/Precompiled-headers.md | 119 + meson/docs/markdown/Project-templates.md | 49 + meson/docs/markdown/Python-3-module.md | 57 + meson/docs/markdown/Python-module.md | 248 + meson/docs/markdown/Qt4-module.md | 6 + meson/docs/markdown/Qt5-module.md | 6 + meson/docs/markdown/Qt6-module.md | 8 + meson/docs/markdown/Quick-guide.md | 158 + meson/docs/markdown/RPM-module.md | 16 + meson/docs/markdown/Reference-manual.md | 2923 +++++ meson/docs/markdown/Reference-tables.md | 334 + meson/docs/markdown/Release-notes-for-0.37.0.md | 173 + meson/docs/markdown/Release-notes-for-0.38.0.md | 123 + meson/docs/markdown/Release-notes-for-0.39.0.md | 16 + meson/docs/markdown/Release-notes-for-0.40.0.md | 152 + meson/docs/markdown/Release-notes-for-0.41.0.md | 84 + meson/docs/markdown/Release-notes-for-0.42.0.md | 148 + meson/docs/markdown/Release-notes-for-0.43.0.md | 125 + 
meson/docs/markdown/Release-notes-for-0.44.0.md | 154 + meson/docs/markdown/Release-notes-for-0.45.0.md | 203 + meson/docs/markdown/Release-notes-for-0.46.0.md | 329 + meson/docs/markdown/Release-notes-for-0.47.0.md | 312 + meson/docs/markdown/Release-notes-for-0.48.0.md | 343 + meson/docs/markdown/Release-notes-for-0.49.0.md | 327 + meson/docs/markdown/Release-notes-for-0.50.0.md | 350 + meson/docs/markdown/Release-notes-for-0.51.0.md | 337 + meson/docs/markdown/Release-notes-for-0.52.0.md | 252 + meson/docs/markdown/Release-notes-for-0.53.0.md | 219 + meson/docs/markdown/Release-notes-for-0.54.0.md | 394 + meson/docs/markdown/Release-notes-for-0.55.0.md | 332 + meson/docs/markdown/Release-notes-for-0.56.0.md | 375 + meson/docs/markdown/Release-notes-for-0.57.0.md | 360 + meson/docs/markdown/Release-notes-for-0.58.0.md | 357 + meson/docs/markdown/Release-notes-for-0.59.0.md | 235 + meson/docs/markdown/Release-notes.md | 1 + meson/docs/markdown/Release-procedure.md | 69 + meson/docs/markdown/Reproducible-builds.md | 20 + meson/docs/markdown/Rewriter.md | 247 + meson/docs/markdown/Run-targets.md | 55 + meson/docs/markdown/Running-Meson.md | 210 + meson/docs/markdown/Rust-module.md | 83 + .../Shipping-prebuilt-binaries-as-wraps.md | 36 + meson/docs/markdown/Simd-module.md | 72 + meson/docs/markdown/Simple-comparison.md | 88 + meson/docs/markdown/SimpleStart.md | 150 + meson/docs/markdown/SourceSet-module.md | 212 + meson/docs/markdown/Style-guide.md | 87 + meson/docs/markdown/Subprojects.md | 382 + meson/docs/markdown/Syntax.md | 746 ++ meson/docs/markdown/Threads.md | 20 + meson/docs/markdown/Tutorial.md | 209 + meson/docs/markdown/Unit-tests.md | 270 + meson/docs/markdown/Unity-builds.md | 39 + meson/docs/markdown/Use-of-Python.md | 48 + meson/docs/markdown/Users.md | 158 + .../markdown/Using-multiple-build-directories.md | 93 + meson/docs/markdown/Using-the-WrapDB.md | 47 + meson/docs/markdown/Using-with-Visual-Studio.md | 47 + meson/docs/markdown/Using-wraptool.md | 78 + meson/docs/markdown/Vala.md | 350 + meson/docs/markdown/Videos.md | 45 + meson/docs/markdown/Vs-External.md | 57 + meson/docs/markdown/Windows-module.md | 30 + .../docs/markdown/Wrap-best-practices-and-tips.md | 158 + .../docs/markdown/Wrap-dependency-system-manual.md | 266 + meson/docs/markdown/Wrap-review-guidelines.md | 94 + meson/docs/markdown/Wrapdb-projects.md | 16 + meson/docs/markdown/_Sidebar.md | 15 + meson/docs/markdown/_include_qt_base.md | 160 + meson/docs/markdown/fallback-wraptool.md | 43 + meson/docs/markdown/howtox.md | 325 + meson/docs/markdown/i18n-module.md | 53 + meson/docs/markdown/images/buildtime.png | Bin 0 -> 8764 bytes meson/docs/markdown/images/conftime.png | Bin 0 -> 10729 bytes meson/docs/markdown/images/emptytime.png | Bin 0 -> 9960 bytes meson/docs/markdown/images/glib_build.png | Bin 0 -> 10228 bytes meson/docs/markdown/images/glib_conf.png | Bin 0 -> 9186 bytes meson/docs/markdown/images/glib_empty.png | Bin 0 -> 8894 bytes meson/docs/markdown/images/glib_link.png | Bin 0 -> 9057 bytes meson/docs/markdown/images/gtksample.png | Bin 0 -> 3853 bytes meson/docs/markdown/images/linux_alldone.png | Bin 0 -> 78463 bytes meson/docs/markdown/images/meson_mac1.png | Bin 0 -> 62177 bytes meson/docs/markdown/images/meson_mac2.png | Bin 0 -> 90454 bytes meson/docs/markdown/images/meson_mac3.png | Bin 0 -> 122178 bytes meson/docs/markdown/images/meson_mac4.png | Bin 0 -> 77246 bytes meson/docs/markdown/images/meson_mac5.png | Bin 0 -> 198834 bytes meson/docs/markdown/images/osx_xcode.png | Bin 0 -> 
186012 bytes meson/docs/markdown/images/py3-install-1.png | Bin 0 -> 101763 bytes meson/docs/markdown/images/py3-install-2.png | Bin 0 -> 99866 bytes meson/docs/markdown/images/py3-install-3.png | Bin 0 -> 101063 bytes meson/docs/markdown/images/win_dlvs.png | Bin 0 -> 219590 bytes meson/docs/markdown/images/win_downloadmeson.png | Bin 0 -> 92472 bytes meson/docs/markdown/images/win_installvs.png | Bin 0 -> 190136 bytes meson/docs/markdown/images/win_vstoolsprompt.png | Bin 0 -> 160027 bytes meson/docs/markdown/images/win_working.png | Bin 0 -> 94387 bytes meson/docs/markdown/index.md | 60 + meson/docs/markdown/legal.md | 26 + .../snippets/add_release_note_snippets_here | 3 + meson/docs/meson.build | 41 + meson/docs/sitemap.txt | 136 + meson/docs/theme/extra/images/favicon.png | Bin 0 -> 3970 bytes meson/docs/theme/extra/images/meson_logo.png | Bin 0 -> 16051 bytes .../theme/extra/prism_components/prism-meson.js | 16 + .../extra/prism_components/prism-meson.min.js | 1 + meson/docs/theme/extra/templates/brand-logo.html | 1 + meson/docs/theme/extra/templates/extra_head.html | 2 + meson/docs/theme/extra/templates/license.html | 7 + .../docs/theme/extra/templates/navbar_center.html | 1 + meson/docs/theme/extra/templates/navbar_links.html | 53 + meson/ghwt.py | 132 + meson/graphics/meson_logo.svg | 340 + meson/graphics/meson_logo_big.png | Bin 0 -> 21889 bytes meson/graphics/wrap_logo.svg | 70 + meson/man/meson.1 | 238 + meson/manual tests/1 wrap/main.c | 12 + meson/manual tests/1 wrap/meson.build | 13 + meson/manual tests/1 wrap/subprojects/sqlite.wrap | 10 + meson/manual tests/10 svn wrap/meson.build | 10 + meson/manual tests/10 svn wrap/prog.c | 6 + .../10 svn wrap/subprojects/samplesubproject.wrap | 4 + meson/manual tests/11 wrap imposter/meson.build | 8 + .../11 wrap imposter/subprojects/zlib.wrap | 10 + meson/manual tests/12 wrap mirror/meson.build | 4 + .../12 wrap mirror/subprojects/zlib.wrap | 10 + meson/manual tests/2 multiwrap/meson.build | 12 + meson/manual tests/2 multiwrap/prog.c | 66 + .../2 multiwrap/subprojects/libpng.wrap | 10 + .../manual tests/2 multiwrap/subprojects/lua.wrap | 11 + .../manual tests/2 multiwrap/subprojects/zlib.wrap | 10 + meson/manual tests/3 git wrap/meson.build | 10 + meson/manual tests/3 git wrap/prog.c | 6 + .../3 git wrap/subprojects/samplesubproject.wrap | 4 + .../manual tests/4 standalone binaries/Info.plist | 26 + .../4 standalone binaries/build_linux_package.sh | 12 + .../4 standalone binaries/build_osx_package.sh | 20 + .../4 standalone binaries/build_windows_package.py | 32 + .../4 standalone binaries/linux_bundler.sh | 7 + .../manual tests/4 standalone binaries/meson.build | 38 + meson/manual tests/4 standalone binaries/myapp.cpp | 39 + .../manual tests/4 standalone binaries/myapp.icns | Bin 0 -> 1831 bytes meson/manual tests/4 standalone binaries/myapp.iss | 18 + meson/manual tests/4 standalone binaries/myapp.sh | 10 + .../4 standalone binaries/osx_bundler.sh | 6 + .../manual tests/4 standalone binaries/readme.txt | 12 + .../4 standalone binaries/template.dmg.gz | Bin 0 -> 37311 bytes meson/manual tests/5 rpm/lib.c | 6 + meson/manual tests/5 rpm/lib.h | 1 + meson/manual tests/5 rpm/main.c | 8 + meson/manual tests/5 rpm/meson.build | 14 + meson/manual tests/6 hg wrap/meson.build | 10 + meson/manual tests/6 hg wrap/prog.c | 6 + .../6 hg wrap/subprojects/samplesubproject.wrap | 4 + .../7 vala composite widgets/meson.build | 21 + .../7 vala composite widgets/my-resources.xml | 6 + .../7 vala composite widgets/mywidget.ui | 70 + .../7 vala composite 
widgets/mywidget.vala | 41 + meson/manual tests/8 timeout/meson.build | 8 + meson/manual tests/8 timeout/sleepprog.c | 6 + meson/meson.py | 29 + meson/mesonbuild/__init__.py | 0 meson/mesonbuild/_pathlib.py | 73 + meson/mesonbuild/_typing.py | 120 + meson/mesonbuild/arglist.py | 334 + meson/mesonbuild/ast/__init__.py | 34 + meson/mesonbuild/ast/interpreter.py | 424 + meson/mesonbuild/ast/introspection.py | 330 + meson/mesonbuild/ast/postprocess.py | 117 + meson/mesonbuild/ast/printer.py | 366 + meson/mesonbuild/ast/visitor.py | 142 + meson/mesonbuild/backend/__init__.py | 0 meson/mesonbuild/backend/backends.py | 1616 +++ meson/mesonbuild/backend/ninjabackend.py | 3352 ++++++ meson/mesonbuild/backend/vs2010backend.py | 1562 +++ meson/mesonbuild/backend/vs2012backend.py | 38 + meson/mesonbuild/backend/vs2013backend.py | 38 + meson/mesonbuild/backend/vs2015backend.py | 38 + meson/mesonbuild/backend/vs2017backend.py | 52 + meson/mesonbuild/backend/vs2019backend.py | 47 + meson/mesonbuild/backend/xcodebackend.py | 1708 +++ meson/mesonbuild/build.py | 2686 +++++ meson/mesonbuild/cmake/__init__.py | 46 + meson/mesonbuild/cmake/client.py | 373 + meson/mesonbuild/cmake/common.py | 334 + meson/mesonbuild/cmake/data/preload.cmake | 82 + meson/mesonbuild/cmake/executor.py | 246 + meson/mesonbuild/cmake/fileapi.py | 320 + meson/mesonbuild/cmake/generator.py | 134 + meson/mesonbuild/cmake/interpreter.py | 1369 +++ meson/mesonbuild/cmake/toolchain.py | 259 + meson/mesonbuild/cmake/traceparser.py | 756 ++ meson/mesonbuild/compilers/__init__.py | 250 + meson/mesonbuild/compilers/c.py | 714 ++ .../mesonbuild/compilers/c_function_attributes.py | 132 + meson/mesonbuild/compilers/compilers.py | 1294 +++ meson/mesonbuild/compilers/cpp.py | 823 ++ meson/mesonbuild/compilers/cs.py | 150 + meson/mesonbuild/compilers/cuda.py | 760 ++ meson/mesonbuild/compilers/cython.py | 79 + meson/mesonbuild/compilers/d.py | 906 ++ meson/mesonbuild/compilers/detect.py | 1219 +++ meson/mesonbuild/compilers/fortran.py | 504 + meson/mesonbuild/compilers/java.py | 104 + meson/mesonbuild/compilers/mixins/__init__.py | 0 meson/mesonbuild/compilers/mixins/arm.py | 190 + meson/mesonbuild/compilers/mixins/c2000.py | 124 + meson/mesonbuild/compilers/mixins/ccrx.py | 130 + meson/mesonbuild/compilers/mixins/clang.py | 162 + meson/mesonbuild/compilers/mixins/clike.py | 1267 +++ meson/mesonbuild/compilers/mixins/compcert.py | 131 + meson/mesonbuild/compilers/mixins/elbrus.py | 82 + meson/mesonbuild/compilers/mixins/emscripten.py | 69 + meson/mesonbuild/compilers/mixins/gnu.py | 398 + meson/mesonbuild/compilers/mixins/intel.py | 189 + meson/mesonbuild/compilers/mixins/islinker.py | 129 + meson/mesonbuild/compilers/mixins/pgi.py | 109 + meson/mesonbuild/compilers/mixins/visualstudio.py | 428 + meson/mesonbuild/compilers/mixins/xc16.py | 127 + meson/mesonbuild/compilers/objc.py | 108 + meson/mesonbuild/compilers/objcpp.py | 110 + meson/mesonbuild/compilers/rust.py | 170 + meson/mesonbuild/compilers/swift.py | 127 + meson/mesonbuild/compilers/vala.py | 138 + meson/mesonbuild/coredata.py | 1228 +++ meson/mesonbuild/dependencies/__init__.py | 275 + meson/mesonbuild/dependencies/base.py | 573 + meson/mesonbuild/dependencies/boost.py | 1080 ++ meson/mesonbuild/dependencies/cmake.py | 718 ++ meson/mesonbuild/dependencies/coarrays.py | 90 + meson/mesonbuild/dependencies/configtool.py | 178 + meson/mesonbuild/dependencies/cuda.py | 291 + meson/mesonbuild/dependencies/data/CMakeLists.txt | 98 + .../dependencies/data/CMakeListsLLVM.txt | 95 + 
.../mesonbuild/dependencies/data/CMakePathInfo.txt | 31 + meson/mesonbuild/dependencies/detect.py | 226 + meson/mesonbuild/dependencies/dev.py | 595 ++ meson/mesonbuild/dependencies/dub.py | 240 + meson/mesonbuild/dependencies/factory.py | 151 + meson/mesonbuild/dependencies/framework.py | 123 + meson/mesonbuild/dependencies/hdf5.py | 180 + meson/mesonbuild/dependencies/misc.py | 623 ++ meson/mesonbuild/dependencies/mpi.py | 236 + meson/mesonbuild/dependencies/pkgconfig.py | 503 + meson/mesonbuild/dependencies/platform.py | 58 + meson/mesonbuild/dependencies/qt.py | 438 + meson/mesonbuild/dependencies/scalapack.py | 153 + meson/mesonbuild/dependencies/ui.py | 277 + meson/mesonbuild/depfile.py | 85 + meson/mesonbuild/envconfig.py | 425 + meson/mesonbuild/environment.py | 867 ++ meson/mesonbuild/interpreter/__init__.py | 25 + meson/mesonbuild/interpreter/compiler.py | 785 ++ .../mesonbuild/interpreter/dependencyfallbacks.py | 351 + meson/mesonbuild/interpreter/interpreter.py | 2794 +++++ meson/mesonbuild/interpreter/interpreterobjects.py | 996 ++ meson/mesonbuild/interpreter/kwargs.py | 139 + meson/mesonbuild/interpreter/mesonmain.py | 382 + meson/mesonbuild/interpreterbase/__init__.py | 122 + meson/mesonbuild/interpreterbase/_unholder.py | 39 + meson/mesonbuild/interpreterbase/baseobjects.py | 96 + meson/mesonbuild/interpreterbase/decorators.py | 650 ++ meson/mesonbuild/interpreterbase/disabler.py | 42 + meson/mesonbuild/interpreterbase/exceptions.py | 33 + meson/mesonbuild/interpreterbase/helpers.py | 118 + .../mesonbuild/interpreterbase/interpreterbase.py | 959 ++ meson/mesonbuild/linkers/__init__.py | 126 + meson/mesonbuild/linkers/detect.py | 216 + meson/mesonbuild/linkers/linkers.py | 1433 +++ meson/mesonbuild/mcompile.py | 358 + meson/mesonbuild/mconf.py | 334 + meson/mesonbuild/mdevenv.py | 78 + meson/mesonbuild/mdist.py | 319 + meson/mesonbuild/mesondata.py | 394 + meson/mesonbuild/mesonlib/__init__.py | 30 + meson/mesonbuild/mesonlib/platform.py | 37 + meson/mesonbuild/mesonlib/posix.py | 39 + meson/mesonbuild/mesonlib/universal.py | 2190 ++++ meson/mesonbuild/mesonlib/win32.py | 39 + meson/mesonbuild/mesonmain.py | 329 + meson/mesonbuild/minit.py | 186 + meson/mesonbuild/minstall.py | 721 ++ meson/mesonbuild/mintro.py | 543 + meson/mesonbuild/mlog.py | 395 + meson/mesonbuild/modules/__init__.py | 212 + meson/mesonbuild/modules/cmake.py | 406 + meson/mesonbuild/modules/dlang.py | 135 + meson/mesonbuild/modules/fs.py | 258 + meson/mesonbuild/modules/gnome.py | 1812 ++++ meson/mesonbuild/modules/hotdoc.py | 432 + meson/mesonbuild/modules/i18n.py | 197 + meson/mesonbuild/modules/keyval.py | 72 + meson/mesonbuild/modules/modtest.py | 30 + meson/mesonbuild/modules/pkgconfig.py | 591 + meson/mesonbuild/modules/python.py | 661 ++ meson/mesonbuild/modules/python3.py | 81 + meson/mesonbuild/modules/qt.py | 524 + meson/mesonbuild/modules/qt4.py | 25 + meson/mesonbuild/modules/qt5.py | 25 + meson/mesonbuild/modules/qt6.py | 25 + meson/mesonbuild/modules/rpm.py | 186 + meson/mesonbuild/modules/sourceset.py | 198 + meson/mesonbuild/modules/unstable_cuda.py | 350 + .../modules/unstable_external_project.py | 268 + meson/mesonbuild/modules/unstable_icestorm.py | 89 + meson/mesonbuild/modules/unstable_rust.py | 227 + meson/mesonbuild/modules/unstable_simd.py | 88 + meson/mesonbuild/modules/windows.py | 171 + meson/mesonbuild/mparser.py | 814 ++ meson/mesonbuild/msetup.py | 282 + meson/mesonbuild/msubprojects.py | 561 + meson/mesonbuild/mtest.py | 2011 ++++ meson/mesonbuild/munstable_coredata.py | 
114 + meson/mesonbuild/optinterpreter.py | 234 + meson/mesonbuild/programs.py | 386 + meson/mesonbuild/rewriter.py | 970 ++ meson/mesonbuild/scripts/__init__.py | 21 + meson/mesonbuild/scripts/clangformat.py | 91 + meson/mesonbuild/scripts/clangtidy.py | 57 + meson/mesonbuild/scripts/cleantrees.py | 44 + meson/mesonbuild/scripts/cmake_run_ctgt.py | 102 + meson/mesonbuild/scripts/cmd_or_ps.ps1 | 22 + meson/mesonbuild/scripts/coverage.py | 173 + meson/mesonbuild/scripts/delwithsuffix.py | 36 + meson/mesonbuild/scripts/depfixer.py | 509 + meson/mesonbuild/scripts/depscan.py | 201 + meson/mesonbuild/scripts/dirchanger.py | 29 + meson/mesonbuild/scripts/externalproject.py | 109 + meson/mesonbuild/scripts/gettext.py | 125 + meson/mesonbuild/scripts/gtkdochelper.py | 295 + meson/mesonbuild/scripts/hotdochelper.py | 38 + meson/mesonbuild/scripts/meson_exe.py | 125 + meson/mesonbuild/scripts/msgfmthelper.py | 37 + meson/mesonbuild/scripts/regen_checker.py | 64 + meson/mesonbuild/scripts/scanbuild.py | 65 + meson/mesonbuild/scripts/symbolextractor.py | 331 + meson/mesonbuild/scripts/tags.py | 53 + meson/mesonbuild/scripts/uninstall.py | 50 + meson/mesonbuild/scripts/vcstagger.py | 44 + meson/mesonbuild/scripts/yelphelper.py | 133 + meson/mesonbuild/templates/__init__.py | 0 meson/mesonbuild/templates/cpptemplates.py | 185 + meson/mesonbuild/templates/cstemplates.py | 134 + meson/mesonbuild/templates/ctemplates.py | 166 + meson/mesonbuild/templates/cudatemplates.py | 185 + meson/mesonbuild/templates/dlangtemplates.py | 143 + meson/mesonbuild/templates/fortrantemplates.py | 140 + meson/mesonbuild/templates/javatemplates.py | 136 + meson/mesonbuild/templates/mesontemplates.py | 75 + meson/mesonbuild/templates/objcpptemplates.py | 167 + meson/mesonbuild/templates/objctemplates.py | 166 + meson/mesonbuild/templates/rusttemplates.py | 113 + meson/mesonbuild/templates/samplefactory.py | 40 + meson/mesonbuild/templates/sampleimpl.py | 21 + meson/mesonbuild/wrap/__init__.py | 59 + meson/mesonbuild/wrap/wrap.py | 607 ++ meson/mesonbuild/wrap/wraptool.py | 220 + meson/packaging/License.rtf | 73 + meson/packaging/create_zipapp.py | 22 + meson/packaging/createmsi.py | 372 + meson/packaging/createpkg.py | 117 + .../macpages/English.lproj/conclusion.html | 24 + .../packaging/macpages/English.lproj/license.html | 209 + .../packaging/macpages/English.lproj/welcome.html | 12 + meson/pyproject.toml | 2 + meson/run_cross_test.py | 63 + meson/run_custom_lint.py | 76 + meson/run_format_tests.py | 82 + meson/run_meson_command_tests.py | 190 + meson/run_mypy.py | 93 + meson/run_project_tests.py | 1593 +++ meson/run_single_test.py | 77 + meson/run_tests.py | 387 + meson/run_unittests.py | 10647 +++++++++++++++++++ meson/setup.cfg | 57 + meson/setup.py | 31 + meson/sider.yml | 7 + meson/skip_ci.py | 77 + meson/test cases/cmake/1 basic/main.cpp | 10 + meson/test cases/cmake/1 basic/meson.build | 14 + .../cmake/1 basic/subprojects/cmMod/CMakeLists.txt | 20 + .../cmake/1 basic/subprojects/cmMod/cmMod.cpp | 15 + .../cmake/1 basic/subprojects/cmMod/cmMod.hpp | 18 + .../cmake/1 basic/subprojects/cmMod/cpp_pch.hpp | 2 + meson/test cases/cmake/10 header only/main.cpp | 16 + meson/test cases/cmake/10 header only/meson.build | 12 + .../subprojects/cmMod/CMakeLists.txt | 12 + .../subprojects/cmMod/include/cmMod.hpp | 24 + .../cmake/FindSomethingLikePython.cmake | 9 + .../cmake/11 cmake_module_path/meson.build | 25 + .../subprojects/cmMod/CMakeLists.txt | 15 + .../11 cmake_module_path/subprojects/cmMod/gen.py | 9 + .../cmake/11 
cmake_module_path/test.json | 5 + .../cmake/12 generator expressions/main.cpp | 10 + .../cmake/12 generator expressions/meson.build | 12 + .../subprojects/cmMod/CMakeLists.txt | 22 + .../subprojects/cmMod/include/cmMod.hpp | 31 + meson/test cases/cmake/13 system includes/main.cpp | 10 + .../cmake/13 system includes/meson.build | 18 + .../subprojects/cmMod/CMakeLists.txt | 15 + .../13 system includes/subprojects/cmMod/cmMod.cpp | 12 + .../13 system includes/subprojects/cmMod/cmMod.hpp | 13 + .../subprojects/cmMod/sysInc/triggerWarn.hpp | 14 + .../cmake/14 fortran threads/meson.build | 12 + .../cmake/15 object library advanced/main.cpp | 11 + .../cmake/15 object library advanced/meson.build | 17 + .../subprojects/cmObjLib/CMakeLists.txt | 18 + .../subprojects/cmObjLib/genC.cpp | 31 + .../subprojects/cmObjLib/libA.cpp | 9 + .../subprojects/cmObjLib/libA.hpp | 16 + .../subprojects/cmObjLib/libB.cpp | 6 + .../subprojects/cmObjLib/libB.hpp | 16 + meson/test cases/cmake/16 threads/main.cpp | 9 + meson/test cases/cmake/16 threads/meson.build | 12 + .../test cases/cmake/16 threads/meson_options.txt | 1 + .../16 threads/subprojects/cmMod/CMakeLists.txt | 15 + .../cmake/16 threads/subprojects/cmMod/cmMod.cpp | 15 + .../cmake/16 threads/subprojects/cmMod/cmMod.hpp | 21 + .../cmake/16 threads/subprojects/cmMod/main.cpp | 9 + meson/test cases/cmake/16 threads/test.json | 11 + .../cmake/17 include path order/main.cpp | 10 + .../cmake/17 include path order/meson.build | 9 + .../subprojects/cmMod/CMakeLists.txt | 34 + .../subprojects/cmMod/cmMod.cpp | 11 + .../subprojects/cmMod/incA/cmMod.hpp | 4 + .../subprojects/cmMod/incB/cmMod.hpp | 4 + .../subprojects/cmMod/incC/cmMod.hpp | 4 + .../subprojects/cmMod/incD/cmMod.hpp | 4 + .../subprojects/cmMod/incE/cmMod.hpp | 4 + .../subprojects/cmMod/incF/cmMod.hpp | 4 + .../subprojects/cmMod/incG/cmMod.hpp | 14 + .../subprojects/cmMod/incH/cmMod.hpp | 4 + .../subprojects/cmMod/incI/cmMod.hpp | 4 + .../subprojects/cmMod/incJ/cmMod.hpp | 4 + .../subprojects/cmMod/incL/cmMod.hpp | 4 + .../subprojects/cmMod/incM/cmMod.hpp | 4 + .../subprojects/cmMod/incN/cmMod.hpp | 4 + .../subprojects/cmMod/incO/cmMod.hpp | 4 + .../subprojects/cmMod/incP/cmMod.hpp | 4 + .../cmake/18 skip include files/main.cpp | 10 + .../cmake/18 skip include files/meson.build | 9 + .../subprojects/cmMod/CMakeLists.txt | 15 + .../subprojects/cmMod/cmMod.cpp | 10 + .../subprojects/cmMod/cmMod.hpp | 16 + .../subprojects/cmMod/fakeInc/CMakeLists.txt | 30 + .../subprojects/cmMod/fakeInc/cmModInc1.cpp | 7 + .../subprojects/cmMod/fakeInc/cmModInc2.cpp | 7 + .../subprojects/cmMod/fakeInc/cmModInc3.cpp | 7 + .../subprojects/cmMod/fakeInc/cmModInc4.cpp | 7 + .../test cases/cmake/19 advanced options/main.cpp | 18 + .../cmake/19 advanced options/meson.build | 29 + .../subprojects/cmOpts/CMakeLists.txt | 18 + .../subprojects/cmOpts/cmMod.cpp | 31 + .../subprojects/cmOpts/cmMod.hpp | 14 + .../subprojects/cmOpts/cmTest.cpp | 25 + .../subprojects/cmOpts/cmTest.hpp | 3 + .../subprojects/cmOpts/main.cpp | 10 + .../test cases/cmake/19 advanced options/test.json | 8 + meson/test cases/cmake/2 advanced/main.cpp | 15 + meson/test cases/cmake/2 advanced/meson.build | 27 + .../2 advanced/subprojects/cmMod/CMakeLists.txt | 38 + .../cmake/2 advanced/subprojects/cmMod/config.h.in | 3 + .../2 advanced/subprojects/cmMod/lib/cmMod.cpp | 26 + .../2 advanced/subprojects/cmMod/lib/cmMod.hpp | 13 + .../cmake/2 advanced/subprojects/cmMod/main.cpp | 11 + meson/test cases/cmake/2 advanced/test.json | 8 + .../test cases/cmake/20 cmake 
file/foolib.cmake.in | 1 + meson/test cases/cmake/20 cmake file/meson.build | 14 + meson/test cases/cmake/20 cmake file/test.json | 5 + .../test cases/cmake/21 shared module/meson.build | 13 + meson/test cases/cmake/21 shared module/prog.c | 108 + meson/test cases/cmake/21 shared module/runtime.c | 19 + .../subprojects/cmMod/CMakeLists.txt | 7 + .../subprojects/cmMod/module/module.c | 96 + .../subprojects/cmMod/module/module.h | 3 + .../22 cmake module/cmake_project/CMakeLists.txt | 4 + meson/test cases/cmake/22 cmake module/meson.build | 31 + .../cmake/22 cmake module/projectConfig.cmake.in | 4 + meson/test cases/cmake/22 cmake module/test.json | 6 + .../cmake/23 cmake toolchain/CMakeToolchain.cmake | 1 + .../cmake/23 cmake toolchain/meson.build | 13 + .../cmake/23 cmake toolchain/nativefile.ini.in | 9 + .../subprojects/cmMod/CMakeLists.txt | 15 + .../subprojects/cmModFortran/CMakeLists.txt | 19 + meson/test cases/cmake/24 mixing languages/main.c | 5 + .../cmake/24 mixing languages/meson.build | 13 + .../subprojects/cmTest/CMakeLists.txt | 8 + .../subprojects/cmTest/cmTest.c | 13 + .../subprojects/cmTest/cmTest.h | 3 + .../subprojects/cmTest/cmTest.m | 7 + meson/test cases/cmake/25 assembler/main.c | 18 + meson/test cases/cmake/25 assembler/meson.build | 9 + .../25 assembler/subprojects/cmTest/CMakeLists.txt | 45 + .../cmake/25 assembler/subprojects/cmTest/cmTest.c | 8 + .../25 assembler/subprojects/cmTest/cmTestAsm.s | 4 + meson/test cases/cmake/3 advanced no dep/main.cpp | 15 + .../test cases/cmake/3 advanced no dep/meson.build | 19 + .../subprojects/cmMod/CMakeLists.txt | 26 + .../subprojects/cmMod/config.h.in | 3 + .../subprojects/cmMod/lib/cmMod.cpp | 16 + .../subprojects/cmMod/lib/cmMod.hpp | 13 + .../3 advanced no dep/subprojects/cmMod/main.cpp | 10 + meson/test cases/cmake/3 advanced no dep/test.json | 11 + meson/test cases/cmake/4 code gen/main.cpp | 8 + meson/test cases/cmake/4 code gen/meson.build | 24 + .../subprojects/cmCodeGen/CMakeLists.txt | 6 + .../4 code gen/subprojects/cmCodeGen/main.cpp | 21 + meson/test cases/cmake/4 code gen/test.hpp | 5 + meson/test cases/cmake/5 object library/main.cpp | 11 + .../test cases/cmake/5 object library/meson.build | 21 + .../subprojects/cmObjLib/CMakeLists.txt | 11 + .../5 object library/subprojects/cmObjLib/libA.cpp | 5 + .../5 object library/subprojects/cmObjLib/libA.hpp | 16 + .../5 object library/subprojects/cmObjLib/libB.cpp | 6 + .../5 object library/subprojects/cmObjLib/libB.hpp | 16 + .../cmake/6 object library no dep/main.cpp | 11 + .../cmake/6 object library no dep/meson.build | 13 + .../subprojects/cmObjLib/CMakeLists.txt | 6 + .../subprojects/cmObjLib/libA.cpp | 5 + .../subprojects/cmObjLib/libA.hpp | 16 + .../subprojects/cmObjLib/libB.cpp | 5 + .../subprojects/cmObjLib/libB.hpp | 16 + meson/test cases/cmake/7 cmake options/meson.build | 3 + .../subprojects/cmOpts/CMakeLists.txt | 10 + meson/test cases/cmake/7 cmake options/test.json | 12 + meson/test cases/cmake/8 custom command/main.cpp | 11 + .../test cases/cmake/8 custom command/meson.build | 16 + .../subprojects/cmMod/CMakeLists.txt | 159 + .../subprojects/cmMod/args_test.cpp | 18 + .../8 custom command/subprojects/cmMod/cmMod.cpp | 24 + .../8 custom command/subprojects/cmMod/cmMod.hpp | 14 + .../8 custom command/subprojects/cmMod/cp.cpp | 22 + .../subprojects/cmMod/cpyBase.cpp.am | 5 + .../subprojects/cmMod/cpyBase.hpp.am | 5 + .../subprojects/cmMod/cpyInc.hpp.am | 3 + .../subprojects/cmMod/cpyNext.cpp.am | 5 + .../subprojects/cmMod/cpyNext.hpp.am | 5 + .../8 custom 
command/subprojects/cmMod/cpyTest.cpp | 9 + .../subprojects/cmMod/cpyTest/CMakeLists.txt | 7 + .../subprojects/cmMod/cpyTest/cpyTest.hpp | 5 + .../subprojects/cmMod/cpyTest/cpyTest2.hpp | 3 + .../subprojects/cmMod/cpyTest/cpyTest3.hpp | 3 + .../subprojects/cmMod/cpyTest/cpyTest4.hpp | 3 + .../subprojects/cmMod/cpyTest/cpyTest5.hpp | 3 + .../8 custom command/subprojects/cmMod/genMain.cpp | 40 + .../subprojects/cmMod/macro_name.cpp | 20 + .../cmake/9 disabled subproject/meson.build | 6 + meson/test cases/common/1 trivial/meson.build | 29 + meson/test cases/common/1 trivial/trivial.c | 6 + meson/test cases/common/10 man install/bar.2 | 1 + meson/test cases/common/10 man install/baz.1.in | 6 + meson/test cases/common/10 man install/foo.1 | 1 + meson/test cases/common/10 man install/foo.fr.1 | 1 + meson/test cases/common/10 man install/meson.build | 14 + meson/test cases/common/10 man install/test.json | 10 + .../common/10 man install/vanishing/meson.build | 1 + .../common/10 man install/vanishing/vanishing.1 | 1 + .../common/10 man install/vanishing/vanishing.2 | 1 + .../common/100 postconf with args/meson.build | 10 + .../common/100 postconf with args/postconf.py | 18 + .../common/100 postconf with args/prog.c | 5 + .../common/100 postconf with args/raw.dat | 1 + .../common/101 testframework options/meson.build | 8 + .../101 testframework options/meson_options.txt | 3 + .../common/101 testframework options/test.json | 10 + .../test cases/common/102 extract same name/lib.c | 3 + .../test cases/common/102 extract same name/main.c | 6 + .../common/102 extract same name/meson.build | 19 + .../common/102 extract same name/src/lib.c | 3 + .../common/103 has header symbol/meson.build | 40 + meson/test cases/common/104 has arg/meson.build | 60 + .../common/105 generatorcustom/catter.py | 14 + .../common/105 generatorcustom/gen-resx.py | 9 + meson/test cases/common/105 generatorcustom/gen.py | 13 + meson/test cases/common/105 generatorcustom/main.c | 8 + .../common/105 generatorcustom/meson.build | 28 + .../test cases/common/105 generatorcustom/res1.txt | 1 + .../test cases/common/105 generatorcustom/res2.txt | 1 + .../106 multiple dir configure file/meson.build | 11 + .../106 multiple dir configure file/subdir/foo.txt | 0 .../subdir/meson.build | 11 + .../subdir/someinput.in | 0 .../107 spaces backslash/asm output/meson.build | 2 + .../107 spaces backslash/comparer-end-notstring.c | 20 + .../common/107 spaces backslash/comparer-end.c | 16 + .../common/107 spaces backslash/comparer.c | 16 + .../common/107 spaces backslash/include/comparer.h | 4 + .../common/107 spaces backslash/meson.build | 28 + meson/test cases/common/108 ternary/meson.build | 12 + .../109 custom target capture/data_source.txt | 1 + .../common/109 custom target capture/meson.build | 24 + .../109 custom target capture/my_compiler.py | 14 + .../common/109 custom target capture/test.json | 5 + meson/test cases/common/11 subdir/meson.build | 2 + .../test cases/common/11 subdir/subdir/meson.build | 2 + meson/test cases/common/11 subdir/subdir/prog.c | 1 + .../test cases/common/110 allgenerate/converter.py | 8 + .../common/110 allgenerate/foobar.cpp.in | 6 + .../test cases/common/110 allgenerate/meson.build | 20 + meson/test cases/common/111 pathjoin/meson.build | 24 + .../common/112 subdir subproject/meson.build | 2 + .../common/112 subdir subproject/prog/meson.build | 5 + .../common/112 subdir subproject/prog/prog.c | 5 + .../subprojects/sub/meson.build | 3 + .../112 subdir subproject/subprojects/sub/sub.c | 5 + .../112 subdir 
subproject/subprojects/sub/sub.h | 6 + .../meson.build | 20 + meson/test cases/common/114 skip/meson.build | 4 + .../common/115 subproject project arguments/exe.c | 27 + .../115 subproject project arguments/exe.cpp | 28 + .../115 subproject project arguments/meson.build | 17 + .../subprojects/subexe/meson.build | 13 + .../subprojects/subexe/subexe.c | 27 + meson/test cases/common/116 test skip/meson.build | 4 + meson/test cases/common/116 test skip/test_skip.c | 3 + .../common/117 shared module/meson.build | 40 + meson/test cases/common/117 shared module/module.c | 96 + meson/test cases/common/117 shared module/nosyms.c | 4 + meson/test cases/common/117 shared module/prog.c | 103 + .../test cases/common/117 shared module/runtime.c | 19 + .../test cases/common/117 shared module/test.json | 7 + .../common/118 llvm ir and assembly/main.c | 13 + .../common/118 llvm ir and assembly/main.cpp | 15 + .../common/118 llvm ir and assembly/meson.build | 79 + .../118 llvm ir and assembly/square-aarch64.S | 29 + .../common/118 llvm ir and assembly/square-arm.S | 29 + .../common/118 llvm ir and assembly/square-x86.S | 36 + .../118 llvm ir and assembly/square-x86_64.S | 37 + .../common/118 llvm ir and assembly/square.ll | 4 + .../118 llvm ir and assembly/symbol-underscore.h | 5 + .../test cases/common/119 cpp and asm/meson.build | 33 + .../test cases/common/119 cpp and asm/retval-arm.S | 11 + .../test cases/common/119 cpp and asm/retval-x86.S | 11 + .../common/119 cpp and asm/retval-x86_64.S | 11 + .../common/119 cpp and asm/symbol-underscore.h | 5 + meson/test cases/common/119 cpp and asm/trivial.cc | 16 + meson/test cases/common/12 data/datafile.dat | 1 + meson/test cases/common/12 data/etcfile.dat | 1 + .../common/12 data/fileobject_datafile.dat | 1 + meson/test cases/common/12 data/meson.build | 24 + meson/test cases/common/12 data/runscript.sh | 3 + meson/test cases/common/12 data/somefile.txt | 0 meson/test cases/common/12 data/test.json | 15 + .../test cases/common/12 data/to_be_renamed_1.txt | 0 .../test cases/common/12 data/to_be_renamed_3.txt | 0 .../test cases/common/12 data/to_be_renamed_4.txt | 0 .../common/12 data/vanishing/meson.build | 1 + .../common/12 data/vanishing/to_be_renamed_2.txt | 0 .../common/12 data/vanishing/vanishing.dat | 1 + .../common/12 data/vanishing/vanishing2.dat | 4 + .../120 extract all shared library/extractor.h | 6 + .../common/120 extract all shared library/four.c | 5 + .../120 extract all shared library/func1234.def | 5 + .../120 extract all shared library/meson.build | 14 + .../common/120 extract all shared library/one.c | 5 + .../common/120 extract all shared library/prog.c | 10 + .../common/120 extract all shared library/three.c | 5 + .../common/120 extract all shared library/two.c | 5 + .../common/121 object only target/meson.build | 51 + .../common/121 object only target/obj_generator.py | 20 + .../121 object only target/objdir/meson.build | 27 + .../common/121 object only target/objdir/source4.c | 3 + .../common/121 object only target/objdir/source5.c | 3 + .../common/121 object only target/objdir/source6.c | 3 + .../common/121 object only target/prog.c | 11 + .../common/121 object only target/source.c | 3 + .../common/121 object only target/source2.c | 3 + .../common/121 object only target/source2.def | 2 + .../common/121 object only target/source3.c | 3 + .../common/121 object only target/test.json | 6 + .../common/122 no buildincdir/include/header.h | 3 + .../common/122 no buildincdir/meson.build | 14 + meson/test cases/common/122 no buildincdir/prog.c | 
5 + .../123 custom target directory install/docgen.py | 15 + .../meson.build | 9 + .../123 custom target directory install/test.json | 7 + .../common/124 dependency file generation/main .c | 3 + .../124 dependency file generation/meson.build | 14 + .../inc/confdata.in | 1 + .../inc/meson.build | 6 + .../125 configure file in generator/meson.build | 4 + .../125 configure file in generator/src/gen.py | 13 + .../125 configure file in generator/src/main.c | 17 + .../src/meson.build | 7 + .../125 configure file in generator/src/source | 1 + .../common/126 generated llvm ir/copyfile.py | 6 + .../test cases/common/126 generated llvm ir/main.c | 13 + .../common/126 generated llvm ir/meson.build | 28 + .../common/126 generated llvm ir/square.ll.in | 4 + .../common/127 generated assembly/copyfile.py | 6 + .../common/127 generated assembly/empty.c | 0 .../common/127 generated assembly/main.c | 16 + .../common/127 generated assembly/meson.build | 66 + .../common/127 generated assembly/square-arm.S.in | 13 + .../common/127 generated assembly/square-x86.S.in | 34 + .../127 generated assembly/square-x86_64.S.in | 38 + .../common/127 generated assembly/square.def | 2 + .../127 generated assembly/symbol-underscore.h | 5 + .../128 build by default targets in tests/main.c | 3 + .../meson.build | 23 + .../write_file.py | 6 + .../common/129 build by default/checkexists.py | 10 + meson/test cases/common/129 build by default/foo.c | 6 + .../common/129 build by default/meson.build | 45 + .../common/129 build by default/mygen.py | 8 + .../common/129 build by default/source.txt | 1 + meson/test cases/common/13 pch/c/meson.build | 14 + meson/test cases/common/13 pch/c/pch/prog.h | 6 + meson/test cases/common/13 pch/c/prog.c | 10 + meson/test cases/common/13 pch/cpp/meson.build | 1 + meson/test cases/common/13 pch/cpp/pch/prog.hh | 1 + meson/test cases/common/13 pch/cpp/prog.cc | 11 + .../common/13 pch/generated/gen_custom.py | 5 + .../common/13 pch/generated/gen_generator.py | 7 + .../common/13 pch/generated/generated_generator.in | 1 + .../test cases/common/13 pch/generated/meson.build | 22 + .../test cases/common/13 pch/generated/pch/prog.h | 2 + meson/test cases/common/13 pch/generated/prog.c | 6 + meson/test cases/common/13 pch/meson.build | 22 + meson/test cases/common/13 pch/mixed/func.c | 7 + meson/test cases/common/13 pch/mixed/main.cc | 10 + meson/test cases/common/13 pch/mixed/meson.build | 14 + meson/test cases/common/13 pch/mixed/pch/func.h | 1 + meson/test cases/common/13 pch/mixed/pch/main.h | 1 + .../common/13 pch/userDefined/meson.build | 10 + .../test cases/common/13 pch/userDefined/pch/pch.c | 5 + .../test cases/common/13 pch/userDefined/pch/pch.h | 1 + meson/test cases/common/13 pch/userDefined/prog.c | 8 + .../withIncludeDirectories/include/lib/lib.h | 1 + .../13 pch/withIncludeDirectories/meson.build | 15 + .../13 pch/withIncludeDirectories/pch/prog.h | 1 + .../common/13 pch/withIncludeDirectories/prog.c | 10 + .../common/13 pch/withIncludeFile/meson.build | 18 + .../common/13 pch/withIncludeFile/pch/prog.h | 6 + .../common/13 pch/withIncludeFile/prog.c | 11 + .../common/130 include order/ctsub/copyfile.py | 6 + .../common/130 include order/ctsub/emptyfile.c | 0 .../common/130 include order/ctsub/main.h | 1 + .../common/130 include order/ctsub/meson.build | 9 + .../test cases/common/130 include order/inc1/hdr.h | 1 + .../test cases/common/130 include order/inc2/hdr.h | 1 + .../common/130 include order/meson.build | 36 + .../common/130 include order/ordertest.c | 10 + .../common/130 include 
order/sub1/main.h | 1 + .../common/130 include order/sub1/meson.build | 4 + .../common/130 include order/sub1/some.c | 6 + .../common/130 include order/sub1/some.h | 10 + .../common/130 include order/sub2/main.h | 1 + .../common/130 include order/sub2/meson.build | 2 + .../common/130 include order/sub3/main.h | 1 + .../common/130 include order/sub3/meson.build | 1 + .../common/130 include order/sub4/main.c | 8 + .../common/130 include order/sub4/main.h | 3 + .../common/130 include order/sub4/meson.build | 6 + .../test cases/common/131 override options/four.c | 9 + .../common/131 override options/meson.build | 6 + meson/test cases/common/131 override options/one.c | 3 + .../test cases/common/131 override options/three.c | 7 + meson/test cases/common/131 override options/two.c | 6 + meson/test cases/common/132 get define/concat.h | 24 + meson/test cases/common/132 get define/meson.build | 104 + .../common/132 get define/meson_options.txt | 1 + meson/test cases/common/133 c cpp and asm/main.c | 8 + meson/test cases/common/133 c cpp and asm/main.cpp | 11 + .../common/133 c cpp and asm/meson.build | 23 + .../common/133 c cpp and asm/retval-arm.S | 11 + .../common/133 c cpp and asm/retval-x86.S | 12 + .../common/133 c cpp and asm/retval-x86_64.S | 11 + .../test cases/common/133 c cpp and asm/somelib.c | 3 + .../common/133 c cpp and asm/symbol-underscore.h | 5 + .../test cases/common/134 compute int/config.h.in | 4 + meson/test cases/common/134 compute int/foobar.h | 6 + .../test cases/common/134 compute int/meson.build | 46 + meson/test cases/common/134 compute int/prog.c.in | 25 + .../135 custom target object output/meson.build | 16 + .../obj_generator.py | 18 + .../objdir/meson.build | 5 + .../objdir/source.c | 3 + .../progdir/meson.build | 1 + .../135 custom target object output/progdir/prog.c | 5 + .../common/136 empty build file/meson.build | 2 + .../common/136 empty build file/subdir/meson.build | 0 .../common/137 whole archive/exe/meson.build | 1 + .../common/137 whole archive/exe2/meson.build | 1 + .../common/137 whole archive/exe3/meson.build | 1 + .../common/137 whole archive/exe4/meson.build | 1 + meson/test cases/common/137 whole archive/func1.c | 7 + meson/test cases/common/137 whole archive/func2.c | 7 + .../common/137 whole archive/meson.build | 49 + meson/test cases/common/137 whole archive/mylib.h | 21 + meson/test cases/common/137 whole archive/prog.c | 5 + .../sh_func2_dep_func1/meson.build | 4 + .../sh_func2_linked_func1/meson.build | 3 + .../sh_func2_transdep_func1/meson.build | 6 + .../sh_only_link_whole/meson.build | 1 + .../common/137 whole archive/st_func1/meson.build | 1 + .../common/137 whole archive/st_func2/meson.build | 1 + meson/test cases/common/138 C and CPP link/dummy.c | 0 meson/test cases/common/138 C and CPP link/foo.c | 19 + meson/test cases/common/138 C and CPP link/foo.cpp | 34 + meson/test cases/common/138 C and CPP link/foo.h | 16 + meson/test cases/common/138 C and CPP link/foo.hpp | 24 + .../test cases/common/138 C and CPP link/foobar.c | 27 + .../test cases/common/138 C and CPP link/foobar.h | 16 + .../common/138 C and CPP link/meson.build | 133 + meson/test cases/common/138 C and CPP link/sub.c | 19 + meson/test cases/common/138 C and CPP link/sub.h | 16 + .../139 mesonintrospect from scripts/check_env.py | 28 + .../check_introspection.py | 18 + .../139 mesonintrospect from scripts/meson.build | 14 + .../common/14 configure file/basename.py | 28 + .../common/14 configure file/check_file.py | 34 + .../common/14 configure file/check_inputs.py | 14 + 
meson/test cases/common/14 configure file/config.h | 1 + .../common/14 configure file/config.h.in | 5 + .../common/14 configure file/config4a.h.in | 2 + .../common/14 configure file/config4b.h.in | 2 + .../common/14 configure file/config5.h.in | 1 + .../common/14 configure file/config6.h.in | 19 + .../common/14 configure file/config7.h.in | 16 + .../common/14 configure file/config8.h.in | 3 + meson/test cases/common/14 configure file/depfile | 0 .../14 configure file/differentafterbasename1.in | 0 .../14 configure file/differentafterbasename2.in | 0 .../test cases/common/14 configure file/dummy.dat | 0 .../test cases/common/14 configure file/dumpprog.c | 52 + .../common/14 configure file/file_contains.py | 22 + .../common/14 configure file/generator-deps.py | 19 + .../generator-without-input-file.py | 14 + .../common/14 configure file/generator.py | 17 + .../common/14 configure file/invalid-utf8.bin.in | Bin 0 -> 10 bytes .../common/14 configure file/meson.build | 309 + .../14 configure file/nosubst-nocopy1.txt.in | 1 + .../14 configure file/nosubst-nocopy2.txt.in | 1 + meson/test cases/common/14 configure file/prog.c | 17 + meson/test cases/common/14 configure file/prog2.c | 5 + meson/test cases/common/14 configure file/prog4.c | 6 + meson/test cases/common/14 configure file/prog5.c | 6 + meson/test cases/common/14 configure file/prog6.c | 11 + meson/test cases/common/14 configure file/prog7.c | 10 + meson/test cases/common/14 configure file/prog9.c | 18 + .../common/14 configure file/sameafterbasename.in | 0 .../common/14 configure file/sameafterbasename.in2 | 0 .../common/14 configure file/subdir/meson.build | 38 + .../test cases/common/14 configure file/test.json | 8 + .../test cases/common/14 configure file/test.py.in | 4 + meson/test cases/common/14 configure file/touch.py | 16 + .../generator.py | 14 + .../140 custom target multiple outputs/meson.build | 44 + .../140 custom target multiple outputs/test.json | 10 + .../common/141 special characters/arg-char-test.c | 10 + .../141 special characters/arg-string-test.c | 12 + .../141 special characters/arg-unquoted-test.c | 17 + .../common/141 special characters/check_quoting.py | 28 + .../common/141 special characters/meson.build | 75 + .../common/141 special characters/test.json | 6 + .../test cases/common/142 nested links/meson.build | 8 + meson/test cases/common/142 nested links/xephyr.c | 3 + .../test cases/common/143 list of file sources/foo | 1 + .../common/143 list of file sources/gen.py | 7 + .../common/143 list of file sources/meson.build | 12 + .../common/144 link depends custom target/foo.c | 15 + .../144 link depends custom target/make_file.py | 5 + .../144 link depends custom target/meson.build | 19 + .../145 recursive linking/3rdorderdeps/lib.c.in | 8 + .../145 recursive linking/3rdorderdeps/main.c.in | 16 + .../145 recursive linking/3rdorderdeps/meson.build | 49 + .../common/145 recursive linking/circular/lib1.c | 6 + .../common/145 recursive linking/circular/lib2.c | 6 + .../common/145 recursive linking/circular/lib3.c | 6 + .../common/145 recursive linking/circular/main.c | 28 + .../145 recursive linking/circular/meson.build | 5 + .../common/145 recursive linking/circular/prop1.c | 3 + .../common/145 recursive linking/circular/prop2.c | 3 + .../common/145 recursive linking/circular/prop3.c | 3 + .../145 recursive linking/edge-cases/libsto.c | 8 + .../145 recursive linking/edge-cases/meson.build | 9 + .../145 recursive linking/edge-cases/shstmain.c | 16 + .../145 recursive linking/edge-cases/stobuilt.c | 7 + .../145 
recursive linking/edge-cases/stomain.c | 16 + .../test cases/common/145 recursive linking/lib.h | 17 + .../test cases/common/145 recursive linking/main.c | 46 + .../common/145 recursive linking/meson.build | 29 + .../common/145 recursive linking/shnodep/lib.c | 6 + .../145 recursive linking/shnodep/meson.build | 1 + .../common/145 recursive linking/shshdep/lib.c | 8 + .../145 recursive linking/shshdep/meson.build | 1 + .../common/145 recursive linking/shstdep/lib.c | 8 + .../145 recursive linking/shstdep/meson.build | 1 + .../common/145 recursive linking/stnodep/lib.c | 6 + .../145 recursive linking/stnodep/meson.build | 2 + .../common/145 recursive linking/stshdep/lib.c | 8 + .../145 recursive linking/stshdep/meson.build | 2 + .../common/145 recursive linking/ststdep/lib.c | 8 + .../145 recursive linking/ststdep/meson.build | 2 + meson/test cases/common/146 library at root/lib.c | 6 + .../common/146 library at root/main/main.c | 5 + .../common/146 library at root/main/meson.build | 2 + .../common/146 library at root/meson.build | 3 + meson/test cases/common/147 simd/fallback.c | 8 + .../common/147 simd/include/simdheader.h | 3 + meson/test cases/common/147 simd/meson.build | 44 + meson/test cases/common/147 simd/simd_avx.c | 49 + meson/test cases/common/147 simd/simd_avx2.c | 42 + meson/test cases/common/147 simd/simd_mmx.c | 67 + meson/test cases/common/147 simd/simd_neon.c | 20 + meson/test cases/common/147 simd/simd_sse.c | 29 + meson/test cases/common/147 simd/simd_sse2.c | 37 + meson/test cases/common/147 simd/simd_sse3.c | 38 + meson/test cases/common/147 simd/simd_sse41.c | 40 + meson/test cases/common/147 simd/simd_sse42.c | 43 + meson/test cases/common/147 simd/simd_ssse3.c | 48 + meson/test cases/common/147 simd/simdchecker.c | 143 + meson/test cases/common/147 simd/simdfuncs.h | 75 + .../meson.build | 20 + .../module.c | 16 + .../prog.c | 61 + meson/test cases/common/149 dotinclude/dotproc.c | 10 + meson/test cases/common/149 dotinclude/meson.build | 5 + meson/test cases/common/149 dotinclude/stdio.h | 6 + meson/test cases/common/15 if/meson.build | 72 + meson/test cases/common/15 if/prog.c | 1 + .../common/150 reserved targets/all/meson.build | 1 + .../150 reserved targets/benchmark/meson.build | 1 + .../150 reserved targets/clean-ctlist/meson.build | 1 + .../150 reserved targets/clean-gcda/meson.build | 1 + .../150 reserved targets/clean-gcno/meson.build | 1 + .../common/150 reserved targets/clean/meson.build | 1 + .../150 reserved targets/coverage-html/meson.build | 1 + .../150 reserved targets/coverage-text/meson.build | 1 + .../150 reserved targets/coverage-xml/meson.build | 1 + .../150 reserved targets/coverage/meson.build | 1 + .../common/150 reserved targets/dist/meson.build | 1 + .../150 reserved targets/distcheck/meson.build | 1 + .../150 reserved targets/install/meson.build | 1 + .../common/150 reserved targets/meson.build | 34 + .../common/150 reserved targets/phony/meson.build | 1 + .../150 reserved targets/reconfigure/meson.build | 1 + .../common/150 reserved targets/runtarget/echo.py | 6 + .../150 reserved targets/runtarget/meson.build | 2 + .../150 reserved targets/scan-build/meson.build | 1 + .../test cases/common/150 reserved targets/test.c | 3 + .../common/150 reserved targets/test/meson.build | 1 + .../150 reserved targets/uninstall/meson.build | 1 + .../common/151 duplicate source names/dir1/file.c | 16 + .../151 duplicate source names/dir1/meson.build | 1 + .../151 duplicate source names/dir2/dir1/file.c | 1 + .../common/151 duplicate source 
names/dir2/file.c | 1 + .../151 duplicate source names/dir2/meson.build | 1 + .../151 duplicate source names/dir3/dir1/file.c | 1 + .../common/151 duplicate source names/dir3/file.c | 1 + .../151 duplicate source names/dir3/meson.build | 1 + .../common/151 duplicate source names/meson.build | 19 + .../common/152 index customtarget/check_args.py | 18 + .../common/152 index customtarget/gen_sources.py | 49 + .../test cases/common/152 index customtarget/lib.c | 20 + .../common/152 index customtarget/meson.build | 80 + .../common/152 index customtarget/subdir/foo.c | 22 + .../152 index customtarget/subdir/meson.build | 19 + .../153 wrap file should not failed/meson.build | 16 + .../src/meson.build | 6 + .../src/subprojects/foo/prog2.c | 7 + .../src/subprojects/prog.c | 7 + .../153 wrap file should not failed/src/test.c | 9 + .../subprojects/.gitignore | 3 + .../subprojects/bar.wrap | 8 + .../subprojects/foo.wrap | 11 + .../subprojects/packagefiles/bar-1.0-patch.tar.xz | Bin 0 -> 244 bytes .../subprojects/packagefiles/bar-1.0.tar.xz | Bin 0 -> 200 bytes .../subprojects/packagefiles/foo-1.0/meson.build | 2 + .../subprojects/patchdir.wrap | 9 + .../subprojects/zlib-1.2.8/foo.c | 3 + .../subprojects/zlib-1.2.8/meson.build | 2 + .../subprojects/zlib.wrap | 10 + .../common/154 includedir subproj/meson.build | 9 + .../common/154 includedir subproj/prog.c | 3 + .../subprojects/inctest/include/incfile.h | 2 + .../subprojects/inctest/meson.build | 13 + .../common/155 subproject dir name collision/a.c | 13 + .../custom_subproject_dir/B/b.c | 20 + .../custom_subproject_dir/B/meson.build | 4 + .../custom_subproject_dir/C/c.c | 14 + .../custom_subproject_dir/C/meson.build | 2 + .../155 subproject dir name collision/meson.build | 12 + .../other_subdir/custom_subproject_dir/other.c | 19 + .../other_subdir/meson.build | 1 + .../common/156 config tool variable/meson.build | 31 + .../copyfile.py | 6 + .../meson.build | 7 + .../subdir/dep.dat | 1 + .../subdir/foo.c.in | 6 + .../subdir/meson.build | 6 + meson/test cases/common/158 disabler/meson.build | 153 + .../test cases/common/159 array option/meson.build | 17 + .../common/159 array option/meson_options.txt | 19 + meson/test cases/common/16 comparison/meson.build | 154 + meson/test cases/common/16 comparison/prog.c | 1 + .../checkcopy.py | 9 + .../foo.c.in | 6 + .../meson.build | 17 + .../common/161 not-found dependency/meson.build | 14 + .../161 not-found dependency/sub/meson.build | 1 + .../subprojects/trivial/meson.build | 3 + .../subprojects/trivial/trivial.c | 3 + .../common/161 not-found dependency/testlib.c | 0 .../common/162 subdir if_found/meson.build | 11 + .../common/162 subdir if_found/subdir/meson.build | 1 + .../meson.build | 1 + .../common/164 dependency factory/meson.build | 66 + .../common/165 get project license/bar.c | 6 + .../common/165 get project license/meson.build | 8 + meson/test cases/common/166 yield/meson.build | 7 + .../test cases/common/166 yield/meson_options.txt | 3 + .../common/166 yield/subprojects/sub/meson.build | 5 + .../166 yield/subprojects/sub/meson_options.txt | 3 + .../contrib/subprojects/alpha/a.c | 15 + .../contrib/subprojects/alpha/meson.build | 4 + .../alpha/var/subprojects/wrap_files_might_be_here | 1 + .../contrib/subprojects/beta/b.c | 14 + .../contrib/subprojects/beta/meson.build | 4 + .../meson.build | 11 + .../167 subproject nested subproject dirs/prog.c | 5 + .../test cases/common/168 preserve gendir/base.inp | 1 + .../168 preserve gendir/com/mesonbuild/subbie.inp | 1 + .../common/168 preserve 
gendir/genprog.py | 46 + .../common/168 preserve gendir/meson.build | 13 + .../common/168 preserve gendir/testprog.c | 6 + meson/test cases/common/169 source in dep/bar.cpp | 5 + meson/test cases/common/169 source in dep/foo.c | 3 + .../common/169 source in dep/generated/funname | 1 + .../169 source in dep/generated/genheader.py | 17 + .../common/169 source in dep/generated/main.c | 5 + .../common/169 source in dep/generated/meson.build | 12 + .../common/169 source in dep/meson.build | 8 + meson/test cases/common/17 array/func.c | 1 + meson/test cases/common/17 array/meson.build | 8 + meson/test cases/common/17 array/prog.c | 3 + .../common/170 generator link whole/export.h | 18 + .../common/170 generator link whole/generator.py | 30 + .../common/170 generator link whole/main.c | 11 + .../common/170 generator link whole/meson.build | 62 + .../meson_test_function.tmpl | 0 .../pull_meson_test_function.c | 6 + .../common/171 initial c_args/meson.build | 7 + .../test cases/common/171 initial c_args/test.json | 8 + .../foo.c | 1 + .../main.c | 16 + .../meson.build | 15 + .../subprojects/subproj/foo.c | 1 + .../subprojects/subproj/meson.build | 3 + meson/test cases/common/173 as-needed/config.h | 14 + meson/test cases/common/173 as-needed/libA.cpp | 7 + meson/test cases/common/173 as-needed/libA.h | 5 + meson/test cases/common/173 as-needed/libB.cpp | 19 + meson/test cases/common/173 as-needed/main.cpp | 7 + meson/test cases/common/173 as-needed/meson.build | 13 + .../common/174 ndebug if-release enabled/main.c | 15 + .../174 ndebug if-release enabled/meson.build | 7 + .../common/175 ndebug if-release disabled/main.c | 7 + .../175 ndebug if-release disabled/meson.build | 7 + .../common/176 subproject version/meson.build | 10 + .../subprojects/a/meson.build | 5 + .../test cases/common/177 subdir_done/meson.build | 12 + meson/test cases/common/178 bothlibraries/dummy.py | 8 + .../test cases/common/178 bothlibraries/libfile.c | 7 + meson/test cases/common/178 bothlibraries/main.c | 8 + .../common/178 bothlibraries/meson.build | 50 + meson/test cases/common/178 bothlibraries/mylib.h | 13 + .../common/179 escape and unicode/file.c.in | 5 + .../common/179 escape and unicode/file.py | 10 + .../common/179 escape and unicode/find.py | 9 + .../test cases/common/179 escape and unicode/fun.c | 3 + .../common/179 escape and unicode/main.c | 12 + .../common/179 escape and unicode/meson.build | 38 + .../test cases/common/18 includedir/include/func.h | 6 + meson/test cases/common/18 includedir/meson.build | 4 + meson/test cases/common/18 includedir/src/func.c | 5 + .../common/18 includedir/src/meson.build | 5 + meson/test cases/common/18 includedir/src/prog.c | 5 + .../test cases/common/180 has link arg/meson.build | 47 + .../common/181 same target name flat layout/foo.c | 1 + .../common/181 same target name flat layout/main.c | 16 + .../181 same target name flat layout/meson.build | 15 + .../181 same target name flat layout/subdir/foo.c | 1 + .../subdir/meson.build | 1 + .../common/182 find override/meson.build | 25 + .../common/182 find override/otherdir/main.c | 5 + .../common/182 find override/otherdir/main2.c | 5 + .../common/182 find override/otherdir/meson.build | 30 + .../common/182 find override/otherdir/source.desc | 1 + .../common/182 find override/otherdir/source2.desc | 1 + .../common/182 find override/subdir/converter.py | 15 + .../182 find override/subdir/gencodegen.py.in | 15 + .../common/182 find override/subdir/meson.build | 14 + .../common/182 find override/subprojects/sub.wrap | 5 + .../182 
find override/subprojects/sub/meson.build | 4 + .../declare_dependency/headers/foo.c | 16 + .../declare_dependency/headers/foo.h | 16 + .../declare_dependency/main.c | 25 + .../declare_dependency/meson.build | 32 + .../declare_dependency/other.c | 20 + .../common/183 partial dependency/meson.build | 17 + meson/test cases/common/184 openmp/main.c | 16 + meson/test cases/common/184 openmp/main.cpp | 16 + meson/test cases/common/184 openmp/main.f90 | 9 + meson/test cases/common/184 openmp/meson.build | 58 + .../test cases/common/185 same target name/file.c | 3 + .../common/185 same target name/meson.build | 4 + .../common/185 same target name/sub/file2.c | 3 + .../common/185 same target name/sub/meson.build | 1 + meson/test cases/common/186 test depends/gen.py | 13 + meson/test cases/common/186 test depends/main.c | 1 + .../test cases/common/186 test depends/meson.build | 26 + meson/test cases/common/186 test depends/test.py | 20 + .../common/187 args flattening/meson.build | 31 + meson/test cases/common/188 dict/meson.build | 71 + meson/test cases/common/188 dict/prog.c | 8 + .../test cases/common/189 check header/meson.build | 48 + .../common/189 check header/ouagadougou.h | 1 + .../common/19 header in file list/header.h | 1 + .../common/19 header in file list/meson.build | 14 + .../common/19 header in file list/prog.c | 3 + .../test cases/common/190 install_mode/config.h.in | 5 + .../common/190 install_mode/data_source.txt | 1 + meson/test cases/common/190 install_mode/foo.1 | 1 + .../test cases/common/190 install_mode/meson.build | 59 + meson/test cases/common/190 install_mode/rootdir.h | 3 + .../common/190 install_mode/runscript.sh | 3 + meson/test cases/common/190 install_mode/stat.c | 1 + .../common/190 install_mode/sub1/second.dat | 1 + meson/test cases/common/190 install_mode/sub2/stub | 0 meson/test cases/common/190 install_mode/test.json | 15 + meson/test cases/common/190 install_mode/trivial.c | 6 + .../191 subproject array version/meson.build | 3 + .../subprojects/foo/meson.build | 1 + .../common/192 feature option/meson.build | 62 + .../common/192 feature option/meson_options.txt | 3 + .../common/193 feature option disabled/meson.build | 23 + .../193 feature option disabled/meson_options.txt | 3 + meson/test cases/common/194 static threads/lib1.c | 13 + meson/test cases/common/194 static threads/lib2.c | 5 + .../common/194 static threads/meson.build | 13 + meson/test cases/common/194 static threads/prog.c | 6 + .../com/mesonbuild/genprog.py | 46 + .../com/mesonbuild/meson.build | 10 + .../com/mesonbuild/subbie.inp | 1 + .../com/mesonbuild/testprog.c | 5 + .../common/195 generator in subdir/meson.build | 3 + .../196 subproject with features/meson.build | 17 + .../196 subproject with features/meson_options.txt | 3 + .../common/196 subproject with features/nothing.c | 4 + .../auto_sub_with_missing_dep/meson.build | 3 + .../subprojects/disabled_sub/lib/meson.build | 3 + .../subprojects/disabled_sub/lib/sub.c | 5 + .../subprojects/disabled_sub/lib/sub.h | 6 + .../subprojects/disabled_sub/meson.build | 3 + .../subprojects/sub/lib/meson.build | 2 + .../subprojects/sub/lib/sub.c | 5 + .../subprojects/sub/lib/sub.h | 6 + .../subprojects/sub/meson.build | 3 + .../common/197 function attributes/meson.build | 111 + .../197 function attributes/meson_options.txt | 7 + .../common/197 function attributes/test.json | 10 + .../common/198 broken subproject/meson.build | 2 + .../subprojects/broken/broken.c | 1 + .../subprojects/broken/meson.build | 4 + .../common/199 argument syntax/meson.build | 
19 + meson/test cases/common/2 cpp/VERSIONFILE | 1 + meson/test cases/common/2 cpp/cpp.C | 6 + meson/test cases/common/2 cpp/meson.build | 41 + meson/test cases/common/2 cpp/something.txt | 1 + meson/test cases/common/2 cpp/trivial.cc | 6 + meson/test cases/common/20 global arg/meson.build | 16 + meson/test cases/common/20 global arg/prog.c | 43 + meson/test cases/common/20 global arg/prog.cc | 15 + .../200 install name_prefix name_suffix/libfile.c | 14 + .../meson.build | 13 + .../200 install name_prefix name_suffix/test.json | 19 + meson/test cases/common/201 kwarg entry/inc/prog.h | 3 + .../test cases/common/201 kwarg entry/meson.build | 7 + meson/test cases/common/201 kwarg entry/prog.c | 7 + meson/test cases/common/201 kwarg entry/test.json | 6 + .../202 custom target build by default/docgen.py | 12 + .../202 custom target build by default/meson.build | 10 + .../202 custom target build by default/test.json | 5 + .../common/203 find_library and headers/foo.h | 1 + .../203 find_library and headers/meson.build | 23 + .../common/204 line continuation/meson.build | 17 + .../common/205 native file path override/main.cpp | 5 + .../205 native file path override/meson.build | 7 + .../205 native file path override/nativefile.ini | 2 + .../common/205 native file path override/test.json | 6 + meson/test cases/common/206 tap tests/cat.c | 26 + .../test cases/common/206 tap tests/issue7515.txt | 27 + meson/test cases/common/206 tap tests/meson.build | 14 + meson/test cases/common/206 tap tests/tester.c | 10 + .../test cases/common/207 warning level 0/main.cpp | 12 + .../common/207 warning level 0/meson.build | 3 + .../common/208 link custom/custom_stlib.py | 81 + .../common/208 link custom/custom_target.c | 6 + .../common/208 link custom/custom_target.py | 6 + meson/test cases/common/208 link custom/dummy.c | 1 + meson/test cases/common/208 link custom/lib.c | 7 + .../test cases/common/208 link custom/meson.build | 86 + meson/test cases/common/208 link custom/outerlib.c | 3 + meson/test cases/common/208 link custom/prog.c | 6 + .../generate_conflicting_stlibs.py | 90 + .../meson.build | 42 + .../209 link custom_i single from multiple/prog.c | 5 + meson/test cases/common/21 target arg/func.c | 9 + meson/test cases/common/21 target arg/func2.c | 9 + meson/test cases/common/21 target arg/meson.build | 9 + meson/test cases/common/21 target arg/prog.cc | 13 + meson/test cases/common/21 target arg/prog2.cc | 13 + .../generate_stlibs.py | 92 + .../meson.build | 42 + .../prog.c | 8 + .../211 dependency get_variable method/meson.build | 66 + .../common/212 source set configuration_data/a.c | 8 + .../common/212 source set configuration_data/all.h | 9 + .../common/212 source set configuration_data/f.c | 7 + .../common/212 source set configuration_data/g.c | 6 + .../212 source set configuration_data/meson.build | 54 + .../212 source set configuration_data/nope.c | 3 + .../212 source set configuration_data/subdir/b.c | 13 + .../subdir/meson.build | 1 + .../common/213 source set dictionary/a.c | 8 + .../common/213 source set dictionary/all.h | 9 + .../common/213 source set dictionary/f.c | 7 + .../common/213 source set dictionary/g.c | 6 + .../common/213 source set dictionary/meson.build | 56 + .../common/213 source set dictionary/nope.c | 3 + .../common/213 source set dictionary/subdir/b.c | 13 + .../213 source set dictionary/subdir/meson.build | 1 + .../common/214 source set custom target/a.c | 7 + .../common/214 source set custom target/all.h | 2 + .../common/214 source set custom target/cp.py | 5 + 
.../common/214 source set custom target/f.c | 5 + .../common/214 source set custom target/g.c | 5 + .../214 source set custom target/meson.build | 28 + .../boards/arm/aarch64.cc | 8 + .../boards/arm/arm.cc | 10 + .../boards/arm/arm.h | 12 + .../boards/arm/arm32.cc | 8 + .../boards/arm/versatilepb.cc | 16 + .../boards/arm/virt.cc | 16 + .../boards/arm/xlnx_zcu102.cc | 16 + .../boards/meson.build | 7 + .../boards/x86/pc.cc | 26 + .../215 source set realistic example/common.h | 41 + .../config/aarch64 | 5 + .../215 source set realistic example/config/arm | 3 + .../215 source set realistic example/config/x86 | 4 + .../devices/meson.build | 3 + .../devices/virtio-mmio.cc | 16 + .../devices/virtio-pci.cc | 16 + .../devices/virtio.cc | 6 + .../devices/virtio.h | 10 + .../215 source set realistic example/main.cc | 32 + .../215 source set realistic example/meson.build | 52 + .../215 source set realistic example/not-found.cc | 8 + .../215 source set realistic example/was-found.cc | 7 + .../215 source set realistic example/zlib.cc | 15 + .../check_object.py | 13 + .../libdir/meson.build | 1 + .../libdir/source.c | 3 + .../meson.build | 18 + .../common/217 test priorities/meson.build | 22 + .../common/217 test priorities/testprog.py | 5 + .../common/218 include_dir dot/meson.build | 8 + meson/test cases/common/218 include_dir dot/rone.h | 1 + .../common/218 include_dir dot/src/main.c | 5 + .../common/218 include_dir dot/src/meson.build | 6 + .../common/218 include_dir dot/src/rone.c | 3 + .../common/219 include_type dependency/main.cpp | 8 + .../common/219 include_type dependency/meson.build | 44 + .../219 include_type dependency/pch/test.hpp | 1 + .../subprojects/subDep/meson.build | 3 + .../common/22 object extraction/check-obj.py | 21 + .../common/22 object extraction/header.h | 1 + meson/test cases/common/22 object extraction/lib.c | 3 + .../test cases/common/22 object extraction/lib2.c | 3 + .../test cases/common/22 object extraction/main.c | 5 + .../common/22 object extraction/meson.build | 35 + .../common/22 object extraction/src/lib.c | 3 + meson/test cases/common/220 fs module/meson.build | 144 + .../common/220 fs module/subdir/meson.build | 6 + .../common/220 fs module/subdir/subdirfile.txt | 1 + .../220 fs module/subprojects/subbie/meson.build | 11 + .../subprojects/subbie/subprojectfile.txt | 1 + .../subprojects/subbie/subsub/meson.build | 3 + .../subprojects/subbie/subsub/subsubfile.txt | 1 + meson/test cases/common/221 zlib/meson.build | 23 + .../common/222 native prop/crossfile.ini | 4 + .../test cases/common/222 native prop/meson.build | 49 + .../common/222 native prop/nativefile.ini | 3 + .../common/223 persubproject options/foo.c | 5 + .../common/223 persubproject options/meson.build | 14 + .../subprojects/sub1/foo.c | 8 + .../subprojects/sub1/meson.build | 9 + .../subprojects/sub2/foo.c | 9 + .../subprojects/sub2/meson.build | 10 + .../common/223 persubproject options/test.json | 7 + .../common/224 arithmetic operators/meson.build | 8 + .../common/225 link language/c_linkage.cpp | 5 + .../common/225 link language/c_linkage.h | 10 + meson/test cases/common/225 link language/lib.cpp | 5 + meson/test cases/common/225 link language/main.c | 5 + .../common/225 link language/meson.build | 18 + .../check_arch.py | 32 + .../226 link depends indexed custom target/foo.c | 15 + .../make_file.py | 8 + .../meson.build | 25 + .../common/227 very long commmand line/codegen.py | 7 + .../common/227 very long commmand line/main.c | 1 + .../common/227 very long commmand line/meson.build | 49 + 
.../common/227 very long commmand line/name_gen.py | 23 + meson/test cases/common/228 custom_target source/a | 0 .../common/228 custom_target source/meson.build | 5 + .../common/228 custom_target source/x.py | 5 + .../common/229 disabler array addition/meson.build | 9 + .../common/229 disabler array addition/test.c | 1 + meson/test cases/common/23 endian/meson.build | 7 + meson/test cases/common/23 endian/prog.c | 24 + meson/test cases/common/230 external project/app.c | 7 + .../test cases/common/230 external project/func.c | 7 + .../test cases/common/230 external project/func.h | 1 + .../common/230 external project/libfoo/configure | 44 + .../common/230 external project/libfoo/libfoo.c | 8 + .../common/230 external project/libfoo/libfoo.h | 3 + .../common/230 external project/libfoo/meson.build | 22 + .../common/230 external project/meson.build | 27 + .../common/230 external project/test.json | 7 + .../test cases/common/231 subdir files/meson.build | 3 + .../common/231 subdir files/subdir/meson.build | 1 + .../common/231 subdir files/subdir/prog.c | 1 + .../232 dependency allow_fallback/meson.build | 12 + .../subprojects/foob/meson.build | 2 + .../subprojects/foob3/meson.build | 2 + meson/test cases/common/233 wrap case/meson.build | 6 + meson/test cases/common/233 wrap case/prog.c | 13 + .../common/233 wrap case/subprojects/up_down.wrap | 5 + .../233 wrap case/subprojects/up_down/meson.build | 3 + .../233 wrap case/subprojects/up_down/up_down.h | 3 + .../common/234 get_file_contents/.gitattributes | 1 + .../common/234 get_file_contents/VERSION | 1 + .../common/234 get_file_contents/meson.build | 21 + .../common/234 get_file_contents/other/meson.build | 3 + .../common/234 get_file_contents/utf-16-text | Bin 0 -> 150 bytes .../235 invalid standard overriden to valid/main.c | 3 + .../meson.build | 8 + .../test.json | 9 + .../common/236 proper args splitting/main.c | 11 + .../common/236 proper args splitting/meson.build | 9 + .../common/236 proper args splitting/test.json | 9 + meson/test cases/common/237 fstrings/meson.build | 7 + .../bar/meson.build | 5 + .../meson.build | 5 + .../subprojects/baz.wrap | 3 + .../subprojects/baz/meson.build | 3 + .../subprojects/foo.wrap | 3 + .../subprojects/foo/meson.build | 9 + .../common/239 includedir violation/meson.build | 11 + .../subprojects/sub/include/placeholder.h | 3 + .../subprojects/sub/meson.build | 3 + .../common/239 includedir violation/test.json | 9 + meson/test cases/common/24 library versions/lib.c | 14 + .../common/24 library versions/meson.build | 9 + .../common/24 library versions/subdir/meson.build | 8 + .../common/24 library versions/test.json | 7 + .../meson.build | 18 + .../240 dependency native host == build/test.json | 14 + .../common/241 set and get variable/meson.build | 71 + .../common/241 set and get variable/test1.txt | 0 .../common/241 set and get variable/test2.txt | 0 .../common/242 custom target feed/data_source.txt | 1 + .../common/242 custom target feed/meson.build | 24 + .../common/242 custom target feed/my_compiler.py | 14 + .../common/242 custom target feed/test.json | 5 + meson/test cases/common/243 escape++/meson.build | 4 + meson/test cases/common/243 escape++/test.c | 3 + .../common/25 config subdir/include/config.h.in | 6 + .../common/25 config subdir/include/meson.build | 4 + .../test cases/common/25 config subdir/meson.build | 6 + .../common/25 config subdir/src/meson.build | 2 + .../test cases/common/25 config subdir/src/prog.c | 5 + .../test cases/common/26 find program/meson.build | 35 + .../26 find 
program/print-version-with-prefix.py | 8 + .../common/26 find program/print-version.py | 8 + .../common/26 find program/scripts/test_subdir.py | 3 + meson/test cases/common/26 find program/source.in | 3 + .../common/27 multiline string/meson.build | 37 + meson/test cases/common/28 try compile/invalid.c | 2 + meson/test cases/common/28 try compile/meson.build | 27 + meson/test cases/common/28 try compile/valid.c | 2 + meson/test cases/common/29 compiler id/meson.build | 15 + meson/test cases/common/3 static/libfile.c | 3 + meson/test cases/common/3 static/libfile2.c | 3 + meson/test cases/common/3 static/meson.build | 14 + meson/test cases/common/3 static/meson_options.txt | 1 + meson/test cases/common/30 sizeof/config.h.in | 2 + meson/test cases/common/30 sizeof/meson.build | 33 + meson/test cases/common/30 sizeof/prog.c.in | 15 + meson/test cases/common/31 define10/config.h.in | 2 + meson/test cases/common/31 define10/meson.build | 12 + meson/test cases/common/31 define10/prog.c | 13 + meson/test cases/common/32 has header/meson.build | 54 + .../test cases/common/32 has header/ouagadougou.h | 1 + .../test cases/common/33 run program/check-env.py | 6 + .../common/33 run program/get-version.py | 3 + meson/test cases/common/33 run program/meson.build | 85 + .../common/33 run program/scripts/hello.bat | 2 + .../common/33 run program/scripts/hello.sh | 3 + meson/test cases/common/34 logic ops/meson.build | 95 + .../common/35 string operations/meson.build | 122 + .../test cases/common/36 has function/meson.build | 116 + meson/test cases/common/37 has member/meson.build | 21 + meson/test cases/common/38 alignment/meson.build | 31 + meson/test cases/common/39 library chain/main.c | 5 + .../test cases/common/39 library chain/meson.build | 5 + .../common/39 library chain/subdir/lib1.c | 17 + .../common/39 library chain/subdir/meson.build | 4 + .../common/39 library chain/subdir/subdir2/lib2.c | 14 + .../39 library chain/subdir/subdir2/meson.build | 1 + .../common/39 library chain/subdir/subdir3/lib3.c | 14 + .../39 library chain/subdir/subdir3/meson.build | 1 + meson/test cases/common/39 library chain/test.json | 6 + meson/test cases/common/4 shared/libfile.c | 14 + meson/test cases/common/4 shared/meson.build | 13 + meson/test cases/common/40 options/meson.build | 45 + .../test cases/common/40 options/meson_options.txt | 9 + meson/test cases/common/41 test args/cmd_args.c | 18 + meson/test cases/common/41 test args/copyfile.py | 6 + meson/test cases/common/41 test args/env2vars.c | 23 + meson/test cases/common/41 test args/envvars.c | 23 + meson/test cases/common/41 test args/meson.build | 35 + meson/test cases/common/41 test args/tester.c | 34 + meson/test cases/common/41 test args/tester.py | 11 + meson/test cases/common/41 test args/testfile.txt | 1 + meson/test cases/common/42 subproject/meson.build | 28 + .../subprojects/sublib/include/subdefs.h | 21 + .../42 subproject/subprojects/sublib/meson.build | 19 + .../42 subproject/subprojects/sublib/simpletest.c | 5 + .../42 subproject/subprojects/sublib/sublib.c | 5 + meson/test cases/common/42 subproject/test.json | 7 + meson/test cases/common/42 subproject/user.c | 16 + .../common/43 subproject options/meson.build | 7 + .../common/43 subproject options/meson_options.txt | 1 + .../subprojects/subproject/meson.build | 5 + .../subprojects/subproject/meson_options.txt | 1 + .../common/44 pkgconfig-gen/dependencies/custom.c | 3 + .../common/44 pkgconfig-gen/dependencies/exposed.c | 3 + .../44 pkgconfig-gen/dependencies/internal.c | 3 + .../common/44 
pkgconfig-gen/dependencies/main.c | 10 + .../44 pkgconfig-gen/dependencies/meson.build | 62 + .../test cases/common/44 pkgconfig-gen/meson.build | 126 + meson/test cases/common/44 pkgconfig-gen/simple.c | 5 + meson/test cases/common/44 pkgconfig-gen/simple.h | 6 + meson/test cases/common/44 pkgconfig-gen/simple5.c | 6 + meson/test cases/common/44 pkgconfig-gen/test.json | 15 + .../common/45 custom install dirs/datafile.cat | 1 + .../common/45 custom install dirs/meson.build | 11 + .../common/45 custom install dirs/prog.1 | 1 + .../common/45 custom install dirs/prog.c | 3 + .../common/45 custom install dirs/sample.h | 6 + .../45 custom install dirs/subdir/datafile.dog | 1 + .../common/45 custom install dirs/test.json | 16 + .../common/46 subproject subproject/meson.build | 11 + .../common/46 subproject subproject/prog.c | 5 + .../46 subproject subproject/subprojects/a/a.c | 15 + .../subprojects/a/meson.build | 4 + .../46 subproject subproject/subprojects/b/b.c | 14 + .../subprojects/b/meson.build | 3 + .../subprojects/c/meson.build | 3 + .../test cases/common/47 same file name/d1/file.c | 1 + .../test cases/common/47 same file name/d2/file.c | 1 + .../common/47 same file name/meson.build | 3 + meson/test cases/common/47 same file name/prog.c | 6 + meson/test cases/common/48 file grabber/a.c | 1 + meson/test cases/common/48 file grabber/b.c | 1 + meson/test cases/common/48 file grabber/c.c | 1 + .../test cases/common/48 file grabber/grabber.bat | 5 + meson/test cases/common/48 file grabber/grabber.sh | 5 + .../test cases/common/48 file grabber/grabber2.bat | 5 + .../test cases/common/48 file grabber/meson.build | 35 + meson/test cases/common/48 file grabber/prog.c | 7 + .../common/48 file grabber/subdir/meson.build | 5 + .../common/48 file grabber/subdir/suba.c | 1 + .../common/48 file grabber/subdir/subb.c | 1 + .../common/48 file grabber/subdir/subc.c | 1 + .../common/48 file grabber/subdir/subprog.c | 7 + .../common/49 custom target/data_source.txt | 1 + .../common/49 custom target/depfile/dep.py | 15 + .../common/49 custom target/depfile/meson.build | 7 + .../test cases/common/49 custom target/meson.build | 67 + .../common/49 custom target/my_compiler.py | 22 + meson/test cases/common/49 custom target/test.json | 5 + meson/test cases/common/5 linkstatic/libfile.c | 3 + meson/test cases/common/5 linkstatic/libfile2.c | 3 + meson/test cases/common/5 linkstatic/libfile3.c | 3 + meson/test cases/common/5 linkstatic/libfile4.c | 3 + meson/test cases/common/5 linkstatic/main.c | 5 + meson/test cases/common/5 linkstatic/meson.build | 6 + .../common/50 custom target chain/data_source.txt | 1 + .../common/50 custom target chain/meson.build | 34 + .../common/50 custom target chain/my_compiler.py | 15 + .../common/50 custom target chain/my_compiler2.py | 15 + .../common/50 custom target chain/test.json | 6 + .../50 custom target chain/usetarget/meson.build | 8 + .../50 custom target chain/usetarget/myexe.c | 6 + .../50 custom target chain/usetarget/subcomp.py | 7 + meson/test cases/common/51 run target/check-env.py | 23 + .../common/51 run target/check_exists.py | 7 + meson/test cases/common/51 run target/configure.in | 3 + meson/test cases/common/51 run target/converter.py | 6 + .../test cases/common/51 run target/fakeburner.py | 15 + .../test cases/common/51 run target/helloprinter.c | 11 + meson/test cases/common/51 run target/meson.build | 107 + .../common/51 run target/subdir/textprinter.py | 3 + .../common/52 object generator/meson.build | 34 + .../common/52 object generator/obj_generator.py 
| 18 + meson/test cases/common/52 object generator/prog.c | 7 + .../test cases/common/52 object generator/source.c | 3 + .../common/52 object generator/source2.c | 3 + .../common/52 object generator/source3.c | 3 + .../common/53 install script/customtarget.py | 19 + .../common/53 install script/meson.build | 45 + .../common/53 install script/myinstall.py | 31 + meson/test cases/common/53 install script/prog.c | 14 + .../common/53 install script/src/a file.txt | 0 .../test cases/common/53 install script/src/foo.c | 10 + .../common/53 install script/src/meson.build | 5 + .../common/53 install script/src/myinstall.py | 14 + .../test cases/common/53 install script/test.json | 15 + .../54 custom target source output/generator.py | 16 + .../common/54 custom target source output/main.c | 5 + .../54 custom target source output/meson.build | 9 + .../common/55 exe static shared/meson.build | 15 + .../test cases/common/55 exe static shared/prog.c | 10 + .../common/55 exe static shared/shlib2.c | 8 + .../test cases/common/55 exe static shared/stat.c | 7 + .../test cases/common/55 exe static shared/stat2.c | 3 + .../common/55 exe static shared/subdir/exports.h | 12 + .../common/55 exe static shared/subdir/meson.build | 1 + .../common/55 exe static shared/subdir/shlib.c | 5 + meson/test cases/common/56 array methods/a.txt | 0 meson/test cases/common/56 array methods/b.txt | 0 meson/test cases/common/56 array methods/c.txt | 0 .../test cases/common/56 array methods/meson.build | 70 + .../common/57 custom header generator/input.def | 1 + .../57 custom header generator/makeheader.py | 12 + .../common/57 custom header generator/meson.build | 21 + .../common/57 custom header generator/prog.c | 5 + .../common/57 custom header generator/somefile.txt | 0 .../common/58 multiple generators/data2.dat | 1 + .../common/58 multiple generators/main.cpp | 6 + .../common/58 multiple generators/meson.build | 13 + .../common/58 multiple generators/mygen.py | 22 + .../common/58 multiple generators/subdir/data.dat | 1 + .../58 multiple generators/subdir/meson.build | 4 + .../common/59 install subdir/meson.build | 21 + .../nested_elided/sub/dircheck/ninth.dat | 1 + .../59 install subdir/nested_elided/sub/eighth.dat | 1 + .../common/59 install subdir/sub/sub1/third.dat | 1 + .../common/59 install subdir/sub1/second.dat | 1 + .../sub2/dircheck/excluded-three.dat | 0 .../59 install subdir/sub2/excluded-three.dat | 0 .../common/59 install subdir/sub2/excluded/two.dat | 0 .../common/59 install subdir/sub2/one.dat | 0 .../sub_elided/dircheck/fifth.dat | 1 + .../common/59 install subdir/sub_elided/fourth.dat | 1 + .../common/59 install subdir/subdir/meson.build | 5 + .../common/59 install subdir/subdir/sub1/data1.dat | 1 + .../59 install subdir/subdir/sub1/sub2/data2.dat | 1 + .../subdir/sub_elided/dircheck/seventh.dat | 1 + .../59 install subdir/subdir/sub_elided/sixth.dat | 1 + .../test cases/common/59 install subdir/test.json | 17 + meson/test cases/common/6 linkshared/cpplib.cpp | 6 + meson/test cases/common/6 linkshared/cpplib.h | 12 + meson/test cases/common/6 linkshared/cppmain.cpp | 5 + meson/test cases/common/6 linkshared/libfile.c | 14 + meson/test cases/common/6 linkshared/main.c | 11 + meson/test cases/common/6 linkshared/meson.build | 12 + meson/test cases/common/6 linkshared/test.json | 6 + meson/test cases/common/60 foreach/meson.build | 53 + meson/test cases/common/60 foreach/prog1.c | 6 + meson/test cases/common/60 foreach/prog2.c | 6 + meson/test cases/common/60 foreach/prog3.c | 6 + meson/test cases/common/60 
foreach/test.json | 10 + .../common/61 number arithmetic/meson.build | 76 + .../common/62 string arithmetic/meson.build | 16 + .../common/63 array arithmetic/meson.build | 15 + .../common/64 arithmetic bidmas/meson.build | 15 + meson/test cases/common/65 build always/main.c | 7 + .../test cases/common/65 build always/meson.build | 14 + .../test cases/common/65 build always/version.c.in | 3 + meson/test cases/common/65 build always/version.h | 3 + .../common/65 build always/version_gen.py | 29 + meson/test cases/common/66 vcstag/meson.build | 18 + meson/test cases/common/66 vcstag/tagprog.c | 9 + meson/test cases/common/66 vcstag/vcstag.c.in | 2 + meson/test cases/common/67 modules/meson.build | 14 + .../test cases/common/67 modules/meson_options.txt | 6 + meson/test cases/common/68 should fail/failing.c | 3 + meson/test cases/common/68 should fail/meson.build | 4 + .../inc/confdata.in | 1 + .../inc/meson.build | 6 + .../69 configure file in custom target/meson.build | 4 + .../src/meson.build | 20 + .../src/mycompiler.py | 9 + meson/test cases/common/7 mixed/func.c | 4 + meson/test cases/common/7 mixed/main.cc | 7 + meson/test cases/common/7 mixed/meson.build | 3 + .../common/70 external test program/meson.build | 3 + .../common/70 external test program/mytest.py | 10 + .../common/71 ctarget dependency/gen1.py | 12 + .../common/71 ctarget dependency/gen2.py | 10 + .../common/71 ctarget dependency/input.dat | 1 + .../common/71 ctarget dependency/meson.build | 20 + meson/test cases/common/72 shared subproject/a.c | 13 + .../common/72 shared subproject/meson.build | 10 + .../common/72 shared subproject/subprojects/B/b.c | 21 + .../72 shared subproject/subprojects/B/meson.build | 4 + .../common/72 shared subproject/subprojects/C/c.c | 14 + .../72 shared subproject/subprojects/C/meson.build | 2 + meson/test cases/common/73 shared subproject 2/a.c | 13 + .../common/73 shared subproject 2/meson.build | 13 + .../73 shared subproject 2/subprojects/B/b.c | 20 + .../subprojects/B/meson.build | 4 + .../73 shared subproject 2/subprojects/C/c.c | 14 + .../subprojects/C/meson.build | 2 + meson/test cases/common/74 file object/lib.c | 3 + meson/test cases/common/74 file object/meson.build | 9 + meson/test cases/common/74 file object/prog.c | 13 + .../test cases/common/74 file object/subdir1/lib.c | 3 + .../common/74 file object/subdir1/meson.build | 7 + .../common/74 file object/subdir1/prog.c | 13 + .../test cases/common/74 file object/subdir2/lib.c | 3 + .../common/74 file object/subdir2/meson.build | 7 + .../common/74 file object/subdir2/prog.c | 13 + .../test cases/common/75 custom subproject dir/a.c | 13 + .../custom_subproject_dir/B/b.c | 20 + .../custom_subproject_dir/B/meson.build | 4 + .../custom_subproject_dir/C/c.c | 14 + .../custom_subproject_dir/C/meson.build | 2 + .../common/75 custom subproject dir/meson.build | 10 + meson/test cases/common/76 has type/meson.build | 13 + .../77 extract from nested subdir/meson.build | 8 + .../src/first/lib_first.c | 3 + .../src/first/meson.build | 1 + .../77 extract from nested subdir/src/meson.build | 1 + .../tst/first/exe_first.c | 5 + .../tst/first/meson.build | 4 + .../77 extract from nested subdir/tst/meson.build | 1 + .../common/78 internal dependency/meson.build | 4 + .../78 internal dependency/proj1/include/proj1.h | 5 + .../78 internal dependency/proj1/meson.build | 11 + .../common/78 internal dependency/proj1/proj1f1.c | 6 + .../common/78 internal dependency/proj1/proj1f2.c | 6 + .../common/78 internal dependency/proj1/proj1f3.c | 6 + .../common/78 
internal dependency/src/main.c | 10 + .../common/78 internal dependency/src/meson.build | 2 + meson/test cases/common/79 same basename/exe1.c | 5 + meson/test cases/common/79 same basename/exe2.c | 5 + meson/test cases/common/79 same basename/lib.c | 23 + .../test cases/common/79 same basename/meson.build | 14 + .../common/79 same basename/sharedsub/meson.build | 1 + .../common/79 same basename/staticsub/meson.build | 3 + meson/test cases/common/8 install/gendir.py | 8 + meson/test cases/common/8 install/meson.build | 10 + meson/test cases/common/8 install/prog.c | 3 + meson/test cases/common/8 install/stat.c | 1 + meson/test cases/common/8 install/test.json | 9 + .../common/80 declare dep/entity/entity.h | 4 + .../common/80 declare dep/entity/entity1.c | 9 + .../common/80 declare dep/entity/entity2.c | 5 + .../common/80 declare dep/entity/meson.build | 10 + meson/test cases/common/80 declare dep/main.c | 18 + meson/test cases/common/80 declare dep/meson.build | 24 + meson/test cases/common/81 extract all/extractor.h | 6 + meson/test cases/common/81 extract all/four.c | 5 + meson/test cases/common/81 extract all/meson.build | 13 + meson/test cases/common/81 extract all/one.c | 5 + meson/test cases/common/81 extract all/prog.c | 10 + meson/test cases/common/81 extract all/three.c | 5 + meson/test cases/common/81 extract all/two.c | 5 + .../test cases/common/82 add language/meson.build | 10 + meson/test cases/common/82 add language/prog.c | 6 + meson/test cases/common/82 add language/prog.cc | 6 + .../83 identical target name in subproject/bar.c | 6 + .../meson.build | 9 + .../subprojects/foo/bar.c | 6 + .../subprojects/foo/meson.build | 7 + .../subprojects/foo/true.py | 4 + .../83 identical target name in subproject/true.py | 4 + meson/test cases/common/84 plusassign/meson.build | 70 + meson/test cases/common/85 skip subdir/meson.build | 3 + .../common/85 skip subdir/subdir1/meson.build | 1 + .../85 skip subdir/subdir1/subdir2/meson.build | 1 + .../common/86 private include/meson.build | 4 + .../common/86 private include/stlib/compiler.py | 32 + .../common/86 private include/stlib/foo1.def | 0 .../common/86 private include/stlib/foo2.def | 0 .../common/86 private include/stlib/meson.build | 12 + .../common/86 private include/user/libuser.c | 6 + .../common/86 private include/user/meson.build | 5 + .../common/87 default options/meson.build | 33 + .../subprojects/sub1/meson.build | 3 + .../subprojects/sub1/meson_options.txt | 1 + meson/test cases/common/88 dep fallback/gensrc.py | 6 + .../test cases/common/88 dep fallback/meson.build | 38 + .../88 dep fallback/subprojects/boblib/bob.c | 8 + .../88 dep fallback/subprojects/boblib/bob.h | 6 + .../88 dep fallback/subprojects/boblib/genbob.py | 6 + .../88 dep fallback/subprojects/boblib/meson.build | 18 + .../subprojects/dummylib/meson.build | 4 + meson/test cases/common/88 dep fallback/tester.c | 14 + meson/test cases/common/89 default library/ef.cpp | 8 + meson/test cases/common/89 default library/ef.h | 22 + .../common/89 default library/eftest.cpp | 14 + .../common/89 default library/meson.build | 10 + .../test cases/common/9 header install/meson.build | 12 + meson/test cases/common/9 header install/rootdir.h | 3 + .../common/9 header install/sub/fileheader.h | 3 + .../common/9 header install/sub/meson.build | 2 + meson/test cases/common/9 header install/subdir.h | 3 + meson/test cases/common/9 header install/test.json | 8 + .../9 header install/vanishing_subdir/meson.build | 1 + .../9 header install/vanishing_subdir/vanished.h | 5 + 
meson/test cases/common/90 gen extra/meson.build | 40 + meson/test cases/common/90 gen extra/name.dat | 1 + meson/test cases/common/90 gen extra/name.l | 3 + meson/test cases/common/90 gen extra/plain.c | 5 + meson/test cases/common/90 gen extra/srcgen.py | 27 + meson/test cases/common/90 gen extra/srcgen2.py | 32 + meson/test cases/common/90 gen extra/srcgen3.py | 15 + meson/test cases/common/90 gen extra/upper.c | 5 + meson/test cases/common/91 benchmark/delayer.c | 20 + meson/test cases/common/91 benchmark/meson.build | 4 + .../test cases/common/92 test workdir/meson.build | 8 + meson/test cases/common/92 test workdir/opener.c | 12 + .../common/92 test workdir/subdir/checker.py | 5 + .../common/92 test workdir/subdir/meson.build | 4 + meson/test cases/common/93 suites/exe1.c | 6 + meson/test cases/common/93 suites/exe2.c | 6 + meson/test cases/common/93 suites/meson.build | 9 + .../common/93 suites/subprojects/sub/meson.build | 7 + .../common/93 suites/subprojects/sub/sub1.c | 6 + .../common/93 suites/subprojects/sub/sub2.c | 6 + meson/test cases/common/94 threads/meson.build | 16 + meson/test cases/common/94 threads/threadprog.c | 40 + meson/test cases/common/94 threads/threadprog.cpp | 43 + meson/test cases/common/95 manygen/depuser.c | 8 + meson/test cases/common/95 manygen/meson.build | 14 + .../common/95 manygen/subdir/funcinfo.def | 1 + .../test cases/common/95 manygen/subdir/manygen.py | 82 + .../common/95 manygen/subdir/meson.build | 26 + meson/test cases/common/96 stringdef/meson.build | 3 + meson/test cases/common/96 stringdef/stringdef.c | 10 + .../common/97 find program path/meson.build | 22 + .../common/97 find program path/program.py | 3 + .../common/98 subproject subdir/meson.build | 67 + .../test cases/common/98 subproject subdir/prog.c | 5 + .../subprojects/sub/lib/meson.build | 3 + .../98 subproject subdir/subprojects/sub/lib/sub.c | 5 + .../98 subproject subdir/subprojects/sub/lib/sub.h | 6 + .../subprojects/sub/meson.build | 2 + .../subprojects/sub_implicit.wrap | 6 + .../subprojects/sub_implicit/meson.build | 13 + .../subprojects/sub_implicit/meson_options.txt | 1 + .../sub_implicit/subprojects/subsub/foo.h | 1 + .../sub_implicit/subprojects/subsub/meson.build | 7 + .../subprojects/packagefiles/subsubsub-1.0.zip | Bin 0 -> 455 bytes .../subprojects/subsub/subprojects/subsubsub.wrap | 4 + .../subprojects/sub_novar/meson.build | 4 + .../common/98 subproject subdir/test.json | 5 + meson/test cases/common/99 postconf/meson.build | 5 + meson/test cases/common/99 postconf/postconf.py | 16 + meson/test cases/common/99 postconf/prog.c | 5 + meson/test cases/common/99 postconf/raw.dat | 1 + meson/test cases/csharp/1 basic/meson.build | 4 + meson/test cases/csharp/1 basic/prog.cs | 8 + meson/test cases/csharp/1 basic/test.json | 6 + meson/test cases/csharp/1 basic/text.cs | 7 + meson/test cases/csharp/2 library/helper.cs | 7 + meson/test cases/csharp/2 library/meson.build | 15 + meson/test cases/csharp/2 library/prog.cs | 8 + meson/test cases/csharp/2 library/test.json | 9 + meson/test cases/csharp/3 resource/TestRes.resx | 31 + meson/test cases/csharp/3 resource/meson.build | 6 + meson/test cases/csharp/3 resource/resprog.cs | 13 + meson/test cases/csharp/4 external dep/hello.txt | 1 + meson/test cases/csharp/4 external dep/meson.build | 9 + meson/test cases/csharp/4 external dep/prog.cs | 8 + meson/test cases/csharp/4 external dep/test.json | 5 + meson/test cases/cuda/1 simple/meson.build | 5 + meson/test cases/cuda/1 simple/prog.cu | 30 + .../cuda/10 cuda 
dependency/c/meson.build | 2 + meson/test cases/cuda/10 cuda dependency/c/prog.c | 19 + .../cuda/10 cuda dependency/cpp/meson.build | 2 + .../test cases/cuda/10 cuda dependency/cpp/prog.cc | 19 + .../test cases/cuda/10 cuda dependency/meson.build | 6 + .../cuda/10 cuda dependency/modules/meson.build | 2 + .../cuda/10 cuda dependency/modules/prog.cc | 33 + .../10 cuda dependency/version_reqs/meson.build | 2 + .../cuda/10 cuda dependency/version_reqs/prog.cc | 28 + .../cuda/11 cuda dependency (nvcc)/meson.build | 4 + .../11 cuda dependency (nvcc)/modules/meson.build | 2 + .../cuda/11 cuda dependency (nvcc)/modules/prog.cu | 33 + .../version_reqs/meson.build | 2 + .../11 cuda dependency (nvcc)/version_reqs/prog.cu | 29 + .../cuda/12 cuda dependency (mixed)/kernel.cu | 8 + .../cuda/12 cuda dependency (mixed)/meson.build | 4 + .../cuda/12 cuda dependency (mixed)/prog.cpp | 37 + .../cuda/13 cuda compiler setting/meson.build | 5 + .../cuda/13 cuda compiler setting/nativefile.ini | 5 + .../cuda/13 cuda compiler setting/prog.cu | 30 + .../cuda/14 cuda has header symbol/meson.build | 27 + meson/test cases/cuda/15 sanitizer/meson.build | 4 + meson/test cases/cuda/15 sanitizer/prog.cu | 30 + meson/test cases/cuda/16 multistd/main.cu | 20 + meson/test cases/cuda/16 multistd/meson.build | 4 + meson/test cases/cuda/2 split/lib.cu | 13 + meson/test cases/cuda/2 split/main.cpp | 7 + meson/test cases/cuda/2 split/meson.build | 7 + meson/test cases/cuda/2 split/static/lib.cu | 13 + meson/test cases/cuda/2 split/static/libsta.cu | 13 + .../test cases/cuda/2 split/static/main_static.cpp | 7 + meson/test cases/cuda/2 split/static/meson.build | 4 + meson/test cases/cuda/3 cudamodule/meson.build | 70 + meson/test cases/cuda/3 cudamodule/prog.cu | 30 + meson/test cases/cuda/4 shared/main.cu | 20 + meson/test cases/cuda/4 shared/meson.build | 6 + meson/test cases/cuda/4 shared/shared/kernels.cu | 14 + meson/test cases/cuda/4 shared/shared/kernels.h | 86 + meson/test cases/cuda/4 shared/shared/meson.build | 5 + meson/test cases/cuda/5 threads/main.cu | 20 + meson/test cases/cuda/5 threads/meson.build | 7 + meson/test cases/cuda/5 threads/shared/kernels.cu | 14 + meson/test cases/cuda/5 threads/shared/kernels.h | 86 + meson/test cases/cuda/5 threads/shared/meson.build | 5 + meson/test cases/cuda/6 std/main.cu | 20 + meson/test cases/cuda/6 std/meson.build | 4 + meson/test cases/cuda/7 static vs runtime/main.cu | 20 + .../cuda/7 static vs runtime/meson.build | 4 + meson/test cases/cuda/8 release/main.cu | 20 + meson/test cases/cuda/8 release/meson.build | 4 + meson/test cases/cuda/9 optimize for space/main.cu | 20 + .../cuda/9 optimize for space/meson.build | 4 + meson/test cases/cython/1 basic/cytest.py | 19 + meson/test cases/cython/1 basic/libdir/cstorer.pxd | 9 + meson/test cases/cython/1 basic/libdir/meson.build | 8 + meson/test cases/cython/1 basic/libdir/storer.c | 24 + meson/test cases/cython/1 basic/libdir/storer.h | 8 + meson/test cases/cython/1 basic/libdir/storer.pyx | 16 + meson/test cases/cython/1 basic/meson.build | 20 + .../cython/2 generated sources/configure.pyx.in | 2 + meson/test cases/cython/2 generated sources/g.in | 2 + meson/test cases/cython/2 generated sources/gen.py | 14 + .../cython/2 generated sources/generator.py | 12 + .../cython/2 generated sources/libdir/gen.py | 14 + .../cython/2 generated sources/libdir/meson.build | 10 + .../cython/2 generated sources/meson.build | 80 + .../test cases/cython/2 generated sources/test.py | 13 + meson/test cases/d/1 simple/app.d | 8 + meson/test 
cases/d/1 simple/meson.build | 4 + meson/test cases/d/1 simple/test.json | 6 + meson/test cases/d/1 simple/utils.d | 8 + meson/test cases/d/10 d cpp/cppmain.cpp | 18 + meson/test cases/d/10 d cpp/dmain.d | 5 + meson/test cases/d/10 d cpp/libfile.cpp | 5 + meson/test cases/d/10 d cpp/libfile.d | 5 + meson/test cases/d/10 d cpp/meson.build | 13 + meson/test cases/d/11 dub/meson.build | 23 + meson/test cases/d/11 dub/test.d | 14 + meson/test cases/d/2 static library/app.d | 8 + meson/test cases/d/2 static library/libstuff.d | 9 + meson/test cases/d/2 static library/meson.build | 5 + meson/test cases/d/2 static library/test.json | 7 + meson/test cases/d/3 shared library/app.d | 8 + meson/test cases/d/3 shared library/libstuff.d | 14 + meson/test cases/d/3 shared library/libstuff.di | 3 + meson/test cases/d/3 shared library/lld-test.py | 20 + meson/test cases/d/3 shared library/meson.build | 26 + meson/test cases/d/3 shared library/sub/libstuff.d | 14 + .../test cases/d/3 shared library/sub/meson.build | 2 + meson/test cases/d/3 shared library/test.json | 11 + meson/test cases/d/4 library versions/lib.d | 16 + meson/test cases/d/4 library versions/meson.build | 25 + meson/test cases/d/4 library versions/test.json | 25 + meson/test cases/d/5 mixed/app.d | 8 + meson/test cases/d/5 mixed/libstuff.c | 18 + meson/test cases/d/5 mixed/meson.build | 9 + meson/test cases/d/5 mixed/test.json | 13 + meson/test cases/d/6 unittest/app.d | 38 + meson/test cases/d/6 unittest/meson.build | 8 + meson/test cases/d/6 unittest/second_unit.d | 10 + meson/test cases/d/6 unittest/test.json | 6 + meson/test cases/d/7 multilib/app.d | 9 + meson/test cases/d/7 multilib/meson.build | 24 + meson/test cases/d/7 multilib/say1.d | 15 + meson/test cases/d/7 multilib/say1.di | 1 + meson/test cases/d/7 multilib/say2.d | 15 + meson/test cases/d/7 multilib/say2.di | 1 + meson/test cases/d/7 multilib/test.json | 18 + .../test cases/d/8 has multi arguments/meson.build | 8 + meson/test cases/d/9 features/app.d | 82 + meson/test cases/d/9 features/data/food.txt | 6 + meson/test cases/d/9 features/data/people.txt | 5 + meson/test cases/d/9 features/extra.d | 9 + meson/test cases/d/9 features/meson.build | 106 + .../failing build/1 vala c werror/meson.build | 10 + .../failing build/1 vala c werror/prog.vala | 7 + .../failing build/1 vala c werror/unused-var.c | 8 + .../test cases/failing build/2 hidden symbol/bob.c | 5 + .../test cases/failing build/2 hidden symbol/bob.h | 3 + .../failing build/2 hidden symbol/bobuser.c | 5 + .../failing build/2 hidden symbol/meson.build | 11 + .../failing build/3 pch disabled/c/meson.build | 2 + .../failing build/3 pch disabled/c/pch/prog.h | 1 + .../failing build/3 pch disabled/c/pch/prog_pch.c | 5 + .../failing build/3 pch disabled/c/prog.c | 10 + .../failing build/3 pch disabled/meson.build | 5 + .../4 cmake subproject isolation/incDir/fileA.hpp | 3 + .../4 cmake subproject isolation/main.cpp | 10 + .../4 cmake subproject isolation/meson.build | 17 + .../subprojects/cmMod/CMakeLists.txt | 10 + .../subprojects/cmMod/cmMod.cpp | 12 + .../subprojects/cmMod/cmMod.hpp | 14 + .../failing build/5 failed pickled/false.py | 4 + .../failing build/5 failed pickled/meson.build | 7 + meson/test cases/failing test/1 trivial/main.c | 3 + .../test cases/failing test/1 trivial/meson.build | 3 + meson/test cases/failing test/2 signal/main.c | 6 + meson/test cases/failing test/2 signal/meson.build | 7 + meson/test cases/failing test/3 ambiguous/main.c | 6 + .../failing test/3 ambiguous/meson.build | 10 + .../failing 
test/3 ambiguous/test_runner.sh | 7 + meson/test cases/failing test/4 hard error/main.c | 3 + .../failing test/4 hard error/meson.build | 4 + .../failing test/5 tap tests/meson.build | 9 + meson/test cases/failing test/5 tap tests/tester.c | 10 + .../failing test/5 tap tests/tester_with_status.c | 8 + meson/test cases/failing test/6 xpass/meson.build | 4 + meson/test cases/failing test/6 xpass/xpass.c | 1 + .../failing/1 project not first/meson.build | 4 + .../test cases/failing/1 project not first/prog.c | 1 + .../failing/1 project not first/test.json | 7 + .../failing/10 out of bounds/meson.build | 4 + .../test cases/failing/10 out of bounds/test.json | 7 + meson/test cases/failing/100 no lang/main.c | 3 + meson/test cases/failing/100 no lang/meson.build | 2 + meson/test cases/failing/100 no lang/test.json | 7 + .../101 no glib-compile-resources/meson.build | 8 + .../101 no glib-compile-resources/test.json | 7 + .../trivial.gresource.xml | 3 + .../failing/102 number in combo/meson.build | 1 + .../failing/102 number in combo/nativefile.ini | 2 + .../failing/102 number in combo/test.json | 5 + .../failing/103 bool in combo/meson.build | 1 + .../failing/103 bool in combo/meson_options.txt | 5 + .../failing/103 bool in combo/nativefile.ini | 2 + .../test cases/failing/103 bool in combo/test.json | 5 + .../failing/104 compiler no lang/meson.build | 2 + .../failing/104 compiler no lang/test.json | 7 + .../test cases/failing/105 no fallback/meson.build | 2 + .../105 no fallback/subprojects/foob/meson.build | 2 + meson/test cases/failing/105 no fallback/test.json | 8 + .../failing/106 feature require/meson.build | 2 + .../failing/106 feature require/meson_options.txt | 2 + .../failing/106 feature require/test.json | 8 + .../107 no build get_external_property/meson.build | 3 + .../107 no build get_external_property/test.json | 7 + .../failing/108 enter subdir twice/meson.build | 3 + .../failing/108 enter subdir twice/sub/meson.build | 1 + .../failing/108 enter subdir twice/test.json | 7 + .../failing/109 invalid fstring/meson.build | 4 + .../failing/109 invalid fstring/test.json | 7 + .../failing/11 object arithmetic/meson.build | 3 + .../failing/11 object arithmetic/test.json | 8 + .../failing/110 invalid fstring/meson.build | 3 + .../failing/110 invalid fstring/test.json | 7 + .../111 compiler argument checking/meson.build | 4 + .../111 compiler argument checking/test.json | 7 + .../failing/112 empty fallback/meson.build | 6 + .../112 empty fallback/subprojects/foo/meson.build | 3 + .../failing/112 empty fallback/test.json | 7 + .../113 cmake executable dependency/meson.build | 9 + .../subprojects/cmlib/CMakeLists.txt | 5 + .../subprojects/cmlib/main.c | 3 + .../113 cmake executable dependency/test.json | 7 + .../114 allow_fallback with fallback/meson.build | 3 + .../114 allow_fallback with fallback/test.json | 8 + .../failing/12 string arithmetic/meson.build | 3 + .../failing/12 string arithmetic/test.json | 8 + .../failing/13 array arithmetic/meson.build | 3 + .../failing/13 array arithmetic/test.json | 7 + .../failing/14 invalid option name/meson.build | 1 + .../14 invalid option name/meson_options.txt | 1 + .../failing/14 invalid option name/test.json | 7 + .../failing/15 kwarg before arg/meson.build | 3 + .../test cases/failing/15 kwarg before arg/prog.c | 1 + .../failing/15 kwarg before arg/test.json | 7 + .../failing/16 extract from subproject/main.c | 5 + .../failing/16 extract from subproject/meson.build | 9 + .../subprojects/sub_project/meson.build | 3 + 
.../subprojects/sub_project/sub_lib.c | 3 + .../failing/16 extract from subproject/test.json | 7 + meson/test cases/failing/17 same target/file.c | 1 + .../test cases/failing/17 same target/meson.build | 4 + meson/test cases/failing/17 same target/test.json | 7 + .../failing/18 wrong plusassign/meson.build | 3 + .../failing/18 wrong plusassign/test.json | 7 + meson/test cases/failing/19 target clash/clash.c | 6 + .../test cases/failing/19 target clash/meson.build | 15 + meson/test cases/failing/19 target clash/test.json | 7 + .../test cases/failing/2 missing file/meson.build | 3 + meson/test cases/failing/2 missing file/test.json | 7 + meson/test cases/failing/20 version/meson.build | 1 + meson/test cases/failing/20 version/test.json | 8 + meson/test cases/failing/21 subver/meson.build | 3 + .../failing/21 subver/subprojects/foo/meson.build | 1 + meson/test cases/failing/21 subver/test.json | 7 + meson/test cases/failing/22 assert/meson.build | 3 + meson/test cases/failing/22 assert/test.json | 7 + .../test cases/failing/23 rel testdir/meson.build | 4 + meson/test cases/failing/23 rel testdir/simple.c | 3 + meson/test cases/failing/23 rel testdir/test.json | 7 + .../failing/24 int conversion/meson.build | 3 + .../test cases/failing/24 int conversion/test.json | 7 + meson/test cases/failing/25 badlang/meson.build | 3 + meson/test cases/failing/25 badlang/test.json | 7 + meson/test cases/failing/26 output subdir/foo.in | 1 + .../failing/26 output subdir/meson.build | 5 + .../failing/26 output subdir/subdir/dummy.txt | 2 + .../test cases/failing/26 output subdir/test.json | 7 + meson/test cases/failing/27 noprog use/meson.build | 9 + meson/test cases/failing/27 noprog use/test.json | 7 + .../test cases/failing/28 no crossprop/meson.build | 3 + meson/test cases/failing/28 no crossprop/test.json | 7 + .../failing/29 nested ternary/meson.build | 3 + .../test cases/failing/29 nested ternary/test.json | 7 + .../failing/3 missing subdir/meson.build | 3 + .../test cases/failing/3 missing subdir/test.json | 9 + .../failing/30 invalid man extension/foo.a1 | 0 .../failing/30 invalid man extension/meson.build | 2 + .../failing/30 invalid man extension/test.json | 7 + meson/test cases/failing/31 no man extension/foo | 0 .../failing/31 no man extension/meson.build | 2 + .../failing/31 no man extension/test.json | 7 + .../failing/32 exe static shared/meson.build | 11 + .../test cases/failing/32 exe static shared/prog.c | 10 + .../failing/32 exe static shared/shlib2.c | 16 + .../test cases/failing/32 exe static shared/stat.c | 3 + .../failing/32 exe static shared/test.json | 7 + .../failing/33 non-root subproject/meson.build | 3 + .../33 non-root subproject/some/meson.build | 1 + .../failing/33 non-root subproject/test.json | 7 + .../meson.build | 4 + .../test.json | 8 + .../failing/35 project argument after target/exe.c | 3 + .../35 project argument after target/meson.build | 7 + .../35 project argument after target/test.json | 7 + .../meson.build | 7 + .../test.json | 7 + .../meson.build | 8 + .../37 has function external dependency/mylib.c | 1 + .../37 has function external dependency/test.json | 7 + .../38 libdir must be inside prefix/meson.build | 6 + .../38 libdir must be inside prefix/test.json | 10 + .../failing/39 prefix absolute/meson.build | 2 + .../failing/39 prefix absolute/test.json | 11 + .../failing/4 missing meson.build/meson.build | 3 + .../failing/4 missing meson.build/subdir/dummy.txt | 1 + .../failing/4 missing meson.build/test.json | 9 + meson/test cases/failing/40 kwarg assign/dummy.c 
| 3 + .../test cases/failing/40 kwarg assign/meson.build | 4 + meson/test cases/failing/40 kwarg assign/prog.c | 3 + meson/test cases/failing/40 kwarg assign/test.json | 7 + .../41 custom target plainname many inputs/1.txt | 1 + .../41 custom target plainname many inputs/2.txt | 1 + .../catfiles.py | 9 + .../meson.build | 8 + .../test.json | 7 + .../generator.py | 16 + .../meson.build | 13 + .../test.json | 33 + .../failing/43 project name colon/meson.build | 1 + .../failing/43 project name colon/test.json | 7 + .../failing/44 abs subdir/bob/meson.build | 2 + meson/test cases/failing/44 abs subdir/meson.build | 6 + meson/test cases/failing/44 abs subdir/test.json | 7 + .../failing/45 abspath to srcdir/meson.build | 3 + .../failing/45 abspath to srcdir/test.json | 7 + .../46 pkgconfig variables reserved/meson.build | 16 + .../46 pkgconfig variables reserved/simple.c | 5 + .../46 pkgconfig variables reserved/simple.h | 6 + .../46 pkgconfig variables reserved/test.json | 7 + .../47 pkgconfig variables zero length/meson.build | 16 + .../47 pkgconfig variables zero length/simple.c | 5 + .../47 pkgconfig variables zero length/simple.h | 6 + .../47 pkgconfig variables zero length/test.json | 7 + .../meson.build | 16 + .../simple.c | 5 + .../simple.h | 6 + .../test.json | 7 + .../meson.build | 16 + .../49 pkgconfig variables not key value/simple.c | 5 + .../49 pkgconfig variables not key value/simple.h | 6 + .../49 pkgconfig variables not key value/test.json | 7 + .../failing/5 misplaced option/meson.build | 3 + .../failing/5 misplaced option/test.json | 7 + .../failing/50 executable comparison/meson.build | 6 + .../failing/50 executable comparison/prog.c | 1 + .../failing/50 executable comparison/test.json | 7 + .../failing/51 inconsistent comparison/meson.build | 7 + .../failing/51 inconsistent comparison/test.json | 7 + meson/test cases/failing/52 slashname/meson.build | 12 + .../failing/52 slashname/sub/meson.build | 2 + meson/test cases/failing/52 slashname/sub/prog.c | 6 + meson/test cases/failing/52 slashname/test.json | 7 + .../53 reserved meson prefix/meson-foo/meson.build | 0 .../failing/53 reserved meson prefix/meson.build | 3 + .../failing/53 reserved meson prefix/test.json | 7 + .../failing/54 wrong shared crate type/foo.rs | 0 .../failing/54 wrong shared crate type/meson.build | 7 + .../failing/54 wrong shared crate type/test.json | 7 + .../failing/55 wrong static crate type/foo.rs | 0 .../failing/55 wrong static crate type/meson.build | 7 + .../failing/55 wrong static crate type/test.json | 7 + .../failing/56 or on new line/meson.build | 7 + .../failing/56 or on new line/meson_options.txt | 1 + .../test cases/failing/56 or on new line/test.json | 7 + .../failing/57 link with executable/meson.build | 4 + .../failing/57 link with executable/module.c | 4 + .../failing/57 link with executable/prog.c | 5 + .../failing/57 link with executable/test.json | 7 + .../58 assign custom target index/meson.build | 24 + .../58 assign custom target index/test.json | 7 + .../failing/59 getoption prefix/meson.build | 5 + .../subprojects/abc/meson.build | 1 + .../subprojects/abc/meson_options.txt | 1 + .../failing/59 getoption prefix/test.json | 7 + .../failing/6 missing incdir/meson.build | 3 + .../test cases/failing/6 missing incdir/test.json | 7 + .../failing/60 bad option argument/meson.build | 3 + .../60 bad option argument/meson_options.txt | 1 + .../failing/60 bad option argument/test.json | 7 + .../failing/61 subproj filegrab/meson.build | 5 + .../test cases/failing/61 subproj filegrab/prog.c | 
1 + .../61 subproj filegrab/subprojects/a/meson.build | 3 + .../failing/61 subproj filegrab/test.json | 7 + .../test cases/failing/62 grab subproj/meson.build | 7 + .../62 grab subproj/subprojects/foo/meson.build | 3 + .../failing/62 grab subproj/subprojects/foo/sub.c | 6 + meson/test cases/failing/62 grab subproj/test.json | 7 + .../test cases/failing/63 grab sibling/meson.build | 3 + .../63 grab sibling/subprojects/a/meson.build | 3 + .../63 grab sibling/subprojects/b/meson.build | 3 + .../failing/63 grab sibling/subprojects/b/sneaky.c | 6 + meson/test cases/failing/63 grab sibling/test.json | 7 + .../failing/64 string as link target/meson.build | 2 + .../failing/64 string as link target/prog.c | 1 + .../failing/64 string as link target/test.json | 7 + .../meson.build | 2 + .../65 dependency not-found and required/test.json | 7 + .../failing/66 subproj different versions/main.c | 9 + .../66 subproj different versions/meson.build | 9 + .../subprojects/a/a.c | 5 + .../subprojects/a/a.h | 1 + .../subprojects/a/meson.build | 11 + .../subprojects/b/b.c | 5 + .../subprojects/b/b.h | 1 + .../subprojects/b/meson.build | 11 + .../subprojects/c/c.h | 3 + .../subprojects/c/meson.build | 5 + .../66 subproj different versions/test.json | 7 + .../failing/67 wrong boost module/meson.build | 9 + .../failing/67 wrong boost module/test.json | 7 + .../68 install_data rename bad size/file1.txt | 0 .../68 install_data rename bad size/file2.txt | 0 .../68 install_data rename bad size/meson.build | 3 + .../68 install_data rename bad size/test.json | 7 + .../failing/69 skip only subdir/meson.build | 8 + .../failing/69 skip only subdir/subdir/meson.build | 3 + .../failing/69 skip only subdir/test.json | 7 + .../failing/7 go to subproject/meson.build | 3 + .../7 go to subproject/subprojects/meson.build | 1 + .../failing/7 go to subproject/test.json | 7 + .../failing/70 dual override/meson.build | 5 + .../failing/70 dual override/overrides.py | 4 + .../test cases/failing/70 dual override/test.json | 7 + .../failing/71 override used/meson.build | 5 + meson/test cases/failing/71 override used/other.py | 3 + .../failing/71 override used/something.py | 3 + .../test cases/failing/71 override used/test.json | 7 + .../72 run_command unclean exit/meson.build | 4 + .../72 run_command unclean exit/returncode.py | 4 + .../failing/72 run_command unclean exit/test.json | 8 + .../73 int literal leading zero/meson.build | 6 + .../failing/73 int literal leading zero/test.json | 8 + .../failing/74 configuration immutable/input | 0 .../failing/74 configuration immutable/meson.build | 12 + .../failing/74 configuration immutable/test.json | 7 + .../75 link with shared module on osx/meson.build | 8 + .../75 link with shared module on osx/module.c | 3 + .../75 link with shared module on osx/prog.c | 4 + .../75 link with shared module on osx/test.json | 7 + .../config9.h.in | 1 + .../meson.build | 10 + .../test.json | 8 + .../meson.build | 2 + .../test.json | 7 + .../test cases/failing/78 unfound run/meson.build | 4 + meson/test cases/failing/78 unfound run/test.json | 7 + .../meson.build | 8 + .../79 framework dependency with version/test.json | 7 + meson/test cases/failing/8 recursive/meson.build | 3 + .../failing/8 recursive/subprojects/a/meson.build | 3 + .../failing/8 recursive/subprojects/b/meson.build | 3 + meson/test cases/failing/8 recursive/test.json | 7 + .../failing/80 override exe config/foo.c | 3 + .../failing/80 override exe config/meson.build | 6 + .../failing/80 override exe config/test.json | 7 + .../81 gl dependency 
with version/meson.build | 9 + .../81 gl dependency with version/test.json | 7 + .../82 threads dependency with version/meson.build | 3 + .../82 threads dependency with version/test.json | 7 + .../83 gtest dependency with version/meson.build | 8 + .../83 gtest dependency with version/test.json | 7 + meson/test cases/failing/84 dub libray/meson.build | 11 + meson/test cases/failing/84 dub libray/test.json | 7 + .../failing/85 dub executable/meson.build | 11 + .../test cases/failing/85 dub executable/test.json | 7 + .../test cases/failing/86 dub compiler/meson.build | 17 + meson/test cases/failing/86 dub compiler/test.json | 19 + .../failing/87 subproj not-found dep/meson.build | 2 + .../subprojects/somesubproj/meson.build | 3 + .../failing/87 subproj not-found dep/test.json | 7 + .../failing/88 invalid configure file/input | 0 .../failing/88 invalid configure file/meson.build | 9 + .../failing/88 invalid configure file/test.json | 7 + meson/test cases/failing/89 kwarg dupe/meson.build | 6 + meson/test cases/failing/89 kwarg dupe/prog.c | 6 + meson/test cases/failing/89 kwarg dupe/test.json | 7 + .../failing/9 missing extra file/meson.build | 3 + .../test cases/failing/9 missing extra file/prog.c | 3 + .../failing/9 missing extra file/test.json | 7 + .../failing/90 missing pch file/meson.build | 3 + .../test cases/failing/90 missing pch file/prog.c | 3 + .../failing/90 missing pch file/test.json | 8 + .../91 pch source different folder/include/pch.h | 0 .../91 pch source different folder/meson.build | 5 + .../failing/91 pch source different folder/prog.c | 1 + .../91 pch source different folder/src/pch.c | 0 .../91 pch source different folder/test.json | 7 + .../failing/92 unknown config tool/meson.build | 2 + .../failing/92 unknown config tool/test.json | 7 + .../93 custom target install data/Info.plist.cpp | 1 + .../93 custom target install data/meson.build | 11 + .../93 custom target install data/preproc.py | 13 + .../93 custom target install data/test.json | 7 + .../failing/94 add dict non string key/meson.build | 9 + .../failing/94 add dict non string key/test.json | 7 + .../failing/95 add dict duplicate keys/meson.build | 9 + .../failing/95 add dict duplicate keys/test.json | 7 + .../96 no host get_external_property/meson.build | 3 + .../96 no host get_external_property/test.json | 7 + .../failing/97 no native compiler/main.c | 3 + .../failing/97 no native compiler/meson.build | 12 + .../failing/97 no native compiler/test.json | 7 + .../failing/98 subdir parse error/meson.build | 2 + .../98 subdir parse error/subdir/meson.build | 1 + .../failing/98 subdir parse error/test.json | 7 + .../failing/99 invalid option file/meson.build | 1 + .../99 invalid option file/meson_options.txt | 1 + .../failing/99 invalid option file/test.json | 7 + meson/test cases/fortran/1 basic/meson.build | 13 + meson/test cases/fortran/1 basic/simple.f90 | 3 + meson/test cases/fortran/10 find library/gzip.f90 | 32 + meson/test cases/fortran/10 find library/main.f90 | 38 + .../test cases/fortran/10 find library/meson.build | 13 + .../fortran/11 compiles links runs/meson.build | 20 + meson/test cases/fortran/12 submodule/a1.f90 | 26 + meson/test cases/fortran/12 submodule/a2.f90 | 10 + meson/test cases/fortran/12 submodule/a3.f90 | 13 + meson/test cases/fortran/12 submodule/child.f90 | 14 + meson/test cases/fortran/12 submodule/meson.build | 13 + meson/test cases/fortran/12 submodule/parent.f90 | 26 + meson/test cases/fortran/13 coarray/main.f90 | 10 + meson/test cases/fortran/13 coarray/meson.build | 24 + 
meson/test cases/fortran/14 fortran links c/clib.c | 7 + .../test cases/fortran/14 fortran links c/clib.def | 2 + .../fortran/14 fortran links c/f_call_c.f90 | 11 + .../fortran/14 fortran links c/meson.build | 15 + meson/test cases/fortran/15 include/inc1.f90 | 5 + meson/test cases/fortran/15 include/inc2.f90 | 2 + .../fortran/15 include/include_hierarchy.f90 | 9 + .../fortran/15 include/include_syntax.f90 | 25 + meson/test cases/fortran/15 include/meson.build | 19 + .../subprojects/cmake_inc/CMakeLists.txt | 4 + .../15 include/subprojects/cmake_inc/main.f90 | 9 + .../15 include/subprojects/cmake_inc/thousand.f90 | 1 + meson/test cases/fortran/15 include/timestwo.f90 | 2 + meson/test cases/fortran/16 openmp/main.f90 | 18 + meson/test cases/fortran/16 openmp/meson.build | 34 + .../fortran/17 add_languages/meson.build | 5 + meson/test cases/fortran/18 first_arg/main.f90 | 3 + meson/test cases/fortran/18 first_arg/meson.build | 46 + meson/test cases/fortran/19 fortran_std/legacy.f | 8 + .../test cases/fortran/19 fortran_std/meson.build | 27 + .../test cases/fortran/19 fortran_std/std2003.f90 | 37 + .../test cases/fortran/19 fortran_std/std2008.f90 | 33 + .../test cases/fortran/19 fortran_std/std2018.f90 | 35 + meson/test cases/fortran/19 fortran_std/std95.f90 | 14 + meson/test cases/fortran/2 modules/comment_mod.f90 | 6 + meson/test cases/fortran/2 modules/meson.build | 9 + meson/test cases/fortran/2 modules/mymod.F90 | 8 + meson/test cases/fortran/2 modules/prog.f90 | 12 + meson/test cases/fortran/20 buildtype/main.f90 | 2 + meson/test cases/fortran/20 buildtype/meson.build | 5 + .../test cases/fortran/21 install static/main.f90 | 5 + .../fortran/21 install static/main_lib.f90 | 16 + .../fortran/21 install static/meson.build | 20 + .../subprojects/static_hello/meson.build | 12 + .../subprojects/static_hello/static_hello.f90 | 17 + .../test cases/fortran/21 install static/test.json | 10 + .../fortran/3 module procedure/meson.build | 5 + .../fortran/3 module procedure/use_syntax.f90 | 31 + .../fortran/4 self dependency/meson.build | 8 + .../fortran/4 self dependency/selfdep.f90 | 18 + .../fortran/4 self dependency/src/selfdep_mod.f90 | 6 + .../4 self dependency/subprojects/sub1/main.f90 | 6 + .../4 self dependency/subprojects/sub1/meson.build | 3 + meson/test cases/fortran/5 static/main.f90 | 6 + meson/test cases/fortran/5 static/meson.build | 6 + meson/test cases/fortran/5 static/static_hello.f90 | 17 + meson/test cases/fortran/6 dynamic/dynamic.f90 | 17 + meson/test cases/fortran/6 dynamic/main.f90 | 6 + meson/test cases/fortran/6 dynamic/meson.build | 13 + meson/test cases/fortran/7 generated/meson.build | 29 + meson/test cases/fortran/7 generated/mod1.fpp | 6 + meson/test cases/fortran/7 generated/mod2.fpp | 7 + meson/test cases/fortran/7 generated/mod3.fpp | 6 + meson/test cases/fortran/7 generated/prog.f90 | 8 + .../test cases/fortran/8 module names/meson.build | 6 + meson/test cases/fortran/8 module names/mod1.f90 | 6 + meson/test cases/fortran/8 module names/mod2.f90 | 6 + meson/test cases/fortran/8 module names/test.f90 | 9 + meson/test cases/fortran/9 cpp/fortran.f | 11 + meson/test cases/fortran/9 cpp/main.c | 8 + meson/test cases/fortran/9 cpp/main.cpp | 8 + meson/test cases/fortran/9 cpp/meson.build | 33 + meson/test cases/fpga/1 simple/meson.build | 9 + meson/test cases/fpga/1 simple/spin.pcf | 6 + meson/test cases/fpga/1 simple/spin.v | 32 + meson/test cases/frameworks/1 boost/extralib.cpp | 27 + meson/test cases/frameworks/1 boost/linkexe.cc | 18 + meson/test cases/frameworks/1 
boost/meson.build | 72 + .../frameworks/1 boost/meson_options.txt | 1 + meson/test cases/frameworks/1 boost/nomod.cpp | 18 + .../frameworks/1 boost/partial_dep/foo.cpp | 20 + .../frameworks/1 boost/partial_dep/foo.hpp | 27 + .../frameworks/1 boost/partial_dep/main.cpp | 28 + .../frameworks/1 boost/partial_dep/meson.build | 31 + .../frameworks/1 boost/python_module.cpp | 22 + meson/test cases/frameworks/1 boost/test.json | 21 + .../frameworks/1 boost/test_python_module.py | 27 + meson/test cases/frameworks/1 boost/unit_test.cpp | 9 + .../frameworks/10 gtk-doc/doc/foobar-docs.sgml | 41 + .../10 gtk-doc/doc/foobar1/foobar-docs.sgml | 41 + .../10 gtk-doc/doc/foobar1/foobar-sections.txt | 16 + .../frameworks/10 gtk-doc/doc/foobar1/foobar.types | 4 + .../frameworks/10 gtk-doc/doc/foobar1/meson.build | 9 + .../10 gtk-doc/doc/foobar2/foobar-docs.sgml | 41 + .../frameworks/10 gtk-doc/doc/foobar2/meson.build | 6 + .../10 gtk-doc/doc/foobar3/foobar-docs.sgml | 41 + .../frameworks/10 gtk-doc/doc/foobar3/meson.build | 6 + .../10 gtk-doc/doc/foobar4/foobar-docs.sgml | 41 + .../frameworks/10 gtk-doc/doc/foobar4/meson.build | 7 + .../frameworks/10 gtk-doc/doc/meson.build | 10 + .../frameworks/10 gtk-doc/doc/version.xml.in | 1 + meson/test cases/frameworks/10 gtk-doc/foo.c | 30 + .../frameworks/10 gtk-doc/include/foo-version.h.in | 29 + .../test cases/frameworks/10 gtk-doc/include/foo.h | 33 + .../10 gtk-doc/include/generate-enums-docbook.py | 63 + .../frameworks/10 gtk-doc/include/meson.build | 17 + meson/test cases/frameworks/10 gtk-doc/meson.build | 39 + meson/test cases/frameworks/10 gtk-doc/test.json | 61 + .../11 gir subproject/gir/meson-subsample.c | 124 + .../11 gir subproject/gir/meson-subsample.h | 21 + .../frameworks/11 gir subproject/gir/meson.build | 40 + .../frameworks/11 gir subproject/gir/prog.c | 12 + .../frameworks/11 gir subproject/gir/prog.py | 6 + .../frameworks/11 gir subproject/meson.build | 20 + .../subprojects/mesongir/meson-sample.c | 127 + .../subprojects/mesongir/meson-sample.h | 26 + .../subprojects/mesongir/meson.build | 31 + .../frameworks/11 gir subproject/test.json | 12 + .../12 multiple gir/gir/meson-subsample.c | 124 + .../12 multiple gir/gir/meson-subsample.h | 17 + .../frameworks/12 multiple gir/gir/meson.build | 30 + .../frameworks/12 multiple gir/gir/prog.c | 12 + .../frameworks/12 multiple gir/meson.build | 12 + .../12 multiple gir/mesongir/meson-sample.c | 126 + .../12 multiple gir/mesongir/meson-sample.h.in | 22 + .../12 multiple gir/mesongir/meson.build | 38 + .../frameworks/12 multiple gir/test.json | 12 + .../frameworks/13 yelp/help/C/index.page | 8 + .../frameworks/13 yelp/help/C/media/test.txt | 1 + meson/test cases/frameworks/13 yelp/help/LINGUAS | 2 + meson/test cases/frameworks/13 yelp/help/de/de.po | 13 + meson/test cases/frameworks/13 yelp/help/es/es.po | 13 + .../frameworks/13 yelp/help/es/media/test.txt | 1 + .../test cases/frameworks/13 yelp/help/meson.build | 21 + meson/test cases/frameworks/13 yelp/meson.build | 8 + meson/test cases/frameworks/13 yelp/test.json | 22 + .../frameworks/14 doxygen/doc/Doxyfile.in | 2473 +++++ .../frameworks/14 doxygen/doc/meson.build | 17 + .../frameworks/14 doxygen/include/comedian.h | 17 + .../frameworks/14 doxygen/include/spede.h | 35 + meson/test cases/frameworks/14 doxygen/meson.build | 28 + .../test cases/frameworks/14 doxygen/src/spede.cpp | 49 + meson/test cases/frameworks/14 doxygen/test.json | 5 + meson/test cases/frameworks/15 llvm/meson.build | 51 + .../frameworks/15 llvm/meson_options.txt | 10 + meson/test 
cases/frameworks/15 llvm/sum.c | 76 + meson/test cases/frameworks/15 llvm/test.json | 17 + meson/test cases/frameworks/16 sdl2/meson.build | 13 + .../frameworks/16 sdl2/meson_options.txt | 6 + meson/test cases/frameworks/16 sdl2/sdl2prog.c | 33 + meson/test cases/frameworks/16 sdl2/test.json | 13 + meson/test cases/frameworks/17 mpi/main.c | 27 + meson/test cases/frameworks/17 mpi/main.cpp | 12 + meson/test cases/frameworks/17 mpi/main.f90 | 30 + meson/test cases/frameworks/17 mpi/meson.build | 52 + .../test cases/frameworks/17 mpi/meson_options.txt | 6 + meson/test cases/frameworks/17 mpi/test.json | 17 + meson/test cases/frameworks/18 vulkan/meson.build | 13 + meson/test cases/frameworks/18 vulkan/vulkanprog.c | 26 + meson/test cases/frameworks/19 pcap/meson.build | 22 + meson/test cases/frameworks/19 pcap/pcap_prog.c | 15 + meson/test cases/frameworks/2 gtest/meson.build | 14 + meson/test cases/frameworks/2 gtest/test.cc | 9 + meson/test cases/frameworks/2 gtest/test_nomain.cc | 14 + meson/test cases/frameworks/20 cups/cups_prog.c | 8 + meson/test cases/frameworks/20 cups/meson.build | 21 + .../test cases/frameworks/21 libwmf/libwmf_prog.c | 8 + meson/test cases/frameworks/21 libwmf/meson.build | 27 + .../22 gir link order/fake-gthread/fake-gthread.c | 6 + .../22 gir link order/fake-gthread/fake-gthread.h | 6 + .../22 gir link order/fake-gthread/meson.build | 12 + .../22 gir link order/get-prgname/get-prgname.c | 8 + .../22 gir link order/get-prgname/get-prgname.h | 6 + .../22 gir link order/get-prgname/meson.build | 13 + .../frameworks/22 gir link order/meson-sample.c | 48 + .../frameworks/22 gir link order/meson-sample.h | 17 + .../frameworks/22 gir link order/meson.build | 41 + meson/test cases/frameworks/23 hotdoc/doc/index.md | 1 + .../frameworks/23 hotdoc/doc/meson.build | 19 + .../frameworks/23 hotdoc/doc/sitemap.txt | 3 + meson/test cases/frameworks/23 hotdoc/meson.build | 15 + meson/test cases/frameworks/23 hotdoc/test.json | 8 + .../frameworks/24 libgcrypt/libgcrypt_prog.c | 8 + .../test cases/frameworks/24 libgcrypt/meson.build | 23 + meson/test cases/frameworks/25 hdf5/main.c | 30 + meson/test cases/frameworks/25 hdf5/main.cpp | 29 + meson/test cases/frameworks/25 hdf5/main.f90 | 17 + meson/test cases/frameworks/25 hdf5/meson.build | 47 + .../frameworks/25 hdf5/meson_options.txt | 6 + meson/test cases/frameworks/25 hdf5/test.json | 11 + meson/test cases/frameworks/26 netcdf/main.c | 14 + meson/test cases/frameworks/26 netcdf/main.cpp | 15 + meson/test cases/frameworks/26 netcdf/main.f90 | 19 + meson/test cases/frameworks/26 netcdf/meson.build | 36 + meson/test cases/frameworks/26 netcdf/test.json | 3 + meson/test cases/frameworks/27 gpgme/gpgme_prog.c | 8 + meson/test cases/frameworks/27 gpgme/meson.build | 27 + .../frameworks/28 gir link order 2/meson-sample.c | 42 + .../frameworks/28 gir link order 2/meson-sample.h | 17 + .../frameworks/28 gir link order 2/meson.build | 35 + .../28 gir link order 2/samelibname/meson.build | 5 + meson/test cases/frameworks/29 blocks/main.c | 6 + meson/test cases/frameworks/29 blocks/meson.build | 12 + meson/test cases/frameworks/29 blocks/test.json | 3 + meson/test cases/frameworks/3 gmock/gmocktest.cc | 27 + meson/test cases/frameworks/3 gmock/meson.build | 16 + .../30 scalapack/cmake/FindSCALAPACK.cmake | 220 + meson/test cases/frameworks/30 scalapack/main.c | 34 + meson/test cases/frameworks/30 scalapack/main.f90 | 25 + .../test cases/frameworks/30 scalapack/meson.build | 26 + meson/test cases/frameworks/30 scalapack/test.json | 3 + 
meson/test cases/frameworks/31 curses/main.c | 7 + meson/test cases/frameworks/31 curses/meson.build | 13 + .../frameworks/31 curses/meson_options.txt | 6 + meson/test cases/frameworks/31 curses/test.json | 11 + .../32 boost root/boost/include/boost/version.hpp | 3 + .../boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib | 0 .../boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib | 0 .../boost/lib/libboost_regex.so.0.1.0 | 0 .../frameworks/32 boost root/meson.build | 6 + .../frameworks/32 boost root/nativefile.ini.in | 2 + .../boost/extra-dir/include/boost/version.hpp | 3 + .../boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib | 0 .../boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib | 0 .../boost/lib/libboost_regex.so.0.2.0 | 0 .../frameworks/33 boost split root/meson.build | 6 + .../33 boost split root/nativefile.ini.in | 3 + .../frameworks/34 gir static lib/meson.build | 18 + .../34 gir static lib/statichelper/meson-sample.c | 126 + .../statichelper/meson-sample.h.in | 22 + .../34 gir static lib/statichelper/meson.build | 22 + .../34 gir static lib/subdir/gir/meson-subsample.c | 124 + .../34 gir static lib/subdir/gir/meson-subsample.h | 17 + .../34 gir static lib/subdir/gir/meson.build | 28 + .../frameworks/34 gir static lib/subdir/gir/prog.c | 12 + .../frameworks/34 gir static lib/test.json | 9 + meson/test cases/frameworks/4 qt/main.cpp | 55 + meson/test cases/frameworks/4 qt/mainWindow.cpp | 8 + meson/test cases/frameworks/4 qt/mainWindow.h | 20 + meson/test cases/frameworks/4 qt/mainWindow.ui | 54 + meson/test cases/frameworks/4 qt/manualinclude.cpp | 27 + meson/test cases/frameworks/4 qt/manualinclude.h | 22 + meson/test cases/frameworks/4 qt/meson.build | 158 + meson/test cases/frameworks/4 qt/meson_options.txt | 2 + meson/test cases/frameworks/4 qt/plugin/plugin.cpp | 12 + meson/test cases/frameworks/4 qt/plugin/plugin.h | 14 + .../test cases/frameworks/4 qt/plugin/plugin.json | 3 + .../frameworks/4 qt/pluginInterface/plugin_if.h | 21 + meson/test cases/frameworks/4 qt/q5core.cpp | 28 + meson/test cases/frameworks/4 qt/qt4_lang.qrc | 6 + meson/test cases/frameworks/4 qt/qt4core_fr.ts | 12 + meson/test cases/frameworks/4 qt/qt4embedded_fr.ts | 12 + meson/test cases/frameworks/4 qt/qt5_lang.qrc | 6 + meson/test cases/frameworks/4 qt/qt5core_fr.ts | 12 + meson/test cases/frameworks/4 qt/qt5embedded_fr.ts | 12 + meson/test cases/frameworks/4 qt/qtinterface.cpp | 8 + meson/test cases/frameworks/4 qt/stuff.qrc | 6 + meson/test cases/frameworks/4 qt/stuff2.qrc | 6 + .../frameworks/4 qt/subfolder/generator.py | 6 + .../test cases/frameworks/4 qt/subfolder/main.cpp | 29 + .../frameworks/4 qt/subfolder/meson.build | 32 + .../frameworks/4 qt/subfolder/resources/stuff3.qrc | 6 + .../4 qt/subfolder/resources/stuff4.qrc.in | 8 + .../frameworks/4 qt/subfolder/resources/thing.png | Bin 0 -> 40303 bytes meson/test cases/frameworks/4 qt/test.json | 11 + meson/test cases/frameworks/4 qt/thing.png | Bin 0 -> 40303 bytes meson/test cases/frameworks/4 qt/thing2.png | Bin 0 -> 40303 bytes .../5 protocol buffers/asubdir/defs.proto | 5 + .../frameworks/5 protocol buffers/asubdir/main.cpp | 9 + .../5 protocol buffers/asubdir/meson.build | 8 + .../frameworks/5 protocol buffers/defs.proto | 5 + .../frameworks/5 protocol buffers/main.cpp | 9 + .../frameworks/5 protocol buffers/meson.build | 22 + .../5 protocol buffers/sidedir/meson.build | 7 + .../5 protocol buffers/sidedir/sideprog.cpp | 16 + .../withpath/com/mesonbuild/simple.proto | 7 + .../withpath/com/mesonbuild/subsite/complex.proto | 10 + .../5 protocol 
buffers/withpath/meson.build | 13 + .../5 protocol buffers/withpath/pathprog.cpp | 16 + .../frameworks/6 gettext/data/data3/meson.build | 9 + .../6 gettext/data/data3/test.desktop.in | 6 + .../frameworks/6 gettext/data/meson.build | 58 + .../frameworks/6 gettext/data/test.desktop.in | 6 + .../frameworks/6 gettext/data/test2.desktop.in | 6 + .../frameworks/6 gettext/data/test5.desktop.in.in | 6 + .../frameworks/6 gettext/data/test6.desktop.in.in | 6 + .../frameworks/6 gettext/data2/meson.build | 9 + .../frameworks/6 gettext/data2/test.desktop.in | 6 + .../6 gettext/generated/desktopgenerator.py | 13 + .../frameworks/6 gettext/generated/meson.build | 16 + .../6 gettext/generated/something.desktop.in.in | 15 + meson/test cases/frameworks/6 gettext/meson.build | 24 + meson/test cases/frameworks/6 gettext/po/LINGUAS | 4 + meson/test cases/frameworks/6 gettext/po/POTFILES | 2 + meson/test cases/frameworks/6 gettext/po/de.po | 22 + meson/test cases/frameworks/6 gettext/po/fi.po | 22 + .../frameworks/6 gettext/po/intltest.pot | 34 + .../test cases/frameworks/6 gettext/po/meson.build | 3 + meson/test cases/frameworks/6 gettext/po/ru.po | 34 + .../test cases/frameworks/6 gettext/src/intlmain.c | 17 + .../frameworks/6 gettext/src/meson.build | 2 + meson/test cases/frameworks/6 gettext/test.json | 16 + .../7 gnome/gdbus/data/com.example.Sample.xml | 10 + .../frameworks/7 gnome/gdbus/gdbusprog.c | 8 + .../frameworks/7 gnome/gdbus/meson.build | 39 + .../frameworks/7 gnome/genmarshal/main.c | 102 + .../frameworks/7 gnome/genmarshal/marshaller.list | 3 + .../frameworks/7 gnome/genmarshal/meson.build | 12 + meson/test cases/frameworks/7 gnome/gir/copy.py | 18 + .../test cases/frameworks/7 gnome/gir/dep1/dep1.c | 56 + .../test cases/frameworks/7 gnome/gir/dep1/dep1.h | 23 + .../frameworks/7 gnome/gir/dep1/dep2/dep2.c | 124 + .../frameworks/7 gnome/gir/dep1/dep2/dep2.h | 21 + .../frameworks/7 gnome/gir/dep1/dep2/meson.build | 22 + .../frameworks/7 gnome/gir/dep1/dep3/dep3.c | 124 + .../frameworks/7 gnome/gir/dep1/dep3/dep3.h | 21 + .../frameworks/7 gnome/gir/dep1/dep3/meson.build | 22 + .../frameworks/7 gnome/gir/dep1/meson.build | 31 + .../frameworks/7 gnome/gir/meson-sample.c | 121 + .../frameworks/7 gnome/gir/meson-sample.h | 24 + .../frameworks/7 gnome/gir/meson-sample2.c | 45 + .../frameworks/7 gnome/gir/meson-sample2.h | 21 + .../test cases/frameworks/7 gnome/gir/meson.build | 64 + meson/test cases/frameworks/7 gnome/gir/prog.c | 35 + meson/test cases/frameworks/7 gnome/gir/prog.py | 11 + meson/test cases/frameworks/7 gnome/meson.build | 58 + .../frameworks/7 gnome/mkenums/enums.c.in | 41 + .../frameworks/7 gnome/mkenums/enums.h.in | 24 + .../frameworks/7 gnome/mkenums/enums2.c.in | 41 + .../frameworks/7 gnome/mkenums/enums2.h.in | 24 + meson/test cases/frameworks/7 gnome/mkenums/main.c | 30 + .../test cases/frameworks/7 gnome/mkenums/main4.c | 35 + .../test cases/frameworks/7 gnome/mkenums/main5.c | 35 + .../frameworks/7 gnome/mkenums/meson-decls.h | 2 + .../frameworks/7 gnome/mkenums/meson-sample.h | 20 + .../frameworks/7 gnome/mkenums/meson.build | 164 + .../frameworks/7 gnome/resources-data/meson.build | 18 + .../frameworks/7 gnome/resources-data/res1.txt | 1 + .../frameworks/7 gnome/resources-data/res3.txt.in | 1 + .../7 gnome/resources-data/subdir/meson.build | 8 + .../7 gnome/resources-data/subdir/res2.txt | 1 + .../7 gnome/resources-data/subdir/res4.txt.in | 1 + .../frameworks/7 gnome/resources/copyfile.py | 6 + .../frameworks/7 gnome/resources/generated-main.c | 27 + .../7 
gnome/resources/generated.gresource.xml | 9 + .../frameworks/7 gnome/resources/meson.build | 55 + .../7 gnome/resources/myresource.gresource.xml | 9 + .../frameworks/7 gnome/resources/res3.txt | 1 + .../frameworks/7 gnome/resources/resources.py | 10 + .../frameworks/7 gnome/resources/simple-main.c | 27 + .../7 gnome/resources/simple.gresource.xml | 7 + .../7 gnome/schemas/com.github.meson.gschema.xml | 12 + .../frameworks/7 gnome/schemas/meson.build | 7 + .../frameworks/7 gnome/schemas/schemaprog.c | 47 + meson/test cases/frameworks/7 gnome/test.json | 32 + meson/test cases/frameworks/8 flex/lexer.l | 11 + meson/test cases/frameworks/8 flex/meson.build | 33 + meson/test cases/frameworks/8 flex/parser.y | 11 + meson/test cases/frameworks/8 flex/prog.c | 40 + meson/test cases/frameworks/8 flex/test.txt | 1 + meson/test cases/frameworks/9 wxwidgets/mainwin.h | 23 + .../test cases/frameworks/9 wxwidgets/meson.build | 19 + meson/test cases/frameworks/9 wxwidgets/wxprog.cpp | 56 + meson/test cases/frameworks/9 wxwidgets/wxstc.cpp | 6 + .../java/1 basic/com/mesonbuild/Simple.java | 7 + meson/test cases/java/1 basic/meson.build | 11 + meson/test cases/java/1 basic/test.json | 5 + meson/test cases/java/2 subdir/meson.build | 3 + .../java/2 subdir/sub/com/mesonbuild/Simple.java | 8 + .../2 subdir/sub/com/mesonbuild/TextPrinter.java | 14 + meson/test cases/java/2 subdir/sub/meson.build | 5 + .../java/3 args/com/mesonbuild/Simple.java | 7 + meson/test cases/java/3 args/meson.build | 9 + .../java/4 inner class/com/mesonbuild/Simple.java | 15 + meson/test cases/java/4 inner class/meson.build | 5 + .../java/5 includedirs/com/mesonbuild/Simple.java | 8 + .../5 includedirs/com/mesonbuild/TextPrinter.java | 14 + meson/test cases/java/5 includedirs/meson.build | 14 + .../java/6 codegen/com/mesonbuild/Config.java.in | 5 + .../java/6 codegen/com/mesonbuild/Simple.java | 12 + .../java/6 codegen/com/mesonbuild/TextPrinter.java | 14 + .../java/6 codegen/com/mesonbuild/meson.build | 6 + meson/test cases/java/6 codegen/meson.build | 15 + .../java/7 linking/com/mesonbuild/Linking.java | 9 + meson/test cases/java/7 linking/meson.build | 8 + .../7 linking/sub/com/mesonbuild/SimpleLib.java | 7 + meson/test cases/java/7 linking/sub/meson.build | 2 + .../com/mesonbuild/Config.java.in | 5 + .../com/mesonbuild/Simple.java | 12 + .../com/mesonbuild/TextPrinter.java | 14 + .../com/mesonbuild/meson.build | 8 + .../java/8 codegen custom target/meson.build | 15 + .../java/9 jdk/lib/com_mesonbuild_JdkTest.c | 9 + .../java/9 jdk/lib/com_mesonbuild_JdkTest.h | 21 + meson/test cases/java/9 jdk/lib/meson.build | 14 + meson/test cases/java/9 jdk/lib/native.c | 11 + meson/test cases/java/9 jdk/meson.build | 18 + .../java/9 jdk/src/com/mesonbuild/JdkTest.java | 15 + meson/test cases/java/9 jdk/src/meson.build | 17 + meson/test cases/keyval/1 basic/.config | 3 + meson/test cases/keyval/1 basic/meson.build | 18 + meson/test cases/keyval/1 basic/test.json | 7 + meson/test cases/keyval/2 subdir/.config | 2 + meson/test cases/keyval/2 subdir/dir/meson.build | 13 + meson/test cases/keyval/2 subdir/meson.build | 4 + .../keyval/3 load_config files/dir/config | 2 + .../keyval/3 load_config files/dir/meson.build | 13 + .../keyval/3 load_config files/meson.build | 4 + .../keyval/4 load_config builddir/config | 2 + .../keyval/4 load_config builddir/meson.build | 14 + .../linuxlike/1 pkg-config/incdir/myinc.h | 3 + .../test cases/linuxlike/1 pkg-config/meson.build | 53 + .../linuxlike/1 pkg-config/prog-checkver.c | 15 + meson/test cases/linuxlike/1 
pkg-config/prog.c | 8 + .../linuxlike/10 large file support/meson.build | 16 + .../linuxlike/11 runpath rpath ldlibrarypath/lib.c | 3 + .../lib1/meson.build | 2 + .../lib2/meson.build | 3 + .../11 runpath rpath ldlibrarypath/main.c | 11 + .../11 runpath rpath ldlibrarypath/meson.build | 16 + .../linuxlike/12 subprojects in subprojects/main.c | 9 + .../12 subprojects in subprojects/meson.build | 9 + .../subprojects/a/a.c | 5 + .../subprojects/a/a.h | 1 + .../subprojects/a/meson.build | 11 + .../subprojects/b/b.c | 11 + .../subprojects/b/b.h | 1 + .../subprojects/b/meson.build | 17 + .../subprojects/c/c.h | 3 + .../subprojects/c/meson.build | 5 + .../linuxlike/13 cmake dependency/cmVers.sh | 6 + .../cmake/FindImportedTarget.cmake | 15 + .../cmake/FindSomethingLikeZLIB.cmake | 56 + .../cmake_fake1/cmMesonTestF1Config.cmake | 11 + .../cmake_fake2/cmMesonTestF2Config.cmake | 9 + .../13 cmake dependency/cmake_fake3/bin/.gitkeep | 0 .../cmake/cmMesonTestF3/cmMesonTestF3Config.cmake | 9 + .../cmMesonTestDep/cmMesonTestDepConfig.cmake | 9 + .../cmMesonVersionedTestDepConfig.cmake | 9 + .../cmMesonVersionedTestDepConfigVersion.cmake | 12 + .../linuxlike/13 cmake dependency/incdir/myinc.h | 3 + .../linuxlike/13 cmake dependency/meson.build | 95 + .../linuxlike/13 cmake dependency/prog-checkver.c | 15 + .../linuxlike/13 cmake dependency/prog.c | 8 + .../linuxlike/13 cmake dependency/test.json | 14 + .../linuxlike/13 cmake dependency/testFlagSet.c | 18 + .../linuxlike/14 static dynamic linkage/main.c | 7 + .../14 static dynamic linkage/meson.build | 36 + .../14 static dynamic linkage/verify_static.py | 29 + .../test cases/linuxlike/15 ld binary/meson.build | 4 + .../linuxlike/2 external library/meson.build | 43 + .../test cases/linuxlike/2 external library/prog.c | 8 + meson/test cases/linuxlike/3 linker script/bob.c | 9 + meson/test cases/linuxlike/3 linker script/bob.h | 6 + meson/test cases/linuxlike/3 linker script/bob.map | 6 + .../linuxlike/3 linker script/bob.map.in | 6 + meson/test cases/linuxlike/3 linker script/copy.py | 5 + .../linuxlike/3 linker script/meson.build | 62 + meson/test cases/linuxlike/3 linker script/prog.c | 5 + .../linuxlike/3 linker script/sub/foo.map | 6 + .../linuxlike/3 linker script/sub/meson.build | 6 + .../test cases/linuxlike/4 extdep static lib/lib.c | 8 + .../linuxlike/4 extdep static lib/meson.build | 10 + .../linuxlike/4 extdep static lib/prog.c | 5 + .../linuxlike/5 dependency versions/meson.build | 122 + .../subprojects/fakezlib/meson.build | 3 + .../subprojects/somelib/lib.c | 0 .../subprojects/somelib/meson.build | 11 + .../subprojects/somelibnover/lib.c | 0 .../subprojects/somelibnover/meson.build | 8 + .../subprojects/somelibver/lib.c | 0 .../subprojects/somelibver/meson.build | 9 + .../linuxlike/6 subdir include order/meson.build | 12 + .../linuxlike/6 subdir include order/prog.c | 7 + .../linuxlike/6 subdir include order/subdir/glib.h | 1 + .../linuxlike/7 library versions/exe.orig.c | 8 + .../test cases/linuxlike/7 library versions/lib.c | 3 + .../linuxlike/7 library versions/meson.build | 55 + .../linuxlike/7 library versions/test.json | 14 + .../8 subproject library install/meson.build | 10 + .../subprojects/sublib/include/subdefs.h | 21 + .../subprojects/sublib/meson.build | 10 + .../subprojects/sublib/sublib.c | 5 + .../8 subproject library install/test.json | 7 + .../meson.build | 36 + meson/test cases/nasm/1 configure file/hello.asm | 27 + meson/test cases/nasm/1 configure file/meson.build | 55 + meson/test cases/native/1 trivial/meson.build | 9 + 
meson/test cases/native/1 trivial/trivial.c | 6 + meson/test cases/native/2 global arg/meson.build | 14 + meson/test cases/native/2 global arg/prog.c | 43 + meson/test cases/native/2 global arg/prog.cc | 15 + .../native/3 pipeline/depends/copyrunner.py | 7 + .../native/3 pipeline/depends/filecopier.c | 23 + .../native/3 pipeline/depends/libsrc.c.in | 3 + .../native/3 pipeline/depends/meson.build | 11 + meson/test cases/native/3 pipeline/depends/prog.c | 5 + meson/test cases/native/3 pipeline/input_src.dat | 1 + meson/test cases/native/3 pipeline/meson.build | 23 + meson/test cases/native/3 pipeline/prog.c | 5 + .../test cases/native/3 pipeline/src/input_src.dat | 1 + meson/test cases/native/3 pipeline/src/meson.build | 12 + meson/test cases/native/3 pipeline/src/prog.c | 9 + meson/test cases/native/3 pipeline/src/srcgen.c | 40 + meson/test cases/native/3 pipeline/srcgen.c | 69 + meson/test cases/native/4 tryrun/error.c | 3 + meson/test cases/native/4 tryrun/meson.build | 78 + meson/test cases/native/4 tryrun/no_compile.c | 1 + meson/test cases/native/4 tryrun/ok.c | 7 + meson/test cases/native/5 install script/file.txt | 0 .../test cases/native/5 install script/meson.build | 12 + meson/test cases/native/5 install script/src/exe.c | 27 + .../native/5 install script/src/meson.build | 1 + meson/test cases/native/5 install script/test.json | 8 + meson/test cases/native/5 install script/wrap.py | 6 + meson/test cases/native/6 add language/meson.build | 3 + meson/test cases/native/6 add language/prog.cc | 6 + .../native/7 selfbuilt custom/checkarg.cpp | 6 + .../test cases/native/7 selfbuilt custom/data.dat | 1 + .../native/7 selfbuilt custom/mainprog.cpp | 5 + .../native/7 selfbuilt custom/meson.build | 39 + .../test cases/native/7 selfbuilt custom/tool.cpp | 34 + .../8 external program shebang parsing/input.txt | 1 + .../8 external program shebang parsing/main.c | 72 + .../8 external program shebang parsing/meson.build | 21 + .../script.int.in | 2 + .../native/9 override with exe/main2.input | 0 .../native/9 override with exe/meson.build | 21 + .../9 override with exe/subprojects/sub/foobar.c | 13 + .../subprojects/sub/meson.build | 3 + meson/test cases/objc/1 simple/meson.build | 4 + meson/test cases/objc/1 simple/prog.m | 5 + meson/test cases/objc/2 nsstring/meson.build | 20 + meson/test cases/objc/2 nsstring/stringprog.m | 10 + meson/test cases/objc/3 objc args/meson.build | 4 + meson/test cases/objc/3 objc args/prog.m | 11 + .../objc/4 c++ project objc subproject/master.cpp | 11 + .../objc/4 c++ project objc subproject/meson.build | 6 + .../subprojects/foo/foo.m | 4 + .../subprojects/foo/meson.build | 5 + meson/test cases/objcpp/1 simple/meson.build | 4 + meson/test cases/objcpp/1 simple/prog.mm | 9 + meson/test cases/objcpp/2 objc++ args/meson.build | 4 + meson/test cases/objcpp/2 objc++ args/prog.mm | 16 + meson/test cases/osx/1 basic/main.c | 5 + meson/test cases/osx/1 basic/meson.build | 3 + .../osx/2 library versions/CMakeLists.txt | 29 + meson/test cases/osx/2 library versions/exe.orig.c | 7 + meson/test cases/osx/2 library versions/lib.c | 3 + .../test cases/osx/2 library versions/meson.build | 81 + .../osx/2 library versions/require_pkgconfig.py | 9 + meson/test cases/osx/2 library versions/test.json | 12 + .../osx/3 has function xcode8/meson.build | 30 + meson/test cases/osx/4 framework/meson.build | 23 + meson/test cases/osx/4 framework/prog.c | 3 + meson/test cases/osx/4 framework/stat.c | 1 + meson/test cases/osx/4 framework/test.json | 6 + .../osx/4 
framework/xcode-frameworks.png | Bin 0 -> 421385 bytes .../test cases/osx/5 extra frameworks/meson.build | 13 + meson/test cases/osx/5 extra frameworks/prog.c | 3 + meson/test cases/osx/5 extra frameworks/stat.c | 1 + meson/test cases/osx/5 extra frameworks/test.json | 6 + meson/test cases/osx/6 multiframework/main.m | 5 + meson/test cases/osx/6 multiframework/meson.build | 13 + meson/test cases/osx/7 bitcode/libbar.mm | 7 + meson/test cases/osx/7 bitcode/libfile.c | 5 + meson/test cases/osx/7 bitcode/libfoo.m | 7 + meson/test cases/osx/7 bitcode/meson.build | 10 + meson/test cases/osx/7 bitcode/vis.h | 6 + meson/test cases/osx/8 pie/main.c | 5 + meson/test cases/osx/8 pie/meson.build | 3 + meson/test cases/python/1 basic/gluon/__init__.py | 0 meson/test cases/python/1 basic/gluon/gluonator.py | 2 + meson/test cases/python/1 basic/meson.build | 27 + meson/test cases/python/1 basic/prog.py | 8 + meson/test cases/python/1 basic/subdir/meson.build | 4 + meson/test cases/python/1 basic/subdir/subprog.py | 11 + meson/test cases/python/2 extmodule/blaster.py.in | 11 + .../test cases/python/2 extmodule/ext/meson.build | 10 + .../python/2 extmodule/ext/nested/meson.build | 16 + .../python/2 extmodule/ext/tachyon_module.c | 49 + .../python/2 extmodule/ext/wrongdir/meson.build | 7 + meson/test cases/python/2 extmodule/meson.build | 43 + meson/test cases/python/2 extmodule/test.json | 13 + meson/test cases/python/3 cython/cytest.py | 19 + .../test cases/python/3 cython/libdir/cstorer.pxd | 9 + .../test cases/python/3 cython/libdir/meson.build | 11 + meson/test cases/python/3 cython/libdir/storer.c | 24 + meson/test cases/python/3 cython/libdir/storer.h | 8 + meson/test cases/python/3 cython/libdir/storer.pyx | 16 + meson/test cases/python/3 cython/meson.build | 26 + .../4 custom target depends extmodule/blaster.py | 30 + .../ext/lib/meson-tachyonlib.c | 8 + .../ext/lib/meson-tachyonlib.h | 6 + .../ext/lib/meson.build | 4 + .../ext/meson.build | 6 + .../ext/tachyon_module.c | 51 + .../4 custom target depends extmodule/meson.build | 45 + .../test cases/python/5 modules kwarg/meson.build | 7 + .../python/6 failing subproject/meson.build | 5 + .../subprojects/bar/meson.build | 4 + meson/test cases/python3/1 basic/gluon/__init__.py | 0 .../test cases/python3/1 basic/gluon/gluonator.py | 2 + meson/test cases/python3/1 basic/meson.build | 32 + meson/test cases/python3/1 basic/prog.py | 9 + .../test cases/python3/1 basic/subdir/meson.build | 4 + meson/test cases/python3/1 basic/subdir/subprog.py | 12 + meson/test cases/python3/2 extmodule/blaster.py | 14 + .../test cases/python3/2 extmodule/ext/meson.build | 6 + .../python3/2 extmodule/ext/tachyon_module.c | 49 + meson/test cases/python3/2 extmodule/meson.build | 37 + meson/test cases/python3/3 cython/cytest.py | 23 + .../test cases/python3/3 cython/libdir/cstorer.pxd | 9 + .../test cases/python3/3 cython/libdir/meson.build | 12 + meson/test cases/python3/3 cython/libdir/storer.c | 24 + meson/test cases/python3/3 cython/libdir/storer.h | 8 + .../test cases/python3/3 cython/libdir/storer.pyx | 16 + meson/test cases/python3/3 cython/meson.build | 20 + .../4 custom target depends extmodule/blaster.py | 32 + .../ext/lib/meson-tachyonlib.c | 8 + .../ext/lib/meson-tachyonlib.h | 6 + .../ext/lib/meson.build | 4 + .../ext/meson.build | 6 + .../ext/tachyon_module.c | 51 + .../4 custom target depends extmodule/meson.build | 41 + meson/test cases/rewrite/1 basic/addSrc.json | 94 + meson/test cases/rewrite/1 basic/addTgt.json | 9 + meson/test cases/rewrite/1 
basic/info.json | 57 + meson/test cases/rewrite/1 basic/meson.build | 19 + meson/test cases/rewrite/1 basic/rmSrc.json | 88 + meson/test cases/rewrite/1 basic/rmTgt.json | 17 + meson/test cases/rewrite/2 subdirs/addSrc.json | 13 + meson/test cases/rewrite/2 subdirs/addTgt.json | 10 + meson/test cases/rewrite/2 subdirs/info.json | 12 + meson/test cases/rewrite/2 subdirs/meson.build | 4 + meson/test cases/rewrite/2 subdirs/rmTgt.json | 7 + .../test cases/rewrite/2 subdirs/sub1/meson.build | 1 + .../test cases/rewrite/2 subdirs/sub2/meson.build | 1 + meson/test cases/rewrite/3 kwargs/add.json | 38 + .../rewrite/3 kwargs/defopts_delete.json | 18 + meson/test cases/rewrite/3 kwargs/defopts_set.json | 24 + meson/test cases/rewrite/3 kwargs/delete.json | 20 + meson/test cases/rewrite/3 kwargs/info.json | 20 + meson/test cases/rewrite/3 kwargs/meson.build | 7 + meson/test cases/rewrite/3 kwargs/remove.json | 38 + .../test cases/rewrite/3 kwargs/remove_regex.json | 29 + meson/test cases/rewrite/3 kwargs/set.json | 34 + .../rewrite/4 same name targets/addSrc.json | 8 + .../rewrite/4 same name targets/info.json | 12 + .../rewrite/4 same name targets/meson.build | 6 + .../rewrite/4 same name targets/sub1/meson.build | 3 + meson/test cases/rewrite/5 sorting/meson.build | 33 + meson/test cases/rust/1 basic/meson.build | 9 + meson/test cases/rust/1 basic/prog.rs | 3 + meson/test cases/rust/1 basic/subdir/meson.build | 2 + meson/test cases/rust/1 basic/subdir/prog.rs | 3 + meson/test cases/rust/1 basic/test.json | 8 + meson/test cases/rust/10 language stds/2015.rs | 3 + meson/test cases/rust/10 language stds/2018.rs | 9 + meson/test cases/rust/10 language stds/meson.build | 18 + meson/test cases/rust/11 generated main/gen.py | 20 + .../rust/11 generated main/generated_lib_main.rs | 5 + .../test cases/rust/11 generated main/meson.build | 21 + .../rust/11 generated main/sub/meson.build | 13 + meson/test cases/rust/12 bindgen/include/other.h | 6 + meson/test cases/rust/12 bindgen/meson.build | 82 + meson/test cases/rust/12 bindgen/src/gen_header.py | 19 + meson/test cases/rust/12 bindgen/src/header.h | 8 + meson/test cases/rust/12 bindgen/src/main.rs | 14 + meson/test cases/rust/12 bindgen/src/main2.rs | 15 + meson/test cases/rust/12 bindgen/src/source.c | 8 + meson/test cases/rust/12 bindgen/sub/meson.build | 39 + .../13 external c dependencies/c_accessing_zlib.c | 10 + .../rust/13 external c dependencies/meson.build | 23 + .../13 external c dependencies/meson_options.txt | 2 + .../rust/13 external c dependencies/prog.rs | 9 + .../rust/13 external c dependencies/test.json | 18 + meson/test cases/rust/14 external libm/meson.build | 24 + .../rust/14 external libm/meson_options.txt | 2 + meson/test cases/rust/14 external libm/prog.rs | 5 + meson/test cases/rust/14 external libm/rs_math.rs | 12 + meson/test cases/rust/14 external libm/test.json | 10 + .../test cases/rust/15 polyglot sharedlib/adder.c | 18 + .../test cases/rust/15 polyglot sharedlib/adder.h | 34 + .../test cases/rust/15 polyglot sharedlib/adder.rs | 9 + .../rust/15 polyglot sharedlib/addertest.c | 12 + .../rust/15 polyglot sharedlib/meson.build | 20 + .../rust/16 internal c dependencies/lib.c | 6 + .../rust/16 internal c dependencies/lib.h | 22 + .../rust/16 internal c dependencies/main.rs | 9 + .../rust/16 internal c dependencies/meson.build | 14 + .../rust/16 internal c dependencies/test.py | 25 + meson/test cases/rust/2 sharedlib/meson.build | 16 + meson/test cases/rust/2 sharedlib/prog.rs | 3 + meson/test cases/rust/2 sharedlib/stuff.rs | 3 
+ meson/test cases/rust/2 sharedlib/test.json | 10 + meson/test cases/rust/3 staticlib/meson.build | 5 + meson/test cases/rust/3 staticlib/prog.rs | 3 + meson/test cases/rust/3 staticlib/stuff.rs | 3 + meson/test cases/rust/3 staticlib/test.json | 7 + meson/test cases/rust/4 polyglot/meson.build | 9 + meson/test cases/rust/4 polyglot/prog.c | 8 + meson/test cases/rust/4 polyglot/stuff.rs | 6 + meson/test cases/rust/4 polyglot/test.json | 10 + .../test cases/rust/5 polyglot static/meson.build | 17 + meson/test cases/rust/5 polyglot static/prog.c | 8 + meson/test cases/rust/5 polyglot static/stuff.rs | 6 + meson/test cases/rust/5 polyglot static/test.json | 7 + .../test cases/rust/6 named staticlib/meson.build | 5 + meson/test cases/rust/6 named staticlib/prog.rs | 3 + meson/test cases/rust/6 named staticlib/stuff.rs | 1 + meson/test cases/rust/6 named staticlib/test.json | 7 + .../rust/7 private crate collision/meson.build | 5 + .../rust/7 private crate collision/prog.rs | 3 + .../rust/7 private crate collision/rand.rs | 4 + .../rust/7 private crate collision/test.json | 7 + meson/test cases/rust/8 many files/foo.rs | 3 + meson/test cases/rust/8 many files/main.rs | 5 + meson/test cases/rust/8 many files/meson.build | 3 + meson/test cases/rust/9 unit tests/meson.build | 43 + meson/test cases/rust/9 unit tests/test.rs | 24 + meson/test cases/rust/9 unit tests/test2.rs | 11 + meson/test cases/swift/1 exe/main.swift | 1 + meson/test cases/swift/1 exe/meson.build | 3 + meson/test cases/swift/2 multifile/libfile.swift | 3 + meson/test cases/swift/2 multifile/main.swift | 5 + meson/test cases/swift/2 multifile/meson.build | 3 + meson/test cases/swift/3 library/exe/main.swift | 7 + meson/test cases/swift/3 library/exe/meson.build | 2 + .../swift/3 library/lib/datasource.swift | 3 + meson/test cases/swift/3 library/lib/meson.build | 1 + .../swift/3 library/lib/othersource.swift | 3 + meson/test cases/swift/3 library/meson.build | 4 + meson/test cases/swift/4 generate/gen/main.swift | 18 + meson/test cases/swift/4 generate/gen/meson.build | 6 + meson/test cases/swift/4 generate/meson.build | 4 + meson/test cases/swift/4 generate/user/main.swift | 3 + meson/test cases/swift/4 generate/user/meson.build | 2 + meson/test cases/swift/5 mixed/main.swift | 3 + meson/test cases/swift/5 mixed/meson.build | 6 + meson/test cases/swift/5 mixed/mylib.c | 5 + meson/test cases/swift/5 mixed/mylib.h | 3 + meson/test cases/swift/6 modulemap/main.swift | 5 + meson/test cases/swift/6 modulemap/meson.build | 8 + .../test cases/swift/6 modulemap/module.modulemap | 5 + meson/test cases/swift/6 modulemap/mylib.c | 5 + meson/test cases/swift/6 modulemap/mylib.h | 3 + .../test cases/swift/7 modulemap subdir/main.swift | 5 + .../swift/7 modulemap subdir/meson.build | 6 + .../swift/7 modulemap subdir/mylib/meson.build | 4 + .../7 modulemap subdir/mylib/module.modulemap | 5 + .../swift/7 modulemap subdir/mylib/mylib.c | 5 + .../swift/7 modulemap subdir/mylib/mylib.h | 3 + meson/test cases/unit/1 soname/CMakeLists.txt | 26 + meson/test cases/unit/1 soname/meson.build | 22 + meson/test cases/unit/1 soname/versioned.c | 3 + meson/test cases/unit/10 build_rpath/meson.build | 16 + meson/test cases/unit/10 build_rpath/prog.c | 5 + meson/test cases/unit/10 build_rpath/prog.cc | 8 + .../test cases/unit/10 build_rpath/sub/meson.build | 1 + meson/test cases/unit/10 build_rpath/sub/stuff.c | 3 + meson/test cases/unit/100 rlib linkage/lib2.rs | 5 + meson/test cases/unit/100 rlib linkage/main.rs | 5 + meson/test cases/unit/100 rlib 
linkage/meson.build | 22 + meson/test cases/unit/11 cross prog/meson.build | 16 + .../unit/11 cross prog/some_cross_tool.py | 4 + meson/test cases/unit/11 cross prog/sometool.py | 4 + meson/test cases/unit/12 promote/meson.build | 5 + .../unit/12 promote/subprojects/s1/meson.build | 7 + .../test cases/unit/12 promote/subprojects/s1/s1.c | 6 + .../subprojects/s1/subprojects/s3/meson.build | 4 + .../12 promote/subprojects/s1/subprojects/s3/s3.c | 3 + .../subprojects/s1/subprojects/scommon/meson.build | 4 + .../s1/subprojects/scommon/scommon_broken.c | 1 + .../unit/12 promote/subprojects/s2/meson.build | 6 + .../test cases/unit/12 promote/subprojects/s2/s2.c | 6 + .../subprojects/s2/subprojects/athing.wrap | 1 + .../subprojects/s2/subprojects/scommon/meson.build | 4 + .../s2/subprojects/scommon/scommon_ok.c | 3 + meson/test cases/unit/13 reconfigure/meson.build | 5 + .../test cases/unit/14 testsetup selection/main.c | 3 + .../unit/14 testsetup selection/meson.build | 10 + .../14 testsetup selection/subprojects/bar/bar.c | 3 + .../subprojects/bar/meson.build | 6 + .../14 testsetup selection/subprojects/foo/foo.c | 3 + .../subprojects/foo/meson.build | 4 + meson/test cases/unit/15 prebuilt object/main.c | 5 + .../test cases/unit/15 prebuilt object/meson.build | 25 + meson/test cases/unit/15 prebuilt object/source.c | 8 + .../unit/16 prebuilt static/libdir/best.c | 3 + .../unit/16 prebuilt static/libdir/best.h | 3 + .../unit/16 prebuilt static/libdir/meson.build | 5 + meson/test cases/unit/16 prebuilt static/main.c | 7 + .../test cases/unit/16 prebuilt static/meson.build | 5 + .../unit/17 prebuilt shared/alexandria.c | 6 + .../unit/17 prebuilt shared/alexandria.h | 20 + .../unit/17 prebuilt shared/another_visitor.c | 10 + .../test cases/unit/17 prebuilt shared/meson.build | 14 + meson/test cases/unit/17 prebuilt shared/patron.c | 9 + meson/test cases/unit/18 pkgconfig static/foo.c | 8 + .../test cases/unit/18 pkgconfig static/foo.pc.in | 11 + .../unit/18 pkgconfig static/include/foo.h | 3 + meson/test cases/unit/18 pkgconfig static/main.c | 14 + .../unit/18 pkgconfig static/meson.build | 37 + meson/test cases/unit/19 array option/meson.build | 15 + .../unit/19 array option/meson_options.txt | 20 + meson/test cases/unit/2 testsetups/buggy.c | 14 + meson/test cases/unit/2 testsetups/envcheck.py | 5 + meson/test cases/unit/2 testsetups/impl.c | 5 + meson/test cases/unit/2 testsetups/impl.h | 3 + meson/test cases/unit/2 testsetups/meson.build | 26 + .../unit/20 subproj dep variables/meson.build | 16 + .../subprojects/failingsubproj/meson.build | 3 + .../subprojects/nestedsubproj/meson.build | 3 + .../nestedsubproj/subprojects/subsubproject.wrap | 1 + .../subprojects/somesubproj/meson.build | 3 + meson/test cases/unit/21 exit status/meson.build | 2 + meson/test cases/unit/22 warning location/a.c | 0 meson/test cases/unit/22 warning location/b.c | 0 meson/test cases/unit/22 warning location/conf.in | 1 + meson/test cases/unit/22 warning location/main.c | 0 .../unit/22 warning location/meson.build | 11 + meson/test cases/unit/22 warning location/sub/c.c | 0 meson/test cases/unit/22 warning location/sub/d.c | 0 .../unit/22 warning location/sub/meson.build | 4 + .../test cases/unit/22 warning location/sub/sub.c | 0 .../unit/23 unfound pkgconfig/meson.build | 15 + meson/test cases/unit/23 unfound pkgconfig/some.c | 3 + .../unit/24 compiler run_command/meson.build | 10 + .../unit/25 non-permitted kwargs/meson.build | 5 + .../test cases/unit/26 install umask/datafile.cat | 1 + meson/test cases/unit/26 
install umask/meson.build | 7 + .../test cases/unit/26 install umask/myinstall.py | 17 + meson/test cases/unit/26 install umask/prog.1 | 1 + meson/test cases/unit/26 install umask/prog.c | 3 + meson/test cases/unit/26 install umask/sample.h | 6 + .../unit/26 install umask/subdir/datafile.dog | 1 + .../unit/26 install umask/subdir/sayhello | 2 + .../unit/27 pkgconfig usage/dependee/meson.build | 7 + .../unit/27 pkgconfig usage/dependee/pkguser.c | 6 + .../unit/27 pkgconfig usage/dependency/meson.build | 24 + .../unit/27 pkgconfig usage/dependency/pkgdep.c | 7 + .../unit/27 pkgconfig usage/dependency/pkgdep.h | 3 + .../27 pkgconfig usage/dependency/privatelib.c | 3 + meson/test cases/unit/28 ndebug if-release/main.c | 11 + .../unit/28 ndebug if-release/meson.build | 3 + .../unit/29 guessed linker dependencies/exe/app.c | 6 + .../29 guessed linker dependencies/exe/meson.build | 7 + .../unit/29 guessed linker dependencies/lib/lib.c | 20 + .../29 guessed linker dependencies/lib/meson.build | 11 + .../lib/meson_options.txt | 1 + .../unit/3 subproject defaults/meson.build | 11 + .../unit/3 subproject defaults/meson_options.txt | 3 + .../subprojects/foob/meson.build | 12 + .../subprojects/foob/meson_options.txt | 5 + .../unit/30 shared_mod linking/libfile.c | 14 + meson/test cases/unit/30 shared_mod linking/main.c | 11 + .../unit/30 shared_mod linking/meson.build | 5 + meson/test cases/unit/31 forcefallback/meson.build | 9 + .../subprojects/notzlib/meson.build | 7 + .../31 forcefallback/subprojects/notzlib/notzlib.c | 6 + .../31 forcefallback/subprojects/notzlib/notzlib.h | 18 + .../unit/31 forcefallback/test_not_zlib.c | 8 + .../unit/32 pkgconfig use libraries/app/app.c | 6 + .../32 pkgconfig use libraries/app/meson.build | 5 + .../unit/32 pkgconfig use libraries/lib/liba.c | 2 + .../unit/32 pkgconfig use libraries/lib/libb.c | 5 + .../32 pkgconfig use libraries/lib/meson.build | 16 + .../meson.build | 3 + .../33 cross file overrides always args/test.c | 8 + .../ubuntu-armhf-overrides.txt | 19 + meson/test cases/unit/34 command line/meson.build | 9 + .../unit/34 command line/meson_options.txt | 2 + .../34 command line/subprojects/subp/meson.build | 3 + .../subprojects/subp/meson_options.txt | 1 + meson/test cases/unit/35 dist script/meson.build | 10 + meson/test cases/unit/35 dist script/prog.c | 7 + meson/test cases/unit/35 dist script/replacer.py | 16 + .../35 dist script/subprojects/sub/dist-script.py | 12 + .../35 dist script/subprojects/sub/meson.build | 11 + .../subprojects/sub/meson_options.txt | 1 + .../unit/35 dist script/subprojects/sub/prog.c | 1 + .../unit/36 exe_wrapper behaviour/broken-cross.txt | 20 + .../unit/36 exe_wrapper behaviour/meson.build | 19 + .../36 exe_wrapper behaviour/meson_options.txt | 2 + .../unit/36 exe_wrapper behaviour/prog.c | 17 + .../unit/37 mixed command line args/meson.build | 1 + .../37 mixed command line args/meson_options.txt | 10 + .../unit/38 pkgconfig format/meson.build | 18 + .../test cases/unit/38 pkgconfig format/somelib.c | 7 + .../test cases/unit/38 pkgconfig format/someret.c | 3 + .../test cases/unit/39 python extmodule/blaster.py | 14 + .../unit/39 python extmodule/ext/meson.build | 6 + .../unit/39 python extmodule/ext/tachyon_module.c | 59 + .../unit/39 python extmodule/meson.build | 26 + .../unit/39 python extmodule/meson_options.txt | 4 + .../unit/4 suite selection/failing_test.c | 1 + .../test cases/unit/4 suite selection/meson.build | 17 + .../subprojects/subprjfail/failing_test.c | 1 + .../subprojects/subprjfail/meson.build | 9 + 
.../subprojects/subprjmix/failing_test.c | 1 + .../subprojects/subprjmix/meson.build | 9 + .../subprojects/subprjmix/successful_test.c | 1 + .../subprojects/subprjsucc/meson.build | 9 + .../subprojects/subprjsucc/successful_test.c | 1 + .../unit/4 suite selection/successful_test.c | 1 + .../built library/bar.c | 7 + .../built library/meson.build | 26 + .../built library/meson_options.txt | 1 + .../built library/prog.c | 7 + .../external library/bar.c | 6 + .../external library/faa.c | 4 + .../external library/foo.c | 4 + .../external library/meson.build | 22 + .../unit/41 featurenew subprojects/meson.build | 7 + .../subprojects/bar/meson.build | 3 + .../subprojects/baz/meson.build | 3 + .../subprojects/foo/meson.build | 3 + meson/test cases/unit/42 rpath order/meson.build | 11 + meson/test cases/unit/42 rpath order/myexe.c | 3 + .../unit/42 rpath order/subprojects/sub1/lib.c | 0 .../42 rpath order/subprojects/sub1/meson.build | 5 + .../unit/42 rpath order/subprojects/sub2/lib.c | 0 .../42 rpath order/subprojects/sub2/meson.build | 5 + meson/test cases/unit/43 dep order/lib1.c | 0 meson/test cases/unit/43 dep order/lib2.c | 0 meson/test cases/unit/43 dep order/meson.build | 8 + meson/test cases/unit/43 dep order/myexe.c | 3 + meson/test cases/unit/44 promote wrap/meson.build | 5 + .../44 promote wrap/subprojects/s1/meson.build | 2 + .../s1/subprojects/ambiguous/meson.build | 2 + .../44 promote wrap/subprojects/s2/meson.build | 2 + .../subprojects/s2/subprojects/ambiguous.wrap | 2 + meson/test cases/unit/45 vscpp17/main.cpp | 29 + meson/test cases/unit/45 vscpp17/meson.build | 4 + .../46 native dep pkgconfig var/cross_pkgconfig.py | 12 + .../cross_pkgconfig/dep_tester.pc | 5 + .../unit/46 native dep pkgconfig var/meson.build | 15 + .../46 native dep pkgconfig var/meson_options.txt | 6 + .../native_pkgconfig/dep_tester.pc | 5 + .../unit/47 native file binary/meson.build | 21 + .../unit/47 native file binary/meson_options.txt | 5 + meson/test cases/unit/48 reconfigure/main.c | 4 + meson/test cases/unit/48 reconfigure/meson.build | 11 + .../unit/48 reconfigure/meson_options.txt | 4 + .../48 reconfigure/subprojects/sub1/meson.build | 3 + .../unit/49 testsetup default/envcheck.py | 11 + .../unit/49 testsetup default/meson.build | 23 + .../unit/5 compiler detection/compiler wrapper.py | 6 + .../unit/5 compiler detection/meson.build | 8 + .../test cases/unit/5 compiler detection/trivial.c | 6 + .../unit/5 compiler detection/trivial.cc | 6 + .../test cases/unit/5 compiler detection/trivial.m | 5 + .../unit/5 compiler detection/trivial.mm | 9 + .../unit/50 pkgconfig csharp library/meson.build | 10 + .../unit/50 pkgconfig csharp library/somelib.cs | 12 + .../unit/51 noncross options/meson.build | 14 + meson/test cases/unit/51 noncross options/prog.c | 1 + meson/test cases/unit/51 noncross options/ylib.pc | 13 + meson/test cases/unit/52 ldflagdedup/bob.c | 5 + meson/test cases/unit/52 ldflagdedup/meson.build | 12 + meson/test cases/unit/52 ldflagdedup/prog.c | 7 + .../53 pkgconfig static link order/meson.build | 11 + .../test cases/unit/54 clang-format/.clang-format | 5 + .../unit/54 clang-format/dummydir.h/dummy.dat | 1 + .../unit/54 clang-format/header_expected_h | 3 + .../test cases/unit/54 clang-format/header_orig_h | 9 + meson/test cases/unit/54 clang-format/meson.build | 4 + .../unit/54 clang-format/prog_expected_c | 6 + meson/test cases/unit/54 clang-format/prog_orig_c | 21 + .../subprojects/projectBad/meson.build | 9 + .../subprojects/projectBad/meson_options.txt | 1 + .../unit/56 dedup 
compiler libs/app/app.c | 13 + .../unit/56 dedup compiler libs/app/meson.build | 2 + .../unit/56 dedup compiler libs/liba/liba.c | 18 + .../unit/56 dedup compiler libs/liba/liba.h | 8 + .../unit/56 dedup compiler libs/liba/meson.build | 8 + .../unit/56 dedup compiler libs/libb/libb.c | 7 + .../unit/56 dedup compiler libs/libb/libb.h | 6 + .../unit/56 dedup compiler libs/libb/meson.build | 6 + .../unit/56 dedup compiler libs/meson.build | 7 + meson/test cases/unit/57 introspection/cp.py | 5 + meson/test cases/unit/57 introspection/meson.build | 72 + .../unit/57 introspection/meson_options.txt | 2 + .../unit/57 introspection/sharedlib/meson.build | 2 + .../unit/57 introspection/sharedlib/shared.cpp | 9 + .../unit/57 introspection/sharedlib/shared.hpp | 10 + .../unit/57 introspection/staticlib/meson.build | 3 + .../unit/57 introspection/staticlib/static.c | 5 + .../unit/57 introspection/staticlib/static.h | 11 + meson/test cases/unit/57 introspection/t1.cpp | 13 + meson/test cases/unit/57 introspection/t2.cpp | 8 + meson/test cases/unit/57 introspection/t3.cpp | 16 + .../build_extra_path/totally_made_up_dep.pc | 7 + .../host_extra_path/totally_made_up_dep.pc | 7 + .../unit/58 pkg_config_path option/meson.build | 7 + .../unit/59 introspect buildoptions/c_compiler.py | 3 + .../unit/59 introspect buildoptions/main.c | 6 + .../unit/59 introspect buildoptions/meson.build | 18 + .../59 introspect buildoptions/meson_options.txt | 2 + .../subprojects/evilFile.txt | 0 .../subprojects/projectA/meson.build | 3 + .../subprojects/projectA/meson_options.txt | 1 + .../subprojects/projectBad/meson.build | 9 + .../subprojects/projectBad/meson_options.txt | 1 + meson/test cases/unit/6 std override/meson.build | 10 + meson/test cases/unit/6 std override/prog11.cpp | 6 + meson/test cases/unit/6 std override/prog98.cpp | 6 + meson/test cases/unit/6 std override/progp.cpp | 6 + .../unit/60 native file override/crossfile | 16 + .../unit/60 native file override/crossfile2 | 4 + .../unit/60 native file override/meson.build | 10 + .../unit/60 native file override/meson_options.txt | 13 + .../unit/60 native file override/nativefile | 16 + .../unit/61 identity cross/build_wrapper.py | 11 + .../unit/61 identity cross/host_wrapper.py | 11 + .../test cases/unit/61 identity cross/meson.build | 15 + meson/test cases/unit/61 identity cross/stuff.h | 27 + .../pkgconfig/librelativepath.pc | 9 + .../unit/63 cmake_prefix_path/meson.build | 4 + .../lib/cmake/mesontest/mesontest-config.cmake | 4 + meson/test cases/unit/64 cmake parser/meson.build | 19 + .../lib/cmake/mesontest/mesontest-config.cmake | 63 + meson/test cases/unit/65 alias target/main.c | 3 + meson/test cases/unit/65 alias target/meson.build | 15 + .../unit/66 static archive stripping/app/appA.c | 4 + .../unit/66 static archive stripping/app/appB.c | 4 + .../66 static archive stripping/app/meson.build | 7 + .../unit/66 static archive stripping/lib/libA.c | 5 + .../unit/66 static archive stripping/lib/libA.h | 1 + .../unit/66 static archive stripping/lib/libB.c | 5 + .../unit/66 static archive stripping/lib/libB.h | 1 + .../66 static archive stripping/lib/meson.build | 23 + meson/test cases/unit/67 static link/lib/func1.c | 9 + meson/test cases/unit/67 static link/lib/func10.c | 4 + meson/test cases/unit/67 static link/lib/func11.c | 6 + meson/test cases/unit/67 static link/lib/func12.c | 7 + meson/test cases/unit/67 static link/lib/func14.c | 4 + meson/test cases/unit/67 static link/lib/func15.c | 6 + meson/test cases/unit/67 static link/lib/func16.c | 6 + meson/test 
cases/unit/67 static link/lib/func17.c | 4 + meson/test cases/unit/67 static link/lib/func18.c | 6 + meson/test cases/unit/67 static link/lib/func19.c | 7 + meson/test cases/unit/67 static link/lib/func2.c | 6 + meson/test cases/unit/67 static link/lib/func3.c | 4 + meson/test cases/unit/67 static link/lib/func4.c | 6 + meson/test cases/unit/67 static link/lib/func5.c | 4 + meson/test cases/unit/67 static link/lib/func6.c | 6 + meson/test cases/unit/67 static link/lib/func7.c | 4 + meson/test cases/unit/67 static link/lib/func8.c | 6 + meson/test cases/unit/67 static link/lib/func9.c | 6 + .../test cases/unit/67 static link/lib/meson.build | 80 + meson/test cases/unit/67 static link/meson.build | 32 + meson/test cases/unit/67 static link/test1.c | 7 + meson/test cases/unit/67 static link/test2.c | 6 + meson/test cases/unit/67 static link/test3.c | 6 + meson/test cases/unit/67 static link/test4.c | 6 + meson/test cases/unit/67 static link/test5.c | 6 + .../test cases/unit/68 test env value/meson.build | 10 + meson/test cases/unit/68 test env value/test.py | 6 + meson/test cases/unit/69 clang-tidy/.clang-tidy | 1 + meson/test cases/unit/69 clang-tidy/cttest.cpp | 7 + .../unit/69 clang-tidy/dummydir.h/dummy.dat | 1 + meson/test cases/unit/69 clang-tidy/meson.build | 3 + meson/test cases/unit/7 run installed/foo/foo.c | 3 + .../unit/7 run installed/foo/meson.build | 7 + meson/test cases/unit/7 run installed/meson.build | 9 + meson/test cases/unit/7 run installed/prog.c | 5 + meson/test cases/unit/70 cross/crossfile.in | 5 + meson/test cases/unit/70 cross/meson.build | 16 + meson/test cases/unit/70 cross/meson_options.txt | 1 + .../unit/71 cross test passed/exewrapper.py | 24 + .../unit/71 cross test passed/meson.build | 19 + .../unit/71 cross test passed/meson_options.txt | 5 + .../test cases/unit/71 cross test passed/script.py | 7 + .../unit/71 cross test passed/src/main.c | 6 + meson/test cases/unit/72 summary/meson.build | 23 + meson/test cases/unit/72 summary/meson_options.txt | 1 + .../unit/72 summary/subprojects/sub/meson.build | 4 + .../unit/72 summary/subprojects/sub2/meson.build | 6 + .../sub2/subprojects/subsub/meson.build | 3 + meson/test cases/unit/73 wrap file url/meson.build | 4 + .../73 wrap file url/subprojects/foo-patch.tar.xz | Bin 0 -> 228 bytes .../unit/73 wrap file url/subprojects/foo.tar.xz | Bin 0 -> 216 bytes meson/test cases/unit/74 dep files/foo.c | 0 meson/test cases/unit/74 dep files/meson.build | 16 + .../unit/75 pkgconfig prefixes/client/client.c | 8 + .../unit/75 pkgconfig prefixes/client/meson.build | 3 + .../unit/75 pkgconfig prefixes/val1/meson.build | 5 + .../unit/75 pkgconfig prefixes/val1/val1.c | 3 + .../unit/75 pkgconfig prefixes/val1/val1.h | 1 + .../unit/75 pkgconfig prefixes/val2/meson.build | 8 + .../unit/75 pkgconfig prefixes/val2/val2.c | 4 + .../unit/75 pkgconfig prefixes/val2/val2.h | 1 + meson/test cases/unit/76 subdir libdir/meson.build | 2 + .../76 subdir libdir/subprojects/flub/meson.build | 1 + meson/test cases/unit/77 as link whole/bar.c | 6 + meson/test cases/unit/77 as link whole/foo.c | 6 + meson/test cases/unit/77 as link whole/meson.build | 11 + meson/test cases/unit/78 nostdlib/meson.build | 14 + meson/test cases/unit/78 nostdlib/prog.c | 7 + .../unit/78 nostdlib/subprojects/mylibc/libc.c | 35 + .../78 nostdlib/subprojects/mylibc/meson.build | 13 + .../unit/78 nostdlib/subprojects/mylibc/stdio.h | 5 + .../78 nostdlib/subprojects/mylibc/stubstart.s | 8 + .../unit/79 user options for subproject/.gitignore | 1 + .../75 user options for 
subproject/.gitignore | 1 + .../75 user options for subproject/meson.build | 3 + meson/test cases/unit/8 -L -l order/first.pc | 13 + meson/test cases/unit/8 -L -l order/meson.build | 6 + meson/test cases/unit/8 -L -l order/prog.c | 5 + meson/test cases/unit/8 -L -l order/second.pc | 13 + meson/test cases/unit/80 global-rpath/meson.build | 3 + .../test cases/unit/80 global-rpath/rpathified.cpp | 6 + .../unit/80 global-rpath/yonder/meson.build | 5 + .../unit/80 global-rpath/yonder/yonder.cpp | 3 + .../unit/80 global-rpath/yonder/yonder.h | 1 + meson/test cases/unit/81 wrap-git/meson.build | 4 + .../packagefiles/wrap_git_builddef/meson.build | 3 + .../subprojects/wrap_git_upstream/main.c | 4 + .../unit/82 meson version compare/meson.build | 19 + .../subprojects/foo/meson.build | 8 + .../unit/83 cross only introspect/meson.build | 2 + .../unit/84 change option choices/meson.build | 1 + .../84 change option choices/meson_options.1.txt | 13 + .../84 change option choices/meson_options.2.txt | 13 + .../85 nested subproject regenerate depends/main.c | 3 + .../meson.build | 6 + .../subprojects/sub1/meson.build | 4 + .../subprojects/sub2/CMakeLists.txt | 1 + meson/test cases/unit/86 cpp modules/main.cpp | 7 + meson/test cases/unit/86 cpp modules/meson.build | 17 + meson/test cases/unit/86 cpp modules/src0.ixx | 7 + meson/test cases/unit/86 cpp modules/src1.ixx | 7 + meson/test cases/unit/86 cpp modules/src2.ixx | 7 + meson/test cases/unit/86 cpp modules/src3.ixx | 7 + meson/test cases/unit/86 cpp modules/src4.ixx | 7 + meson/test cases/unit/86 cpp modules/src5.ixx | 7 + meson/test cases/unit/86 cpp modules/src6.ixx | 7 + meson/test cases/unit/86 cpp modules/src7.ixx | 7 + meson/test cases/unit/86 cpp modules/src8.ixx | 7 + meson/test cases/unit/86 cpp modules/src9.ixx | 5 + meson/test cases/unit/87 prelinking/file1.c | 14 + meson/test cases/unit/87 prelinking/file2.c | 9 + meson/test cases/unit/87 prelinking/file3.c | 9 + meson/test cases/unit/87 prelinking/file4.c | 9 + meson/test cases/unit/87 prelinking/main.c | 10 + meson/test cases/unit/87 prelinking/meson.build | 8 + .../test cases/unit/87 prelinking/private_header.h | 11 + .../test cases/unit/87 prelinking/public_header.h | 3 + meson/test cases/unit/88 run native test/main.c | 17 + .../test cases/unit/88 run native test/meson.build | 6 + .../unit/89 multiple envvars/meson.build | 4 + meson/test cases/unit/89 multiple envvars/prog.c | 18 + meson/test cases/unit/89 multiple envvars/prog.cpp | 18 + meson/test cases/unit/9 d dedup/meson.build | 6 + meson/test cases/unit/9 d dedup/prog.c | 14 + .../unit/90 pkgconfig build rpath order/dummy.pc | 7 + .../90 pkgconfig build rpath order/meson.build | 20 + .../unit/90 pkgconfig build rpath order/prog.c | 5 + .../unit/90 pkgconfig build rpath order/prog.cc | 8 + .../90 pkgconfig build rpath order/sub/meson.build | 1 + .../90 pkgconfig build rpath order/sub/stuff.c | 3 + meson/test cases/unit/91 devenv/main.c | 14 + meson/test cases/unit/91 devenv/meson.build | 12 + .../unit/91 devenv/subprojects/sub/foo.c | 10 + .../unit/91 devenv/subprojects/sub/meson.build | 6 + meson/test cases/unit/91 devenv/test-devenv.py | 8 + .../unit/92 install skip subprojects/foo.c | 4 + .../unit/92 install skip subprojects/foo.dat | 1 + .../unit/92 install skip subprojects/foo.h | 1 + .../unit/92 install skip subprojects/foo/foofile | 0 .../unit/92 install skip subprojects/meson.build | 8 + .../subprojects/bar/bar.c | 4 + .../subprojects/bar/bar.dat | 1 + .../subprojects/bar/bar.h | 1 + .../subprojects/bar/bar/barfile | 1 + 
.../subprojects/bar/meson.build | 6 + .../meson.build | 7 + .../meson_options.txt | 3 + .../subprojects/sub/foo.c | 6 + .../subprojects/sub/meson.build | 7 + meson/test cases/unit/94 clangformat/.clang-format | 4 + .../unit/94 clangformat/.clang-format-ignore | 3 + .../unit/94 clangformat/.clang-format-include | 3 + meson/test cases/unit/94 clangformat/meson.build | 1 + .../unit/94 clangformat/not-included/badformat.cpp | 2 + .../test cases/unit/94 clangformat/src/badformat.c | 2 + .../unit/94 clangformat/src/badformat.cpp | 2 + .../unit/95 custominc/easytogrepfor/genh.py | 7 + .../unit/95 custominc/easytogrepfor/meson.build | 3 + meson/test cases/unit/95 custominc/helper.c | 5 + meson/test cases/unit/95 custominc/meson.build | 9 + meson/test cases/unit/95 custominc/prog.c | 9 + meson/test cases/unit/95 custominc/prog2.c | 10 + .../unit/96 implicit force fallback/meson.build | 8 + .../subprojects/something/meson.build | 3 + meson/test cases/unit/97 link full name/.gitignore | 5 + .../97 link full name/libtestprovider/meson.build | 20 + .../97 link full name/libtestprovider/provider.c | 12 + .../unit/97 link full name/proguser/meson.build | 11 + .../unit/97 link full name/proguser/receiver.c | 19 + meson/test cases/vala/1 basic/meson.build | 7 + meson/test cases/vala/1 basic/prog.vala | 7 + meson/test cases/vala/10 mixed sources/c/foo.c | 5 + .../test cases/vala/10 mixed sources/c/meson.build | 5 + meson/test cases/vala/10 mixed sources/c/writec.py | 12 + meson/test cases/vala/10 mixed sources/meson.build | 7 + .../test cases/vala/10 mixed sources/vala/bar.vala | 5 + .../test cases/vala/11 generated vapi/libbar/bar.c | 29 + .../test cases/vala/11 generated vapi/libbar/bar.h | 9 + .../vala/11 generated vapi/libbar/meson.build | 33 + .../test cases/vala/11 generated vapi/libfoo/foo.c | 28 + .../test cases/vala/11 generated vapi/libfoo/foo.h | 9 + .../vala/11 generated vapi/libfoo/meson.build | 36 + meson/test cases/vala/11 generated vapi/main.vala | 9 + .../test cases/vala/11 generated vapi/meson.build | 13 + meson/test cases/vala/11 generated vapi/test.json | 13 + meson/test cases/vala/12 custom output/bar.vala | 0 meson/test cases/vala/12 custom output/foo.vala | 0 meson/test cases/vala/12 custom output/meson.build | 13 + meson/test cases/vala/13 find library/meson.build | 9 + meson/test cases/vala/13 find library/test.vala | 6 + .../gres/meson.build | 3 + .../gres/test-resources.xml | 6 + .../gres/test.ui | 19 + .../meson.build | 12 + .../test.vala | 37 + .../vala/15 static vapi in source tree/meson.build | 13 + .../vala/15 static vapi in source tree/test.vala | 6 + .../15 static vapi in source tree/vapi/config.vapi | 4 + meson/test cases/vala/16 mixed dependence/app.vala | 7 + .../vala/16 mixed dependence/meson.build | 16 + .../vala/16 mixed dependence/mixer-glue.c | 5 + .../test cases/vala/16 mixed dependence/mixer.vala | 3 + meson/test cases/vala/17 plain consumer/app.c | 11 + .../test cases/vala/17 plain consumer/badger.vala | 10 + .../test cases/vala/17 plain consumer/meson.build | 12 + .../vala/18 vapi consumed twice/app.vala | 11 + .../vala/18 vapi consumed twice/beer.vala | 10 + .../vala/18 vapi consumed twice/meson.build | 15 + .../vala/18 vapi consumed twice/person.vala | 16 + meson/test cases/vala/19 genie/meson.build | 6 + meson/test cases/vala/19 genie/prog.gs | 2 + meson/test cases/vala/2 multiple files/class1.vala | 7 + meson/test cases/vala/2 multiple files/class2.vala | 6 + meson/test cases/vala/2 multiple files/main.vala | 8 + meson/test cases/vala/2 multiple 
files/meson.build | 10 + .../20 genie multiple mixed sources/c_test_one.c | 5 + .../20 genie multiple mixed sources/c_test_two.c | 5 + .../vala/20 genie multiple mixed sources/init.gs | 11 + .../20 genie multiple mixed sources/meson.build | 19 + .../20 genie multiple mixed sources/test_one.gs | 5 + .../20 genie multiple mixed sources/test_two.gs | 5 + .../vala_test_one.vala | 7 + .../vala_test_two.vala | 7 + meson/test cases/vala/21 type module/foo.vala | 17 + meson/test cases/vala/21 type module/meson.build | 20 + .../test cases/vala/21 type module/plugin-bar.vala | 11 + .../vala/21 type module/plugin-module.vala | 54 + meson/test cases/vala/21 type module/plugin.vala | 4 + .../Subdir/Subdir2/Test.vala | 5 + .../22 same target in directories/Subdir/Test.vala | 5 + .../Subdir2/Test.vala | 5 + .../vala/22 same target in directories/Test.vala | 5 + .../vala/22 same target in directories/meson.build | 13 + .../vala/22 same target in directories/prog.vala | 8 + meson/test cases/vala/23 thread flags/meson.build | 7 + meson/test cases/vala/23 thread flags/prog.vala | 2 + .../vala/24 export dynamic shared module/app.vala | 21 + .../24 export dynamic shared module/meson.build | 20 + .../24 export dynamic shared module/module.vala | 3 + .../vala/25 extract_all_objects/meson.build | 11 + .../vala/25 extract_all_objects/prog.vala | 7 + meson/test cases/vala/26 vala and asm/meson.build | 23 + meson/test cases/vala/26 vala and asm/prog.vala | 9 + meson/test cases/vala/26 vala and asm/retval-arm.S | 11 + meson/test cases/vala/26 vala and asm/retval-x86.S | 12 + .../vala/26 vala and asm/retval-x86_64.S | 11 + .../vala/26 vala and asm/symbol-underscore.h | 5 + meson/test cases/vala/3 dep/gioprog.vala | 8 + meson/test cases/vala/3 dep/meson.build | 12 + meson/test cases/vala/4 config/config.vapi | 1 + .../vala/4 config/meson-something-else.vapi | 1 + meson/test cases/vala/4 config/meson.build | 15 + meson/test cases/vala/4 config/prog.vala | 8 + .../test cases/vala/5 target glib/GLib.Thread.vala | 43 + meson/test cases/vala/5 target glib/meson.build | 10 + meson/test cases/vala/5 target glib/retcode.c | 5 + meson/test cases/vala/6 static library/meson.build | 17 + meson/test cases/vala/6 static library/mylib.vala | 5 + meson/test cases/vala/6 static library/prog.vala | 7 + meson/test cases/vala/6 static library/test.json | 5 + .../vala/7 shared library/lib/meson.build | 35 + .../vala/7 shared library/lib/mylib.vala | 5 + meson/test cases/vala/7 shared library/meson.build | 10 + .../vala/7 shared library/prog/meson.build | 4 + .../vala/7 shared library/prog/prog.vala | 7 + meson/test cases/vala/7 shared library/test.json | 14 + .../dependency-generated/enum-types.c.template | 43 + .../dependency-generated/enum-types.h.template | 26 + .../dependency-generated/enums.h | 15 + .../dependency-generated/lib.vala | 3 + .../dependency-generated/main.vala | 3 + .../dependency-generated/meson.build | 44 + .../dependency-generated/null.c | 1 + .../vala/8 generated sources/meson.build | 14 + .../vala/8 generated sources/onlygen/maingen.in | 3 + .../vala/8 generated sources/onlygen/meson.build | 7 + .../vala/8 generated sources/src/config.vala.in | 3 + .../vala/8 generated sources/src/copy_file.py | 6 + .../vala/8 generated sources/src/meson.build | 17 + .../vala/8 generated sources/src/returncode.in | 3 + .../vala/8 generated sources/src/test.vala | 4 + .../vala/8 generated sources/src/write_wrapper.py | 12 + .../test cases/vala/8 generated sources/test.json | 7 + .../vala/8 generated sources/tools/meson.build | 3 + 
meson/test cases/vala/9 gir/foo.vala | 7 + meson/test cases/vala/9 gir/meson.build | 18 + meson/test cases/vala/9 gir/test.json | 7 + .../warning/1 version for string div/a/b.c | 3 + .../warning/1 version for string div/meson.build | 3 + .../warning/1 version for string div/test.json | 8 + .../warning/2 languages missing native/meson.build | 3 + .../warning/2 languages missing native/test.json | 7 + .../warning/3 fallback consistency/meson.build | 7 + .../subprojects/sub/meson.build | 5 + .../warning/3 fallback consistency/test.json | 7 + .../warning/4 fallback consistency/meson.build | 4 + .../subprojects/sub/meson.build | 5 + .../warning/4 fallback consistency/test.json | 7 + .../warning/5 fallback consistency/meson.build | 4 + .../5 fallback consistency/subprojects/foo.wrap | 6 + .../subprojects/foo/meson.build | 6 + .../warning/5 fallback consistency/test.json | 7 + meson/test cases/wasm/1 basic/hello.c | 7 + meson/test cases/wasm/1 basic/hello.cpp | 7 + meson/test cases/wasm/1 basic/hello.html | 8 + meson/test cases/wasm/1 basic/meson.build | 4 + meson/test cases/wasm/2 threads/meson.build | 10 + meson/test cases/wasm/2 threads/threads.c | 21 + meson/test cases/wasm/2 threads/threads.cpp | 13 + meson/test cases/windows/1 basic/meson.build | 4 + meson/test cases/windows/1 basic/prog.c | 5 + meson/test cases/windows/1 basic/test.json | 6 + .../meson.build | 10 + .../prog.c | 5 + .../subdir/make_def.py | 6 + .../subdir/meson.build | 7 + .../subdir/somedll.c | 3 + meson/test cases/windows/11 exe implib/meson.build | 7 + meson/test cases/windows/11 exe implib/prog.c | 6 + meson/test cases/windows/11 exe implib/test.json | 12 + .../12 resources with custom targets/meson.build | 70 + .../12 resources with custom targets/prog.c | 19 + .../res/gen-res.py | 6 + .../res/meson.build | 19 + .../res/myres.rc.in | 4 + .../res/myres_static.rc | 3 + .../res/resource.h | 0 .../res/sample.ico | Bin 0 -> 9662 bytes .../13 test argument extra paths/exe/main.c | 5 + .../13 test argument extra paths/exe/meson.build | 3 + .../windows/13 test argument extra paths/lib/foo.c | 6 + .../windows/13 test argument extra paths/lib/foo.h | 14 + .../13 test argument extra paths/lib/meson.build | 3 + .../13 test argument extra paths/meson.build | 5 + .../13 test argument extra paths/test/meson.build | 3 + .../test/test_run_exe.py | 12 + .../ico/gen-ico.py | 6 + .../ico/meson.build | 8 + .../ico/sample.ico.in | Bin 0 -> 9662 bytes .../meson.build | 69 + .../prog.c | 19 + .../res/meson.build | 4 + .../res/myres.rc | 3 + .../a/meson.build | 1 + .../a/rsrc.rc | 1 + .../b/meson.build | 2 + .../b/rsrc.rc | 1 + .../c/meson.build | 2 + .../c/rsrc.rc | 1 + .../exe3/meson.build | 5 + .../exe3/src_dll/main.c | 10 + .../exe3/src_dll/version.rc | 11 + .../exe3/src_exe/main.c | 3 + .../exe3/src_exe/version.rc | 11 + .../exe4/meson.build | 5 + .../exe4/src_dll/main.c | 10 + .../exe4/src_dll/version.rc | 11 + .../exe4/src_exe/main.c | 3 + .../exe4/src_exe/version.rc | 11 + .../meson.build | 21 + .../rsrc.rc | 1 + .../verify.c | 25 + meson/test cases/windows/16 gui app/console_prog.c | 3 + meson/test cases/windows/16 gui app/dummy.c | 0 .../windows/16 gui app/gui_app_tester.py | 19 + meson/test cases/windows/16 gui app/gui_prog.c | 11 + meson/test cases/windows/16 gui app/meson.build | 26 + meson/test cases/windows/17 msvc ndebug/main.cpp | 9 + .../test cases/windows/17 msvc ndebug/meson.build | 7 + meson/test cases/windows/2 winmain/meson.build | 4 + meson/test cases/windows/2 winmain/prog.c | 15 + meson/test cases/windows/3 
cpp/meson.build | 4 + meson/test cases/windows/3 cpp/prog.cpp | 7 + meson/test cases/windows/4 winmaincpp/meson.build | 4 + meson/test cases/windows/4 winmaincpp/prog.cpp | 17 + .../test cases/windows/5 resources/inc/meson.build | 1 + .../windows/5 resources/inc/resource/resource.h | 1 + meson/test cases/windows/5 resources/meson.build | 69 + meson/test cases/windows/5 resources/prog.c | 21 + meson/test cases/windows/5 resources/res/dummy.c | 0 .../test cases/windows/5 resources/res/meson.build | 10 + meson/test cases/windows/5 resources/res/myres.rc | 4 + .../test cases/windows/5 resources/res/sample.ico | Bin 0 -> 9662 bytes .../windows/6 vs module defs/meson.build | 5 + meson/test cases/windows/6 vs module defs/prog.c | 5 + .../windows/6 vs module defs/subdir/meson.build | 1 + .../windows/6 vs module defs/subdir/somedll.c | 3 + .../windows/6 vs module defs/subdir/somedll.def | 3 + .../windows/7 dll versioning/copyfile.py | 6 + .../test cases/windows/7 dll versioning/exe.orig.c | 8 + meson/test cases/windows/7 dll versioning/lib.c | 6 + .../windows/7 dll versioning/meson.build | 54 + .../test cases/windows/7 dll versioning/test.json | 34 + .../test cases/windows/8 find program/meson.build | 12 + .../test cases/windows/8 find program/test-script | 3 + .../windows/8 find program/test-script-ext.py | 3 + .../windows/9 vs module defs generated/meson.build | 5 + .../windows/9 vs module defs generated/prog.c | 5 + .../9 vs module defs generated/subdir/meson.build | 10 + .../9 vs module defs generated/subdir/somedll.c | 3 + .../subdir/somedll.def.in | 2 + meson/tools/ac_converter.py | 453 + meson/tools/boost_names.py | 300 + meson/tools/build_website.py | 51 + meson/tools/cmake2meson.py | 330 + meson/tools/copy_files.py | 55 + meson/tools/dircondenser.py | 89 + meson/tools/gen_data.py | 140 + meson/tools/regenerate_docs.py | 161 + meson/tools/run_with_cov.py | 53 + 3868 files changed, 161306 insertions(+) create mode 100644 meson/.editorconfig create mode 100644 meson/.flake8 create mode 100644 meson/.gitattributes create mode 100644 meson/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 meson/.github/codecov.yml create mode 100644 meson/.github/workflows/cygwin.yml create mode 100644 meson/.github/workflows/file_format.yml create mode 100644 meson/.github/workflows/images.yml create mode 100644 meson/.github/workflows/lint_mypy.yml create mode 100644 meson/.github/workflows/macos.yml create mode 100644 meson/.github/workflows/msys2.yml create mode 100644 meson/.github/workflows/nonative.yml create mode 100644 meson/.github/workflows/os_comp.yml create mode 100644 meson/.github/workflows/unusedargs_missingreturn.yml create mode 100644 meson/.github/workflows/website.yml create mode 100644 meson/.gitignore create mode 100644 meson/.lgtm.yml create mode 100644 meson/.mailmap create mode 100644 meson/.mypy.ini create mode 100644 meson/.pylintrc create mode 100644 meson/CODEOWNERS create mode 100644 meson/COPYING create mode 100644 meson/MANIFEST.in create mode 100644 meson/README.md create mode 100644 meson/azure-pipelines.yml create mode 100644 meson/ci/azure-steps.yml create mode 100644 meson/ci/ciimage/.gitignore create mode 100644 meson/ci/ciimage/arch/image.json create mode 100755 meson/ci/ciimage/arch/install.sh create mode 100644 meson/ci/ciimage/bionic/image.json create mode 100755 meson/ci/ciimage/bionic/install.sh create mode 100755 meson/ci/ciimage/build.py create mode 100644 meson/ci/ciimage/common.sh create mode 100644 meson/ci/ciimage/cuda/image.json create mode 100755 
meson/ci/ciimage/cuda/install.sh create mode 100644 meson/ci/ciimage/fedora/image.json create mode 100755 meson/ci/ciimage/fedora/install.sh create mode 100644 meson/ci/ciimage/opensuse/image.json create mode 100755 meson/ci/ciimage/opensuse/install.sh create mode 100644 meson/ci/ciimage/ubuntu-rolling/image.json create mode 100755 meson/ci/ciimage/ubuntu-rolling/install.sh create mode 100755 meson/ci/ciimage/ubuntu-rolling/test.sh create mode 100644 meson/ci/run.ps1 create mode 100755 meson/ci/upload_cov.sh create mode 100644 meson/ci/usercustomize.py create mode 100644 meson/contributing.md create mode 100644 meson/cross/arm64cl.txt create mode 100644 meson/cross/armcc.txt create mode 100644 meson/cross/armclang-linux.txt create mode 100644 meson/cross/armclang.txt create mode 100644 meson/cross/c2000.txt create mode 100644 meson/cross/ccomp-armv7a.txt create mode 100644 meson/cross/ccrx.txt create mode 100644 meson/cross/iphone.txt create mode 100644 meson/cross/linux-mingw-w64-32bit.json create mode 100644 meson/cross/linux-mingw-w64-32bit.txt create mode 100644 meson/cross/linux-mingw-w64-64bit.json create mode 100644 meson/cross/linux-mingw-w64-64bit.txt create mode 100644 meson/cross/none.txt create mode 100644 meson/cross/ownstdlib.txt create mode 100644 meson/cross/tvos.txt create mode 100644 meson/cross/ubuntu-armhf.json create mode 100644 meson/cross/ubuntu-armhf.txt create mode 100644 meson/cross/ubuntu-faketarget.txt create mode 100644 meson/cross/wasm.txt create mode 100644 meson/cross/xc16.txt create mode 100644 meson/data/.coveragerc.in create mode 100644 meson/data/com.mesonbuild.install.policy create mode 100644 meson/data/macros.meson create mode 100644 meson/data/schema.xsd create mode 100644 meson/data/shell-completions/bash/meson create mode 100644 meson/data/shell-completions/zsh/_meson create mode 100644 meson/data/syntax-highlighting/emacs/meson.el create mode 100644 meson/data/syntax-highlighting/vim/README create mode 100644 meson/data/syntax-highlighting/vim/ftdetect/meson.vim create mode 100644 meson/data/syntax-highlighting/vim/ftplugin/meson.vim create mode 100644 meson/data/syntax-highlighting/vim/indent/meson.vim create mode 100644 meson/data/syntax-highlighting/vim/syntax/meson.vim create mode 100644 meson/data/test.schema.json create mode 100644 meson/docs/.editorconfig create mode 100644 meson/docs/README.md create mode 100755 meson/docs/genrelnotes.py create mode 100644 meson/docs/markdown/ARM-performance-test.md create mode 100644 meson/docs/markdown/Adding-arguments.md create mode 100644 meson/docs/markdown/Adding-new-projects-to-wrapdb.md create mode 100644 meson/docs/markdown/Additional.md create mode 100644 meson/docs/markdown/Build-options.md create mode 100644 meson/docs/markdown/Build-system-converters.md create mode 100644 meson/docs/markdown/Build-targets.md create mode 100644 meson/docs/markdown/Builtin-options.md create mode 100644 meson/docs/markdown/CMake-module.md create mode 100644 meson/docs/markdown/Code-formatting.md create mode 100644 meson/docs/markdown/Commands.md create mode 100644 meson/docs/markdown/Comparisons.md create mode 100644 meson/docs/markdown/Compiler-properties.md create mode 100644 meson/docs/markdown/Conference-presentations.md create mode 100644 meson/docs/markdown/Configuration.md create mode 100644 meson/docs/markdown/Configuring-a-build-directory.md create mode 100644 meson/docs/markdown/Contact-information.md create mode 100644 meson/docs/markdown/Continuous-Integration.md create mode 100644 
meson/docs/markdown/Contributing.md create mode 100644 meson/docs/markdown/Creating-Linux-binaries.md create mode 100644 meson/docs/markdown/Creating-OSX-packages.md create mode 100644 meson/docs/markdown/Creating-releases.md create mode 100644 meson/docs/markdown/Cross-compilation.md create mode 100644 meson/docs/markdown/Cuda-module.md create mode 100644 meson/docs/markdown/Custom-build-targets.md create mode 100644 meson/docs/markdown/Cython.md create mode 100644 meson/docs/markdown/D.md create mode 100644 meson/docs/markdown/Dependencies.md create mode 100644 meson/docs/markdown/Design-rationale.md create mode 100644 meson/docs/markdown/Disabler.md create mode 100644 meson/docs/markdown/Dlang-module.md create mode 100644 meson/docs/markdown/External-Project-module.md create mode 100644 meson/docs/markdown/External-commands.md create mode 100644 meson/docs/markdown/FAQ.md create mode 100644 meson/docs/markdown/Feature-autodetection.md create mode 100644 meson/docs/markdown/Fs-module.md create mode 100644 meson/docs/markdown/Generating-sources.md create mode 100644 meson/docs/markdown/Getting-meson.md create mode 100644 meson/docs/markdown/Getting-meson_ptbr.md create mode 100644 meson/docs/markdown/Getting-meson_zh.md create mode 100644 meson/docs/markdown/Gnome-module.md create mode 100644 meson/docs/markdown/Hotdoc-module.md create mode 100644 meson/docs/markdown/IDE-integration.md create mode 100644 meson/docs/markdown/Icestorm-module.md create mode 100644 meson/docs/markdown/In-the-press.md create mode 100644 meson/docs/markdown/Include-directories.md create mode 100644 meson/docs/markdown/IndepthTutorial.md create mode 100644 meson/docs/markdown/Installing.md create mode 100644 meson/docs/markdown/Java.md create mode 100644 meson/docs/markdown/Keyval-module.md create mode 100644 meson/docs/markdown/Localisation.md create mode 100644 meson/docs/markdown/Machine-files.md create mode 100644 meson/docs/markdown/Manual.md create mode 100644 meson/docs/markdown/Meson-sample.md create mode 100644 meson/docs/markdown/MesonCI.md create mode 100644 meson/docs/markdown/Mixing-build-systems.md create mode 100644 meson/docs/markdown/Modules.md create mode 100644 meson/docs/markdown/Native-environments.md create mode 100644 meson/docs/markdown/Overview.md create mode 100644 meson/docs/markdown/Performance-comparison.md create mode 100644 meson/docs/markdown/Pkg-config-files.md create mode 100644 meson/docs/markdown/Pkgconfig-module.md create mode 100644 meson/docs/markdown/Playground.md create mode 100644 meson/docs/markdown/Porting-from-autotools.md create mode 100644 meson/docs/markdown/Precompiled-headers.md create mode 100644 meson/docs/markdown/Project-templates.md create mode 100644 meson/docs/markdown/Python-3-module.md create mode 100644 meson/docs/markdown/Python-module.md create mode 100644 meson/docs/markdown/Qt4-module.md create mode 100644 meson/docs/markdown/Qt5-module.md create mode 100644 meson/docs/markdown/Qt6-module.md create mode 100644 meson/docs/markdown/Quick-guide.md create mode 100644 meson/docs/markdown/RPM-module.md create mode 100644 meson/docs/markdown/Reference-manual.md create mode 100644 meson/docs/markdown/Reference-tables.md create mode 100644 meson/docs/markdown/Release-notes-for-0.37.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.38.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.39.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.40.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.41.0.md create 
mode 100644 meson/docs/markdown/Release-notes-for-0.42.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.43.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.44.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.45.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.46.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.47.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.48.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.49.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.50.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.51.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.52.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.53.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.54.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.55.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.56.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.57.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.58.0.md create mode 100644 meson/docs/markdown/Release-notes-for-0.59.0.md create mode 100644 meson/docs/markdown/Release-notes.md create mode 100644 meson/docs/markdown/Release-procedure.md create mode 100644 meson/docs/markdown/Reproducible-builds.md create mode 100644 meson/docs/markdown/Rewriter.md create mode 100644 meson/docs/markdown/Run-targets.md create mode 100644 meson/docs/markdown/Running-Meson.md create mode 100644 meson/docs/markdown/Rust-module.md create mode 100644 meson/docs/markdown/Shipping-prebuilt-binaries-as-wraps.md create mode 100644 meson/docs/markdown/Simd-module.md create mode 100644 meson/docs/markdown/Simple-comparison.md create mode 100644 meson/docs/markdown/SimpleStart.md create mode 100644 meson/docs/markdown/SourceSet-module.md create mode 100644 meson/docs/markdown/Style-guide.md create mode 100644 meson/docs/markdown/Subprojects.md create mode 100644 meson/docs/markdown/Syntax.md create mode 100644 meson/docs/markdown/Threads.md create mode 100644 meson/docs/markdown/Tutorial.md create mode 100644 meson/docs/markdown/Unit-tests.md create mode 100644 meson/docs/markdown/Unity-builds.md create mode 100644 meson/docs/markdown/Use-of-Python.md create mode 100644 meson/docs/markdown/Users.md create mode 100644 meson/docs/markdown/Using-multiple-build-directories.md create mode 100644 meson/docs/markdown/Using-the-WrapDB.md create mode 100644 meson/docs/markdown/Using-with-Visual-Studio.md create mode 100644 meson/docs/markdown/Using-wraptool.md create mode 100644 meson/docs/markdown/Vala.md create mode 100644 meson/docs/markdown/Videos.md create mode 100644 meson/docs/markdown/Vs-External.md create mode 100644 meson/docs/markdown/Windows-module.md create mode 100644 meson/docs/markdown/Wrap-best-practices-and-tips.md create mode 100644 meson/docs/markdown/Wrap-dependency-system-manual.md create mode 100644 meson/docs/markdown/Wrap-review-guidelines.md create mode 100644 meson/docs/markdown/Wrapdb-projects.md create mode 100644 meson/docs/markdown/_Sidebar.md create mode 100644 meson/docs/markdown/_include_qt_base.md create mode 100644 meson/docs/markdown/fallback-wraptool.md create mode 100644 meson/docs/markdown/howtox.md create mode 100644 meson/docs/markdown/i18n-module.md create mode 100644 meson/docs/markdown/images/buildtime.png create mode 100644 meson/docs/markdown/images/conftime.png create mode 100644 meson/docs/markdown/images/emptytime.png 
create mode 100644 meson/docs/markdown/images/glib_build.png create mode 100644 meson/docs/markdown/images/glib_conf.png create mode 100644 meson/docs/markdown/images/glib_empty.png create mode 100644 meson/docs/markdown/images/glib_link.png create mode 100644 meson/docs/markdown/images/gtksample.png create mode 100644 meson/docs/markdown/images/linux_alldone.png create mode 100755 meson/docs/markdown/images/meson_mac1.png create mode 100755 meson/docs/markdown/images/meson_mac2.png create mode 100755 meson/docs/markdown/images/meson_mac3.png create mode 100755 meson/docs/markdown/images/meson_mac4.png create mode 100755 meson/docs/markdown/images/meson_mac5.png create mode 100644 meson/docs/markdown/images/osx_xcode.png create mode 100644 meson/docs/markdown/images/py3-install-1.png create mode 100644 meson/docs/markdown/images/py3-install-2.png create mode 100644 meson/docs/markdown/images/py3-install-3.png create mode 100644 meson/docs/markdown/images/win_dlvs.png create mode 100644 meson/docs/markdown/images/win_downloadmeson.png create mode 100644 meson/docs/markdown/images/win_installvs.png create mode 100644 meson/docs/markdown/images/win_vstoolsprompt.png create mode 100644 meson/docs/markdown/images/win_working.png create mode 100644 meson/docs/markdown/index.md create mode 100644 meson/docs/markdown/legal.md create mode 100644 meson/docs/markdown/snippets/add_release_note_snippets_here create mode 100644 meson/docs/meson.build create mode 100644 meson/docs/sitemap.txt create mode 100644 meson/docs/theme/extra/images/favicon.png create mode 100644 meson/docs/theme/extra/images/meson_logo.png create mode 100644 meson/docs/theme/extra/prism_components/prism-meson.js create mode 100644 meson/docs/theme/extra/prism_components/prism-meson.min.js create mode 100644 meson/docs/theme/extra/templates/brand-logo.html create mode 100644 meson/docs/theme/extra/templates/extra_head.html create mode 100644 meson/docs/theme/extra/templates/license.html create mode 100644 meson/docs/theme/extra/templates/navbar_center.html create mode 100644 meson/docs/theme/extra/templates/navbar_links.html create mode 100755 meson/ghwt.py create mode 100644 meson/graphics/meson_logo.svg create mode 100644 meson/graphics/meson_logo_big.png create mode 100644 meson/graphics/wrap_logo.svg create mode 100644 meson/man/meson.1 create mode 100644 meson/manual tests/1 wrap/main.c create mode 100644 meson/manual tests/1 wrap/meson.build create mode 100644 meson/manual tests/1 wrap/subprojects/sqlite.wrap create mode 100644 meson/manual tests/10 svn wrap/meson.build create mode 100644 meson/manual tests/10 svn wrap/prog.c create mode 100644 meson/manual tests/10 svn wrap/subprojects/samplesubproject.wrap create mode 100644 meson/manual tests/11 wrap imposter/meson.build create mode 100644 meson/manual tests/11 wrap imposter/subprojects/zlib.wrap create mode 100644 meson/manual tests/12 wrap mirror/meson.build create mode 100644 meson/manual tests/12 wrap mirror/subprojects/zlib.wrap create mode 100644 meson/manual tests/2 multiwrap/meson.build create mode 100644 meson/manual tests/2 multiwrap/prog.c create mode 100644 meson/manual tests/2 multiwrap/subprojects/libpng.wrap create mode 100644 meson/manual tests/2 multiwrap/subprojects/lua.wrap create mode 100644 meson/manual tests/2 multiwrap/subprojects/zlib.wrap create mode 100644 meson/manual tests/3 git wrap/meson.build create mode 100644 meson/manual tests/3 git wrap/prog.c create mode 100644 meson/manual tests/3 git wrap/subprojects/samplesubproject.wrap create mode 
100644 meson/manual tests/4 standalone binaries/Info.plist create mode 100755 meson/manual tests/4 standalone binaries/build_linux_package.sh create mode 100755 meson/manual tests/4 standalone binaries/build_osx_package.sh create mode 100755 meson/manual tests/4 standalone binaries/build_windows_package.py create mode 100755 meson/manual tests/4 standalone binaries/linux_bundler.sh create mode 100644 meson/manual tests/4 standalone binaries/meson.build create mode 100644 meson/manual tests/4 standalone binaries/myapp.cpp create mode 100644 meson/manual tests/4 standalone binaries/myapp.icns create mode 100644 meson/manual tests/4 standalone binaries/myapp.iss create mode 100755 meson/manual tests/4 standalone binaries/myapp.sh create mode 100755 meson/manual tests/4 standalone binaries/osx_bundler.sh create mode 100644 meson/manual tests/4 standalone binaries/readme.txt create mode 100644 meson/manual tests/4 standalone binaries/template.dmg.gz create mode 100644 meson/manual tests/5 rpm/lib.c create mode 100644 meson/manual tests/5 rpm/lib.h create mode 100644 meson/manual tests/5 rpm/main.c create mode 100644 meson/manual tests/5 rpm/meson.build create mode 100644 meson/manual tests/6 hg wrap/meson.build create mode 100644 meson/manual tests/6 hg wrap/prog.c create mode 100644 meson/manual tests/6 hg wrap/subprojects/samplesubproject.wrap create mode 100644 meson/manual tests/7 vala composite widgets/meson.build create mode 100644 meson/manual tests/7 vala composite widgets/my-resources.xml create mode 100644 meson/manual tests/7 vala composite widgets/mywidget.ui create mode 100644 meson/manual tests/7 vala composite widgets/mywidget.vala create mode 100644 meson/manual tests/8 timeout/meson.build create mode 100644 meson/manual tests/8 timeout/sleepprog.c create mode 100755 meson/meson.py create mode 100644 meson/mesonbuild/__init__.py create mode 100644 meson/mesonbuild/_pathlib.py create mode 100644 meson/mesonbuild/_typing.py create mode 100644 meson/mesonbuild/arglist.py create mode 100644 meson/mesonbuild/ast/__init__.py create mode 100644 meson/mesonbuild/ast/interpreter.py create mode 100644 meson/mesonbuild/ast/introspection.py create mode 100644 meson/mesonbuild/ast/postprocess.py create mode 100644 meson/mesonbuild/ast/printer.py create mode 100644 meson/mesonbuild/ast/visitor.py create mode 100644 meson/mesonbuild/backend/__init__.py create mode 100644 meson/mesonbuild/backend/backends.py create mode 100644 meson/mesonbuild/backend/ninjabackend.py create mode 100644 meson/mesonbuild/backend/vs2010backend.py create mode 100644 meson/mesonbuild/backend/vs2012backend.py create mode 100644 meson/mesonbuild/backend/vs2013backend.py create mode 100644 meson/mesonbuild/backend/vs2015backend.py create mode 100644 meson/mesonbuild/backend/vs2017backend.py create mode 100644 meson/mesonbuild/backend/vs2019backend.py create mode 100644 meson/mesonbuild/backend/xcodebackend.py create mode 100644 meson/mesonbuild/build.py create mode 100644 meson/mesonbuild/cmake/__init__.py create mode 100644 meson/mesonbuild/cmake/client.py create mode 100644 meson/mesonbuild/cmake/common.py create mode 100644 meson/mesonbuild/cmake/data/preload.cmake create mode 100644 meson/mesonbuild/cmake/executor.py create mode 100644 meson/mesonbuild/cmake/fileapi.py create mode 100644 meson/mesonbuild/cmake/generator.py create mode 100644 meson/mesonbuild/cmake/interpreter.py create mode 100644 meson/mesonbuild/cmake/toolchain.py create mode 100644 meson/mesonbuild/cmake/traceparser.py create mode 100644 
meson/mesonbuild/compilers/__init__.py create mode 100644 meson/mesonbuild/compilers/c.py create mode 100644 meson/mesonbuild/compilers/c_function_attributes.py create mode 100644 meson/mesonbuild/compilers/compilers.py create mode 100644 meson/mesonbuild/compilers/cpp.py create mode 100644 meson/mesonbuild/compilers/cs.py create mode 100644 meson/mesonbuild/compilers/cuda.py create mode 100644 meson/mesonbuild/compilers/cython.py create mode 100644 meson/mesonbuild/compilers/d.py create mode 100644 meson/mesonbuild/compilers/detect.py create mode 100644 meson/mesonbuild/compilers/fortran.py create mode 100644 meson/mesonbuild/compilers/java.py create mode 100644 meson/mesonbuild/compilers/mixins/__init__.py create mode 100644 meson/mesonbuild/compilers/mixins/arm.py create mode 100644 meson/mesonbuild/compilers/mixins/c2000.py create mode 100644 meson/mesonbuild/compilers/mixins/ccrx.py create mode 100644 meson/mesonbuild/compilers/mixins/clang.py create mode 100644 meson/mesonbuild/compilers/mixins/clike.py create mode 100644 meson/mesonbuild/compilers/mixins/compcert.py create mode 100644 meson/mesonbuild/compilers/mixins/elbrus.py create mode 100644 meson/mesonbuild/compilers/mixins/emscripten.py create mode 100644 meson/mesonbuild/compilers/mixins/gnu.py create mode 100644 meson/mesonbuild/compilers/mixins/intel.py create mode 100644 meson/mesonbuild/compilers/mixins/islinker.py create mode 100644 meson/mesonbuild/compilers/mixins/pgi.py create mode 100644 meson/mesonbuild/compilers/mixins/visualstudio.py create mode 100644 meson/mesonbuild/compilers/mixins/xc16.py create mode 100644 meson/mesonbuild/compilers/objc.py create mode 100644 meson/mesonbuild/compilers/objcpp.py create mode 100644 meson/mesonbuild/compilers/rust.py create mode 100644 meson/mesonbuild/compilers/swift.py create mode 100644 meson/mesonbuild/compilers/vala.py create mode 100644 meson/mesonbuild/coredata.py create mode 100644 meson/mesonbuild/dependencies/__init__.py create mode 100644 meson/mesonbuild/dependencies/base.py create mode 100644 meson/mesonbuild/dependencies/boost.py create mode 100644 meson/mesonbuild/dependencies/cmake.py create mode 100644 meson/mesonbuild/dependencies/coarrays.py create mode 100644 meson/mesonbuild/dependencies/configtool.py create mode 100644 meson/mesonbuild/dependencies/cuda.py create mode 100644 meson/mesonbuild/dependencies/data/CMakeLists.txt create mode 100644 meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt create mode 100644 meson/mesonbuild/dependencies/data/CMakePathInfo.txt create mode 100644 meson/mesonbuild/dependencies/detect.py create mode 100644 meson/mesonbuild/dependencies/dev.py create mode 100644 meson/mesonbuild/dependencies/dub.py create mode 100644 meson/mesonbuild/dependencies/factory.py create mode 100644 meson/mesonbuild/dependencies/framework.py create mode 100644 meson/mesonbuild/dependencies/hdf5.py create mode 100644 meson/mesonbuild/dependencies/misc.py create mode 100644 meson/mesonbuild/dependencies/mpi.py create mode 100644 meson/mesonbuild/dependencies/pkgconfig.py create mode 100644 meson/mesonbuild/dependencies/platform.py create mode 100644 meson/mesonbuild/dependencies/qt.py create mode 100644 meson/mesonbuild/dependencies/scalapack.py create mode 100644 meson/mesonbuild/dependencies/ui.py create mode 100644 meson/mesonbuild/depfile.py create mode 100644 meson/mesonbuild/envconfig.py create mode 100644 meson/mesonbuild/environment.py create mode 100644 meson/mesonbuild/interpreter/__init__.py create mode 100644 
meson/mesonbuild/interpreter/compiler.py create mode 100644 meson/mesonbuild/interpreter/dependencyfallbacks.py create mode 100644 meson/mesonbuild/interpreter/interpreter.py create mode 100644 meson/mesonbuild/interpreter/interpreterobjects.py create mode 100644 meson/mesonbuild/interpreter/kwargs.py create mode 100644 meson/mesonbuild/interpreter/mesonmain.py create mode 100644 meson/mesonbuild/interpreterbase/__init__.py create mode 100644 meson/mesonbuild/interpreterbase/_unholder.py create mode 100644 meson/mesonbuild/interpreterbase/baseobjects.py create mode 100644 meson/mesonbuild/interpreterbase/decorators.py create mode 100644 meson/mesonbuild/interpreterbase/disabler.py create mode 100644 meson/mesonbuild/interpreterbase/exceptions.py create mode 100644 meson/mesonbuild/interpreterbase/helpers.py create mode 100644 meson/mesonbuild/interpreterbase/interpreterbase.py create mode 100644 meson/mesonbuild/linkers/__init__.py create mode 100644 meson/mesonbuild/linkers/detect.py create mode 100644 meson/mesonbuild/linkers/linkers.py create mode 100644 meson/mesonbuild/mcompile.py create mode 100644 meson/mesonbuild/mconf.py create mode 100644 meson/mesonbuild/mdevenv.py create mode 100644 meson/mesonbuild/mdist.py create mode 100644 meson/mesonbuild/mesondata.py create mode 100644 meson/mesonbuild/mesonlib/__init__.py create mode 100644 meson/mesonbuild/mesonlib/platform.py create mode 100644 meson/mesonbuild/mesonlib/posix.py create mode 100644 meson/mesonbuild/mesonlib/universal.py create mode 100644 meson/mesonbuild/mesonlib/win32.py create mode 100644 meson/mesonbuild/mesonmain.py create mode 100644 meson/mesonbuild/minit.py create mode 100644 meson/mesonbuild/minstall.py create mode 100644 meson/mesonbuild/mintro.py create mode 100644 meson/mesonbuild/mlog.py create mode 100644 meson/mesonbuild/modules/__init__.py create mode 100644 meson/mesonbuild/modules/cmake.py create mode 100644 meson/mesonbuild/modules/dlang.py create mode 100644 meson/mesonbuild/modules/fs.py create mode 100644 meson/mesonbuild/modules/gnome.py create mode 100644 meson/mesonbuild/modules/hotdoc.py create mode 100644 meson/mesonbuild/modules/i18n.py create mode 100644 meson/mesonbuild/modules/keyval.py create mode 100644 meson/mesonbuild/modules/modtest.py create mode 100644 meson/mesonbuild/modules/pkgconfig.py create mode 100644 meson/mesonbuild/modules/python.py create mode 100644 meson/mesonbuild/modules/python3.py create mode 100644 meson/mesonbuild/modules/qt.py create mode 100644 meson/mesonbuild/modules/qt4.py create mode 100644 meson/mesonbuild/modules/qt5.py create mode 100644 meson/mesonbuild/modules/qt6.py create mode 100644 meson/mesonbuild/modules/rpm.py create mode 100644 meson/mesonbuild/modules/sourceset.py create mode 100644 meson/mesonbuild/modules/unstable_cuda.py create mode 100644 meson/mesonbuild/modules/unstable_external_project.py create mode 100644 meson/mesonbuild/modules/unstable_icestorm.py create mode 100644 meson/mesonbuild/modules/unstable_rust.py create mode 100644 meson/mesonbuild/modules/unstable_simd.py create mode 100644 meson/mesonbuild/modules/windows.py create mode 100644 meson/mesonbuild/mparser.py create mode 100644 meson/mesonbuild/msetup.py create mode 100755 meson/mesonbuild/msubprojects.py create mode 100644 meson/mesonbuild/mtest.py create mode 100644 meson/mesonbuild/munstable_coredata.py create mode 100644 meson/mesonbuild/optinterpreter.py create mode 100644 meson/mesonbuild/programs.py create mode 100644 meson/mesonbuild/rewriter.py create mode 100644 
meson/mesonbuild/scripts/__init__.py create mode 100644 meson/mesonbuild/scripts/clangformat.py create mode 100644 meson/mesonbuild/scripts/clangtidy.py create mode 100644 meson/mesonbuild/scripts/cleantrees.py create mode 100755 meson/mesonbuild/scripts/cmake_run_ctgt.py create mode 100644 meson/mesonbuild/scripts/cmd_or_ps.ps1 create mode 100644 meson/mesonbuild/scripts/coverage.py create mode 100644 meson/mesonbuild/scripts/delwithsuffix.py create mode 100644 meson/mesonbuild/scripts/depfixer.py create mode 100644 meson/mesonbuild/scripts/depscan.py create mode 100644 meson/mesonbuild/scripts/dirchanger.py create mode 100644 meson/mesonbuild/scripts/externalproject.py create mode 100644 meson/mesonbuild/scripts/gettext.py create mode 100644 meson/mesonbuild/scripts/gtkdochelper.py create mode 100644 meson/mesonbuild/scripts/hotdochelper.py create mode 100644 meson/mesonbuild/scripts/meson_exe.py create mode 100644 meson/mesonbuild/scripts/msgfmthelper.py create mode 100644 meson/mesonbuild/scripts/regen_checker.py create mode 100644 meson/mesonbuild/scripts/scanbuild.py create mode 100644 meson/mesonbuild/scripts/symbolextractor.py create mode 100644 meson/mesonbuild/scripts/tags.py create mode 100644 meson/mesonbuild/scripts/uninstall.py create mode 100644 meson/mesonbuild/scripts/vcstagger.py create mode 100644 meson/mesonbuild/scripts/yelphelper.py create mode 100644 meson/mesonbuild/templates/__init__.py create mode 100644 meson/mesonbuild/templates/cpptemplates.py create mode 100644 meson/mesonbuild/templates/cstemplates.py create mode 100644 meson/mesonbuild/templates/ctemplates.py create mode 100644 meson/mesonbuild/templates/cudatemplates.py create mode 100644 meson/mesonbuild/templates/dlangtemplates.py create mode 100644 meson/mesonbuild/templates/fortrantemplates.py create mode 100644 meson/mesonbuild/templates/javatemplates.py create mode 100644 meson/mesonbuild/templates/mesontemplates.py create mode 100644 meson/mesonbuild/templates/objcpptemplates.py create mode 100644 meson/mesonbuild/templates/objctemplates.py create mode 100644 meson/mesonbuild/templates/rusttemplates.py create mode 100644 meson/mesonbuild/templates/samplefactory.py create mode 100644 meson/mesonbuild/templates/sampleimpl.py create mode 100644 meson/mesonbuild/wrap/__init__.py create mode 100644 meson/mesonbuild/wrap/wrap.py create mode 100644 meson/mesonbuild/wrap/wraptool.py create mode 100644 meson/packaging/License.rtf create mode 100755 meson/packaging/create_zipapp.py create mode 100755 meson/packaging/createmsi.py create mode 100755 meson/packaging/createpkg.py create mode 100644 meson/packaging/macpages/English.lproj/conclusion.html create mode 100644 meson/packaging/macpages/English.lproj/license.html create mode 100644 meson/packaging/macpages/English.lproj/welcome.html create mode 100644 meson/pyproject.toml create mode 100755 meson/run_cross_test.py create mode 100755 meson/run_custom_lint.py create mode 100644 meson/run_format_tests.py create mode 100755 meson/run_meson_command_tests.py create mode 100755 meson/run_mypy.py create mode 100755 meson/run_project_tests.py create mode 100755 meson/run_single_test.py create mode 100755 meson/run_tests.py create mode 100755 meson/run_unittests.py create mode 100644 meson/setup.cfg create mode 100644 meson/setup.py create mode 100644 meson/sider.yml create mode 100755 meson/skip_ci.py create mode 100644 meson/test cases/cmake/1 basic/main.cpp create mode 100644 meson/test cases/cmake/1 basic/meson.build create mode 100644 meson/test cases/cmake/1 
basic/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp create mode 100644 meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp create mode 100644 meson/test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp create mode 100644 meson/test cases/cmake/10 header only/main.cpp create mode 100644 meson/test cases/cmake/10 header only/meson.build create mode 100644 meson/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp create mode 100644 meson/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake create mode 100644 meson/test cases/cmake/11 cmake_module_path/meson.build create mode 100644 meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py create mode 100644 meson/test cases/cmake/11 cmake_module_path/test.json create mode 100644 meson/test cases/cmake/12 generator expressions/main.cpp create mode 100644 meson/test cases/cmake/12 generator expressions/meson.build create mode 100644 meson/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/12 generator expressions/subprojects/cmMod/include/cmMod.hpp create mode 100644 meson/test cases/cmake/13 system includes/main.cpp create mode 100644 meson/test cases/cmake/13 system includes/meson.build create mode 100644 meson/test cases/cmake/13 system includes/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.cpp create mode 100644 meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.hpp create mode 100644 meson/test cases/cmake/13 system includes/subprojects/cmMod/sysInc/triggerWarn.hpp create mode 100644 meson/test cases/cmake/14 fortran threads/meson.build create mode 100644 meson/test cases/cmake/15 object library advanced/main.cpp create mode 100644 meson/test cases/cmake/15 object library advanced/meson.build create mode 100644 meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/CMakeLists.txt create mode 100644 meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/genC.cpp create mode 100644 meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.cpp create mode 100644 meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.hpp create mode 100644 meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.cpp create mode 100644 meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.hpp create mode 100644 meson/test cases/cmake/16 threads/main.cpp create mode 100644 meson/test cases/cmake/16 threads/meson.build create mode 100644 meson/test cases/cmake/16 threads/meson_options.txt create mode 100644 meson/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.cpp create mode 100644 meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.hpp create mode 100644 meson/test cases/cmake/16 threads/subprojects/cmMod/main.cpp create mode 100644 meson/test cases/cmake/16 threads/test.json create mode 100644 meson/test cases/cmake/17 include path order/main.cpp create mode 100644 meson/test cases/cmake/17 include path order/meson.build create mode 100644 meson/test cases/cmake/17 include path 
order/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp create mode 100644 meson/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp create mode 100644 meson/test cases/cmake/18 skip include files/main.cpp create mode 100644 meson/test cases/cmake/18 skip include files/meson.build create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp create mode 100644 meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp create mode 100644 meson/test cases/cmake/19 advanced options/main.cpp create mode 100644 meson/test cases/cmake/19 advanced options/meson.build create mode 100644 meson/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt create mode 100644 meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp create mode 100644 meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp create mode 100644 meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp create mode 100644 meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp create mode 100644 meson/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp create mode 100644 meson/test cases/cmake/19 advanced options/test.json create mode 100644 meson/test cases/cmake/2 advanced/main.cpp create mode 100644 meson/test cases/cmake/2 advanced/meson.build create mode 100644 meson/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test 
cases/cmake/2 advanced/subprojects/cmMod/config.h.in create mode 100644 meson/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp create mode 100644 meson/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.hpp create mode 100644 meson/test cases/cmake/2 advanced/subprojects/cmMod/main.cpp create mode 100644 meson/test cases/cmake/2 advanced/test.json create mode 100644 meson/test cases/cmake/20 cmake file/foolib.cmake.in create mode 100644 meson/test cases/cmake/20 cmake file/meson.build create mode 100644 meson/test cases/cmake/20 cmake file/test.json create mode 100644 meson/test cases/cmake/21 shared module/meson.build create mode 100644 meson/test cases/cmake/21 shared module/prog.c create mode 100644 meson/test cases/cmake/21 shared module/runtime.c create mode 100644 meson/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c create mode 100644 meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h create mode 100644 meson/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt create mode 100644 meson/test cases/cmake/22 cmake module/meson.build create mode 100644 meson/test cases/cmake/22 cmake module/projectConfig.cmake.in create mode 100644 meson/test cases/cmake/22 cmake module/test.json create mode 100644 meson/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake create mode 100644 meson/test cases/cmake/23 cmake toolchain/meson.build create mode 100644 meson/test cases/cmake/23 cmake toolchain/nativefile.ini.in create mode 100644 meson/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt create mode 100644 meson/test cases/cmake/24 mixing languages/main.c create mode 100644 meson/test cases/cmake/24 mixing languages/meson.build create mode 100644 meson/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt create mode 100644 meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c create mode 100644 meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h create mode 100644 meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m create mode 100644 meson/test cases/cmake/25 assembler/main.c create mode 100644 meson/test cases/cmake/25 assembler/meson.build create mode 100644 meson/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt create mode 100644 meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTest.c create mode 100644 meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTestAsm.s create mode 100644 meson/test cases/cmake/3 advanced no dep/main.cpp create mode 100644 meson/test cases/cmake/3 advanced no dep/meson.build create mode 100644 meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/config.h.in create mode 100644 meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.cpp create mode 100644 meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.hpp create mode 100644 meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/main.cpp create mode 100644 meson/test cases/cmake/3 advanced no dep/test.json create mode 100644 meson/test cases/cmake/4 code gen/main.cpp create mode 100644 meson/test cases/cmake/4 code gen/meson.build create mode 100644 meson/test cases/cmake/4 code 
gen/subprojects/cmCodeGen/CMakeLists.txt create mode 100644 meson/test cases/cmake/4 code gen/subprojects/cmCodeGen/main.cpp create mode 100644 meson/test cases/cmake/4 code gen/test.hpp create mode 100644 meson/test cases/cmake/5 object library/main.cpp create mode 100644 meson/test cases/cmake/5 object library/meson.build create mode 100644 meson/test cases/cmake/5 object library/subprojects/cmObjLib/CMakeLists.txt create mode 100644 meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.cpp create mode 100644 meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.hpp create mode 100644 meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.cpp create mode 100644 meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.hpp create mode 100644 meson/test cases/cmake/6 object library no dep/main.cpp create mode 100644 meson/test cases/cmake/6 object library no dep/meson.build create mode 100644 meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/CMakeLists.txt create mode 100644 meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.cpp create mode 100644 meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.hpp create mode 100644 meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.cpp create mode 100644 meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.hpp create mode 100644 meson/test cases/cmake/7 cmake options/meson.build create mode 100644 meson/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt create mode 100644 meson/test cases/cmake/7 cmake options/test.json create mode 100644 meson/test cases/cmake/8 custom command/main.cpp create mode 100644 meson/test cases/cmake/8 custom command/meson.build create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/args_test.cpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.hpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cp.cpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.cpp.am create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.hpp.am create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.cpp.am create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.hpp.am create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp create mode 100644 meson/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp create 
mode 100644 meson/test cases/cmake/9 disabled subproject/meson.build create mode 100644 meson/test cases/common/1 trivial/meson.build create mode 100644 meson/test cases/common/1 trivial/trivial.c create mode 100644 meson/test cases/common/10 man install/bar.2 create mode 100644 meson/test cases/common/10 man install/baz.1.in create mode 100644 meson/test cases/common/10 man install/foo.1 create mode 100644 meson/test cases/common/10 man install/foo.fr.1 create mode 100644 meson/test cases/common/10 man install/meson.build create mode 100644 meson/test cases/common/10 man install/test.json create mode 100644 meson/test cases/common/10 man install/vanishing/meson.build create mode 100644 meson/test cases/common/10 man install/vanishing/vanishing.1 create mode 100644 meson/test cases/common/10 man install/vanishing/vanishing.2 create mode 100644 meson/test cases/common/100 postconf with args/meson.build create mode 100644 meson/test cases/common/100 postconf with args/postconf.py create mode 100644 meson/test cases/common/100 postconf with args/prog.c create mode 100644 meson/test cases/common/100 postconf with args/raw.dat create mode 100644 meson/test cases/common/101 testframework options/meson.build create mode 100644 meson/test cases/common/101 testframework options/meson_options.txt create mode 100644 meson/test cases/common/101 testframework options/test.json create mode 100644 meson/test cases/common/102 extract same name/lib.c create mode 100644 meson/test cases/common/102 extract same name/main.c create mode 100644 meson/test cases/common/102 extract same name/meson.build create mode 100644 meson/test cases/common/102 extract same name/src/lib.c create mode 100644 meson/test cases/common/103 has header symbol/meson.build create mode 100644 meson/test cases/common/104 has arg/meson.build create mode 100755 meson/test cases/common/105 generatorcustom/catter.py create mode 100755 meson/test cases/common/105 generatorcustom/gen-resx.py create mode 100755 meson/test cases/common/105 generatorcustom/gen.py create mode 100644 meson/test cases/common/105 generatorcustom/main.c create mode 100644 meson/test cases/common/105 generatorcustom/meson.build create mode 100644 meson/test cases/common/105 generatorcustom/res1.txt create mode 100644 meson/test cases/common/105 generatorcustom/res2.txt create mode 100644 meson/test cases/common/106 multiple dir configure file/meson.build create mode 100644 meson/test cases/common/106 multiple dir configure file/subdir/foo.txt create mode 100644 meson/test cases/common/106 multiple dir configure file/subdir/meson.build create mode 100644 meson/test cases/common/106 multiple dir configure file/subdir/someinput.in create mode 100644 meson/test cases/common/107 spaces backslash/asm output/meson.build create mode 100644 meson/test cases/common/107 spaces backslash/comparer-end-notstring.c create mode 100644 meson/test cases/common/107 spaces backslash/comparer-end.c create mode 100644 meson/test cases/common/107 spaces backslash/comparer.c create mode 100644 meson/test cases/common/107 spaces backslash/include/comparer.h create mode 100644 meson/test cases/common/107 spaces backslash/meson.build create mode 100644 meson/test cases/common/108 ternary/meson.build create mode 100644 meson/test cases/common/109 custom target capture/data_source.txt create mode 100644 meson/test cases/common/109 custom target capture/meson.build create mode 100755 meson/test cases/common/109 custom target capture/my_compiler.py create mode 100644 meson/test cases/common/109 
custom target capture/test.json create mode 100644 meson/test cases/common/11 subdir/meson.build create mode 100644 meson/test cases/common/11 subdir/subdir/meson.build create mode 100644 meson/test cases/common/11 subdir/subdir/prog.c create mode 100755 meson/test cases/common/110 allgenerate/converter.py create mode 100644 meson/test cases/common/110 allgenerate/foobar.cpp.in create mode 100644 meson/test cases/common/110 allgenerate/meson.build create mode 100644 meson/test cases/common/111 pathjoin/meson.build create mode 100644 meson/test cases/common/112 subdir subproject/meson.build create mode 100644 meson/test cases/common/112 subdir subproject/prog/meson.build create mode 100644 meson/test cases/common/112 subdir subproject/prog/prog.c create mode 100644 meson/test cases/common/112 subdir subproject/subprojects/sub/meson.build create mode 100644 meson/test cases/common/112 subdir subproject/subprojects/sub/sub.c create mode 100644 meson/test cases/common/112 subdir subproject/subprojects/sub/sub.h create mode 100644 meson/test cases/common/113 interpreter copy mutable var on assignment/meson.build create mode 100644 meson/test cases/common/114 skip/meson.build create mode 100644 meson/test cases/common/115 subproject project arguments/exe.c create mode 100644 meson/test cases/common/115 subproject project arguments/exe.cpp create mode 100644 meson/test cases/common/115 subproject project arguments/meson.build create mode 100644 meson/test cases/common/115 subproject project arguments/subprojects/subexe/meson.build create mode 100644 meson/test cases/common/115 subproject project arguments/subprojects/subexe/subexe.c create mode 100644 meson/test cases/common/116 test skip/meson.build create mode 100644 meson/test cases/common/116 test skip/test_skip.c create mode 100644 meson/test cases/common/117 shared module/meson.build create mode 100644 meson/test cases/common/117 shared module/module.c create mode 100644 meson/test cases/common/117 shared module/nosyms.c create mode 100644 meson/test cases/common/117 shared module/prog.c create mode 100644 meson/test cases/common/117 shared module/runtime.c create mode 100644 meson/test cases/common/117 shared module/test.json create mode 100644 meson/test cases/common/118 llvm ir and assembly/main.c create mode 100644 meson/test cases/common/118 llvm ir and assembly/main.cpp create mode 100644 meson/test cases/common/118 llvm ir and assembly/meson.build create mode 100644 meson/test cases/common/118 llvm ir and assembly/square-aarch64.S create mode 100644 meson/test cases/common/118 llvm ir and assembly/square-arm.S create mode 100644 meson/test cases/common/118 llvm ir and assembly/square-x86.S create mode 100644 meson/test cases/common/118 llvm ir and assembly/square-x86_64.S create mode 100644 meson/test cases/common/118 llvm ir and assembly/square.ll create mode 100644 meson/test cases/common/118 llvm ir and assembly/symbol-underscore.h create mode 100644 meson/test cases/common/119 cpp and asm/meson.build create mode 100644 meson/test cases/common/119 cpp and asm/retval-arm.S create mode 100644 meson/test cases/common/119 cpp and asm/retval-x86.S create mode 100644 meson/test cases/common/119 cpp and asm/retval-x86_64.S create mode 100644 meson/test cases/common/119 cpp and asm/symbol-underscore.h create mode 100644 meson/test cases/common/119 cpp and asm/trivial.cc create mode 100644 meson/test cases/common/12 data/datafile.dat create mode 100644 meson/test cases/common/12 data/etcfile.dat create mode 100644 meson/test 
cases/common/12 data/fileobject_datafile.dat create mode 100644 meson/test cases/common/12 data/meson.build create mode 100644 meson/test cases/common/12 data/runscript.sh create mode 100644 meson/test cases/common/12 data/somefile.txt create mode 100644 meson/test cases/common/12 data/test.json create mode 100644 meson/test cases/common/12 data/to_be_renamed_1.txt create mode 100644 meson/test cases/common/12 data/to_be_renamed_3.txt create mode 100644 meson/test cases/common/12 data/to_be_renamed_4.txt create mode 100644 meson/test cases/common/12 data/vanishing/meson.build create mode 100644 meson/test cases/common/12 data/vanishing/to_be_renamed_2.txt create mode 100644 meson/test cases/common/12 data/vanishing/vanishing.dat create mode 100644 meson/test cases/common/12 data/vanishing/vanishing2.dat create mode 100644 meson/test cases/common/120 extract all shared library/extractor.h create mode 100644 meson/test cases/common/120 extract all shared library/four.c create mode 100644 meson/test cases/common/120 extract all shared library/func1234.def create mode 100644 meson/test cases/common/120 extract all shared library/meson.build create mode 100644 meson/test cases/common/120 extract all shared library/one.c create mode 100644 meson/test cases/common/120 extract all shared library/prog.c create mode 100644 meson/test cases/common/120 extract all shared library/three.c create mode 100644 meson/test cases/common/120 extract all shared library/two.c create mode 100644 meson/test cases/common/121 object only target/meson.build create mode 100755 meson/test cases/common/121 object only target/obj_generator.py create mode 100644 meson/test cases/common/121 object only target/objdir/meson.build create mode 100644 meson/test cases/common/121 object only target/objdir/source4.c create mode 100644 meson/test cases/common/121 object only target/objdir/source5.c create mode 100644 meson/test cases/common/121 object only target/objdir/source6.c create mode 100644 meson/test cases/common/121 object only target/prog.c create mode 100644 meson/test cases/common/121 object only target/source.c create mode 100644 meson/test cases/common/121 object only target/source2.c create mode 100644 meson/test cases/common/121 object only target/source2.def create mode 100644 meson/test cases/common/121 object only target/source3.c create mode 100644 meson/test cases/common/121 object only target/test.json create mode 100644 meson/test cases/common/122 no buildincdir/include/header.h create mode 100644 meson/test cases/common/122 no buildincdir/meson.build create mode 100644 meson/test cases/common/122 no buildincdir/prog.c create mode 100644 meson/test cases/common/123 custom target directory install/docgen.py create mode 100644 meson/test cases/common/123 custom target directory install/meson.build create mode 100644 meson/test cases/common/123 custom target directory install/test.json create mode 100644 meson/test cases/common/124 dependency file generation/main .c create mode 100644 meson/test cases/common/124 dependency file generation/meson.build create mode 100644 meson/test cases/common/125 configure file in generator/inc/confdata.in create mode 100644 meson/test cases/common/125 configure file in generator/inc/meson.build create mode 100644 meson/test cases/common/125 configure file in generator/meson.build create mode 100755 meson/test cases/common/125 configure file in generator/src/gen.py create mode 100644 meson/test cases/common/125 configure file in generator/src/main.c create mode 100644 
meson/test cases/common/125 configure file in generator/src/meson.build create mode 100644 meson/test cases/common/125 configure file in generator/src/source create mode 100644 meson/test cases/common/126 generated llvm ir/copyfile.py create mode 100644 meson/test cases/common/126 generated llvm ir/main.c create mode 100644 meson/test cases/common/126 generated llvm ir/meson.build create mode 100644 meson/test cases/common/126 generated llvm ir/square.ll.in create mode 100644 meson/test cases/common/127 generated assembly/copyfile.py create mode 100644 meson/test cases/common/127 generated assembly/empty.c create mode 100644 meson/test cases/common/127 generated assembly/main.c create mode 100644 meson/test cases/common/127 generated assembly/meson.build create mode 100644 meson/test cases/common/127 generated assembly/square-arm.S.in create mode 100644 meson/test cases/common/127 generated assembly/square-x86.S.in create mode 100644 meson/test cases/common/127 generated assembly/square-x86_64.S.in create mode 100644 meson/test cases/common/127 generated assembly/square.def create mode 100644 meson/test cases/common/127 generated assembly/symbol-underscore.h create mode 100644 meson/test cases/common/128 build by default targets in tests/main.c create mode 100644 meson/test cases/common/128 build by default targets in tests/meson.build create mode 100644 meson/test cases/common/128 build by default targets in tests/write_file.py create mode 100644 meson/test cases/common/129 build by default/checkexists.py create mode 100644 meson/test cases/common/129 build by default/foo.c create mode 100644 meson/test cases/common/129 build by default/meson.build create mode 100644 meson/test cases/common/129 build by default/mygen.py create mode 100644 meson/test cases/common/129 build by default/source.txt create mode 100644 meson/test cases/common/13 pch/c/meson.build create mode 100644 meson/test cases/common/13 pch/c/pch/prog.h create mode 100644 meson/test cases/common/13 pch/c/prog.c create mode 100644 meson/test cases/common/13 pch/cpp/meson.build create mode 100644 meson/test cases/common/13 pch/cpp/pch/prog.hh create mode 100644 meson/test cases/common/13 pch/cpp/prog.cc create mode 100644 meson/test cases/common/13 pch/generated/gen_custom.py create mode 100644 meson/test cases/common/13 pch/generated/gen_generator.py create mode 100644 meson/test cases/common/13 pch/generated/generated_generator.in create mode 100644 meson/test cases/common/13 pch/generated/meson.build create mode 100644 meson/test cases/common/13 pch/generated/pch/prog.h create mode 100644 meson/test cases/common/13 pch/generated/prog.c create mode 100644 meson/test cases/common/13 pch/meson.build create mode 100644 meson/test cases/common/13 pch/mixed/func.c create mode 100644 meson/test cases/common/13 pch/mixed/main.cc create mode 100644 meson/test cases/common/13 pch/mixed/meson.build create mode 100644 meson/test cases/common/13 pch/mixed/pch/func.h create mode 100644 meson/test cases/common/13 pch/mixed/pch/main.h create mode 100644 meson/test cases/common/13 pch/userDefined/meson.build create mode 100644 meson/test cases/common/13 pch/userDefined/pch/pch.c create mode 100644 meson/test cases/common/13 pch/userDefined/pch/pch.h create mode 100644 meson/test cases/common/13 pch/userDefined/prog.c create mode 100644 meson/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h create mode 100644 meson/test cases/common/13 pch/withIncludeDirectories/meson.build create mode 100644 meson/test cases/common/13 
pch/withIncludeDirectories/pch/prog.h create mode 100644 meson/test cases/common/13 pch/withIncludeDirectories/prog.c create mode 100644 meson/test cases/common/13 pch/withIncludeFile/meson.build create mode 100644 meson/test cases/common/13 pch/withIncludeFile/pch/prog.h create mode 100644 meson/test cases/common/13 pch/withIncludeFile/prog.c create mode 100644 meson/test cases/common/130 include order/ctsub/copyfile.py create mode 100644 meson/test cases/common/130 include order/ctsub/emptyfile.c create mode 100644 meson/test cases/common/130 include order/ctsub/main.h create mode 100644 meson/test cases/common/130 include order/ctsub/meson.build create mode 100644 meson/test cases/common/130 include order/inc1/hdr.h create mode 100644 meson/test cases/common/130 include order/inc2/hdr.h create mode 100644 meson/test cases/common/130 include order/meson.build create mode 100644 meson/test cases/common/130 include order/ordertest.c create mode 100644 meson/test cases/common/130 include order/sub1/main.h create mode 100644 meson/test cases/common/130 include order/sub1/meson.build create mode 100644 meson/test cases/common/130 include order/sub1/some.c create mode 100644 meson/test cases/common/130 include order/sub1/some.h create mode 100644 meson/test cases/common/130 include order/sub2/main.h create mode 100644 meson/test cases/common/130 include order/sub2/meson.build create mode 100644 meson/test cases/common/130 include order/sub3/main.h create mode 100644 meson/test cases/common/130 include order/sub3/meson.build create mode 100644 meson/test cases/common/130 include order/sub4/main.c create mode 100644 meson/test cases/common/130 include order/sub4/main.h create mode 100644 meson/test cases/common/130 include order/sub4/meson.build create mode 100644 meson/test cases/common/131 override options/four.c create mode 100644 meson/test cases/common/131 override options/meson.build create mode 100644 meson/test cases/common/131 override options/one.c create mode 100644 meson/test cases/common/131 override options/three.c create mode 100644 meson/test cases/common/131 override options/two.c create mode 100644 meson/test cases/common/132 get define/concat.h create mode 100644 meson/test cases/common/132 get define/meson.build create mode 100644 meson/test cases/common/132 get define/meson_options.txt create mode 100644 meson/test cases/common/133 c cpp and asm/main.c create mode 100644 meson/test cases/common/133 c cpp and asm/main.cpp create mode 100644 meson/test cases/common/133 c cpp and asm/meson.build create mode 100644 meson/test cases/common/133 c cpp and asm/retval-arm.S create mode 100644 meson/test cases/common/133 c cpp and asm/retval-x86.S create mode 100644 meson/test cases/common/133 c cpp and asm/retval-x86_64.S create mode 100644 meson/test cases/common/133 c cpp and asm/somelib.c create mode 100644 meson/test cases/common/133 c cpp and asm/symbol-underscore.h create mode 100644 meson/test cases/common/134 compute int/config.h.in create mode 100644 meson/test cases/common/134 compute int/foobar.h create mode 100644 meson/test cases/common/134 compute int/meson.build create mode 100644 meson/test cases/common/134 compute int/prog.c.in create mode 100644 meson/test cases/common/135 custom target object output/meson.build create mode 100644 meson/test cases/common/135 custom target object output/obj_generator.py create mode 100644 meson/test cases/common/135 custom target object output/objdir/meson.build create mode 100644 meson/test cases/common/135 custom target object 
output/objdir/source.c create mode 100644 meson/test cases/common/135 custom target object output/progdir/meson.build create mode 100644 meson/test cases/common/135 custom target object output/progdir/prog.c create mode 100644 meson/test cases/common/136 empty build file/meson.build create mode 100644 meson/test cases/common/136 empty build file/subdir/meson.build create mode 100644 meson/test cases/common/137 whole archive/exe/meson.build create mode 100644 meson/test cases/common/137 whole archive/exe2/meson.build create mode 100644 meson/test cases/common/137 whole archive/exe3/meson.build create mode 100644 meson/test cases/common/137 whole archive/exe4/meson.build create mode 100644 meson/test cases/common/137 whole archive/func1.c create mode 100644 meson/test cases/common/137 whole archive/func2.c create mode 100644 meson/test cases/common/137 whole archive/meson.build create mode 100644 meson/test cases/common/137 whole archive/mylib.h create mode 100644 meson/test cases/common/137 whole archive/prog.c create mode 100644 meson/test cases/common/137 whole archive/sh_func2_dep_func1/meson.build create mode 100644 meson/test cases/common/137 whole archive/sh_func2_linked_func1/meson.build create mode 100644 meson/test cases/common/137 whole archive/sh_func2_transdep_func1/meson.build create mode 100644 meson/test cases/common/137 whole archive/sh_only_link_whole/meson.build create mode 100644 meson/test cases/common/137 whole archive/st_func1/meson.build create mode 100644 meson/test cases/common/137 whole archive/st_func2/meson.build create mode 100644 meson/test cases/common/138 C and CPP link/dummy.c create mode 100644 meson/test cases/common/138 C and CPP link/foo.c create mode 100644 meson/test cases/common/138 C and CPP link/foo.cpp create mode 100644 meson/test cases/common/138 C and CPP link/foo.h create mode 100644 meson/test cases/common/138 C and CPP link/foo.hpp create mode 100644 meson/test cases/common/138 C and CPP link/foobar.c create mode 100644 meson/test cases/common/138 C and CPP link/foobar.h create mode 100644 meson/test cases/common/138 C and CPP link/meson.build create mode 100644 meson/test cases/common/138 C and CPP link/sub.c create mode 100644 meson/test cases/common/138 C and CPP link/sub.h create mode 100644 meson/test cases/common/139 mesonintrospect from scripts/check_env.py create mode 100644 meson/test cases/common/139 mesonintrospect from scripts/check_introspection.py create mode 100644 meson/test cases/common/139 mesonintrospect from scripts/meson.build create mode 100644 meson/test cases/common/14 configure file/basename.py create mode 100644 meson/test cases/common/14 configure file/check_file.py create mode 100644 meson/test cases/common/14 configure file/check_inputs.py create mode 100644 meson/test cases/common/14 configure file/config.h create mode 100644 meson/test cases/common/14 configure file/config.h.in create mode 100644 meson/test cases/common/14 configure file/config4a.h.in create mode 100644 meson/test cases/common/14 configure file/config4b.h.in create mode 100644 meson/test cases/common/14 configure file/config5.h.in create mode 100644 meson/test cases/common/14 configure file/config6.h.in create mode 100644 meson/test cases/common/14 configure file/config7.h.in create mode 100644 meson/test cases/common/14 configure file/config8.h.in create mode 100644 meson/test cases/common/14 configure file/depfile create mode 100644 meson/test cases/common/14 configure file/differentafterbasename1.in create mode 100644 meson/test 
cases/common/14 configure file/differentafterbasename2.in create mode 100644 meson/test cases/common/14 configure file/dummy.dat create mode 100644 meson/test cases/common/14 configure file/dumpprog.c create mode 100644 meson/test cases/common/14 configure file/file_contains.py create mode 100755 meson/test cases/common/14 configure file/generator-deps.py create mode 100755 meson/test cases/common/14 configure file/generator-without-input-file.py create mode 100755 meson/test cases/common/14 configure file/generator.py create mode 100644 meson/test cases/common/14 configure file/invalid-utf8.bin.in create mode 100644 meson/test cases/common/14 configure file/meson.build create mode 100644 meson/test cases/common/14 configure file/nosubst-nocopy1.txt.in create mode 100644 meson/test cases/common/14 configure file/nosubst-nocopy2.txt.in create mode 100644 meson/test cases/common/14 configure file/prog.c create mode 100644 meson/test cases/common/14 configure file/prog2.c create mode 100644 meson/test cases/common/14 configure file/prog4.c create mode 100644 meson/test cases/common/14 configure file/prog5.c create mode 100644 meson/test cases/common/14 configure file/prog6.c create mode 100644 meson/test cases/common/14 configure file/prog7.c create mode 100644 meson/test cases/common/14 configure file/prog9.c create mode 100644 meson/test cases/common/14 configure file/sameafterbasename.in create mode 100644 meson/test cases/common/14 configure file/sameafterbasename.in2 create mode 100644 meson/test cases/common/14 configure file/subdir/meson.build create mode 100644 meson/test cases/common/14 configure file/test.json create mode 100644 meson/test cases/common/14 configure file/test.py.in create mode 100644 meson/test cases/common/14 configure file/touch.py create mode 100755 meson/test cases/common/140 custom target multiple outputs/generator.py create mode 100644 meson/test cases/common/140 custom target multiple outputs/meson.build create mode 100644 meson/test cases/common/140 custom target multiple outputs/test.json create mode 100644 meson/test cases/common/141 special characters/arg-char-test.c create mode 100644 meson/test cases/common/141 special characters/arg-string-test.c create mode 100644 meson/test cases/common/141 special characters/arg-unquoted-test.c create mode 100644 meson/test cases/common/141 special characters/check_quoting.py create mode 100644 meson/test cases/common/141 special characters/meson.build create mode 100644 meson/test cases/common/141 special characters/test.json create mode 100644 meson/test cases/common/142 nested links/meson.build create mode 100644 meson/test cases/common/142 nested links/xephyr.c create mode 100644 meson/test cases/common/143 list of file sources/foo create mode 100644 meson/test cases/common/143 list of file sources/gen.py create mode 100644 meson/test cases/common/143 list of file sources/meson.build create mode 100644 meson/test cases/common/144 link depends custom target/foo.c create mode 100755 meson/test cases/common/144 link depends custom target/make_file.py create mode 100644 meson/test cases/common/144 link depends custom target/meson.build create mode 100644 meson/test cases/common/145 recursive linking/3rdorderdeps/lib.c.in create mode 100644 meson/test cases/common/145 recursive linking/3rdorderdeps/main.c.in create mode 100644 meson/test cases/common/145 recursive linking/3rdorderdeps/meson.build create mode 100644 meson/test cases/common/145 recursive linking/circular/lib1.c create mode 100644 meson/test 
cases/common/145 recursive linking/circular/lib2.c create mode 100644 meson/test cases/common/145 recursive linking/circular/lib3.c create mode 100644 meson/test cases/common/145 recursive linking/circular/main.c create mode 100644 meson/test cases/common/145 recursive linking/circular/meson.build create mode 100644 meson/test cases/common/145 recursive linking/circular/prop1.c create mode 100644 meson/test cases/common/145 recursive linking/circular/prop2.c create mode 100644 meson/test cases/common/145 recursive linking/circular/prop3.c create mode 100644 meson/test cases/common/145 recursive linking/edge-cases/libsto.c create mode 100644 meson/test cases/common/145 recursive linking/edge-cases/meson.build create mode 100644 meson/test cases/common/145 recursive linking/edge-cases/shstmain.c create mode 100644 meson/test cases/common/145 recursive linking/edge-cases/stobuilt.c create mode 100644 meson/test cases/common/145 recursive linking/edge-cases/stomain.c create mode 100644 meson/test cases/common/145 recursive linking/lib.h create mode 100644 meson/test cases/common/145 recursive linking/main.c create mode 100644 meson/test cases/common/145 recursive linking/meson.build create mode 100644 meson/test cases/common/145 recursive linking/shnodep/lib.c create mode 100644 meson/test cases/common/145 recursive linking/shnodep/meson.build create mode 100644 meson/test cases/common/145 recursive linking/shshdep/lib.c create mode 100644 meson/test cases/common/145 recursive linking/shshdep/meson.build create mode 100644 meson/test cases/common/145 recursive linking/shstdep/lib.c create mode 100644 meson/test cases/common/145 recursive linking/shstdep/meson.build create mode 100644 meson/test cases/common/145 recursive linking/stnodep/lib.c create mode 100644 meson/test cases/common/145 recursive linking/stnodep/meson.build create mode 100644 meson/test cases/common/145 recursive linking/stshdep/lib.c create mode 100644 meson/test cases/common/145 recursive linking/stshdep/meson.build create mode 100644 meson/test cases/common/145 recursive linking/ststdep/lib.c create mode 100644 meson/test cases/common/145 recursive linking/ststdep/meson.build create mode 100644 meson/test cases/common/146 library at root/lib.c create mode 100644 meson/test cases/common/146 library at root/main/main.c create mode 100644 meson/test cases/common/146 library at root/main/meson.build create mode 100644 meson/test cases/common/146 library at root/meson.build create mode 100644 meson/test cases/common/147 simd/fallback.c create mode 100644 meson/test cases/common/147 simd/include/simdheader.h create mode 100644 meson/test cases/common/147 simd/meson.build create mode 100644 meson/test cases/common/147 simd/simd_avx.c create mode 100644 meson/test cases/common/147 simd/simd_avx2.c create mode 100644 meson/test cases/common/147 simd/simd_mmx.c create mode 100644 meson/test cases/common/147 simd/simd_neon.c create mode 100644 meson/test cases/common/147 simd/simd_sse.c create mode 100644 meson/test cases/common/147 simd/simd_sse2.c create mode 100644 meson/test cases/common/147 simd/simd_sse3.c create mode 100644 meson/test cases/common/147 simd/simd_sse41.c create mode 100644 meson/test cases/common/147 simd/simd_sse42.c create mode 100644 meson/test cases/common/147 simd/simd_ssse3.c create mode 100644 meson/test cases/common/147 simd/simdchecker.c create mode 100644 meson/test cases/common/147 simd/simdfuncs.h create mode 100644 meson/test cases/common/148 shared module resolving symbol in executable/meson.build 
create mode 100644 meson/test cases/common/148 shared module resolving symbol in executable/module.c create mode 100644 meson/test cases/common/148 shared module resolving symbol in executable/prog.c create mode 100644 meson/test cases/common/149 dotinclude/dotproc.c create mode 100644 meson/test cases/common/149 dotinclude/meson.build create mode 100644 meson/test cases/common/149 dotinclude/stdio.h create mode 100644 meson/test cases/common/15 if/meson.build create mode 100644 meson/test cases/common/15 if/prog.c create mode 100644 meson/test cases/common/150 reserved targets/all/meson.build create mode 100644 meson/test cases/common/150 reserved targets/benchmark/meson.build create mode 100644 meson/test cases/common/150 reserved targets/clean-ctlist/meson.build create mode 100644 meson/test cases/common/150 reserved targets/clean-gcda/meson.build create mode 100644 meson/test cases/common/150 reserved targets/clean-gcno/meson.build create mode 100644 meson/test cases/common/150 reserved targets/clean/meson.build create mode 100644 meson/test cases/common/150 reserved targets/coverage-html/meson.build create mode 100644 meson/test cases/common/150 reserved targets/coverage-text/meson.build create mode 100644 meson/test cases/common/150 reserved targets/coverage-xml/meson.build create mode 100644 meson/test cases/common/150 reserved targets/coverage/meson.build create mode 100644 meson/test cases/common/150 reserved targets/dist/meson.build create mode 100644 meson/test cases/common/150 reserved targets/distcheck/meson.build create mode 100644 meson/test cases/common/150 reserved targets/install/meson.build create mode 100644 meson/test cases/common/150 reserved targets/meson.build create mode 100644 meson/test cases/common/150 reserved targets/phony/meson.build create mode 100644 meson/test cases/common/150 reserved targets/reconfigure/meson.build create mode 100644 meson/test cases/common/150 reserved targets/runtarget/echo.py create mode 100644 meson/test cases/common/150 reserved targets/runtarget/meson.build create mode 100644 meson/test cases/common/150 reserved targets/scan-build/meson.build create mode 100644 meson/test cases/common/150 reserved targets/test.c create mode 100644 meson/test cases/common/150 reserved targets/test/meson.build create mode 100644 meson/test cases/common/150 reserved targets/uninstall/meson.build create mode 100644 meson/test cases/common/151 duplicate source names/dir1/file.c create mode 100644 meson/test cases/common/151 duplicate source names/dir1/meson.build create mode 100644 meson/test cases/common/151 duplicate source names/dir2/dir1/file.c create mode 100644 meson/test cases/common/151 duplicate source names/dir2/file.c create mode 100644 meson/test cases/common/151 duplicate source names/dir2/meson.build create mode 100644 meson/test cases/common/151 duplicate source names/dir3/dir1/file.c create mode 100644 meson/test cases/common/151 duplicate source names/dir3/file.c create mode 100644 meson/test cases/common/151 duplicate source names/dir3/meson.build create mode 100644 meson/test cases/common/151 duplicate source names/meson.build create mode 100644 meson/test cases/common/152 index customtarget/check_args.py create mode 100644 meson/test cases/common/152 index customtarget/gen_sources.py create mode 100644 meson/test cases/common/152 index customtarget/lib.c create mode 100644 meson/test cases/common/152 index customtarget/meson.build create mode 100644 meson/test cases/common/152 index customtarget/subdir/foo.c create mode 100644 
meson/test cases/common/152 index customtarget/subdir/meson.build create mode 100644 meson/test cases/common/153 wrap file should not failed/meson.build create mode 100644 meson/test cases/common/153 wrap file should not failed/src/meson.build create mode 100644 meson/test cases/common/153 wrap file should not failed/src/subprojects/foo/prog2.c create mode 100644 meson/test cases/common/153 wrap file should not failed/src/subprojects/prog.c create mode 100644 meson/test cases/common/153 wrap file should not failed/src/test.c create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/.gitignore create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/bar.wrap create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/foo.wrap create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/patchdir.wrap create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/foo.c create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/meson.build create mode 100644 meson/test cases/common/153 wrap file should not failed/subprojects/zlib.wrap create mode 100644 meson/test cases/common/154 includedir subproj/meson.build create mode 100644 meson/test cases/common/154 includedir subproj/prog.c create mode 100644 meson/test cases/common/154 includedir subproj/subprojects/inctest/include/incfile.h create mode 100644 meson/test cases/common/154 includedir subproj/subprojects/inctest/meson.build create mode 100644 meson/test cases/common/155 subproject dir name collision/a.c create mode 100644 meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/b.c create mode 100644 meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/meson.build create mode 100644 meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/c.c create mode 100644 meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/meson.build create mode 100644 meson/test cases/common/155 subproject dir name collision/meson.build create mode 100644 meson/test cases/common/155 subproject dir name collision/other_subdir/custom_subproject_dir/other.c create mode 100644 meson/test cases/common/155 subproject dir name collision/other_subdir/meson.build create mode 100644 meson/test cases/common/156 config tool variable/meson.build create mode 100644 meson/test cases/common/157 custom target subdir depend files/copyfile.py create mode 100644 meson/test cases/common/157 custom target subdir depend files/meson.build create mode 100644 meson/test cases/common/157 custom target subdir depend files/subdir/dep.dat create mode 100644 meson/test cases/common/157 custom target subdir depend files/subdir/foo.c.in create mode 100644 meson/test cases/common/157 custom target subdir depend files/subdir/meson.build create mode 100644 meson/test cases/common/158 disabler/meson.build create mode 100644 meson/test cases/common/159 array option/meson.build create mode 100644 meson/test cases/common/159 array option/meson_options.txt create 
mode 100644 meson/test cases/common/16 comparison/meson.build create mode 100644 meson/test cases/common/16 comparison/prog.c create mode 100644 meson/test cases/common/160 custom target template substitution/checkcopy.py create mode 100644 meson/test cases/common/160 custom target template substitution/foo.c.in create mode 100644 meson/test cases/common/160 custom target template substitution/meson.build create mode 100644 meson/test cases/common/161 not-found dependency/meson.build create mode 100644 meson/test cases/common/161 not-found dependency/sub/meson.build create mode 100644 meson/test cases/common/161 not-found dependency/subprojects/trivial/meson.build create mode 100644 meson/test cases/common/161 not-found dependency/subprojects/trivial/trivial.c create mode 100644 meson/test cases/common/161 not-found dependency/testlib.c create mode 100644 meson/test cases/common/162 subdir if_found/meson.build create mode 100644 meson/test cases/common/162 subdir if_found/subdir/meson.build create mode 100644 meson/test cases/common/163 default options prefix dependent defaults/meson.build create mode 100644 meson/test cases/common/164 dependency factory/meson.build create mode 100644 meson/test cases/common/165 get project license/bar.c create mode 100644 meson/test cases/common/165 get project license/meson.build create mode 100644 meson/test cases/common/166 yield/meson.build create mode 100644 meson/test cases/common/166 yield/meson_options.txt create mode 100644 meson/test cases/common/166 yield/subprojects/sub/meson.build create mode 100644 meson/test cases/common/166 yield/subprojects/sub/meson_options.txt create mode 100644 meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/a.c create mode 100644 meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/meson.build create mode 100644 meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/var/subprojects/wrap_files_might_be_here create mode 100644 meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/b.c create mode 100644 meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/meson.build create mode 100644 meson/test cases/common/167 subproject nested subproject dirs/meson.build create mode 100644 meson/test cases/common/167 subproject nested subproject dirs/prog.c create mode 100644 meson/test cases/common/168 preserve gendir/base.inp create mode 100644 meson/test cases/common/168 preserve gendir/com/mesonbuild/subbie.inp create mode 100755 meson/test cases/common/168 preserve gendir/genprog.py create mode 100644 meson/test cases/common/168 preserve gendir/meson.build create mode 100644 meson/test cases/common/168 preserve gendir/testprog.c create mode 100644 meson/test cases/common/169 source in dep/bar.cpp create mode 100644 meson/test cases/common/169 source in dep/foo.c create mode 100644 meson/test cases/common/169 source in dep/generated/funname create mode 100755 meson/test cases/common/169 source in dep/generated/genheader.py create mode 100644 meson/test cases/common/169 source in dep/generated/main.c create mode 100644 meson/test cases/common/169 source in dep/generated/meson.build create mode 100644 meson/test cases/common/169 source in dep/meson.build create mode 100644 meson/test cases/common/17 array/func.c create mode 100644 meson/test cases/common/17 array/meson.build create mode 100644 meson/test cases/common/17 array/prog.c create mode 100644 
meson/test cases/common/170 generator link whole/export.h create mode 100755 meson/test cases/common/170 generator link whole/generator.py create mode 100644 meson/test cases/common/170 generator link whole/main.c create mode 100644 meson/test cases/common/170 generator link whole/meson.build create mode 100644 meson/test cases/common/170 generator link whole/meson_test_function.tmpl create mode 100644 meson/test cases/common/170 generator link whole/pull_meson_test_function.c create mode 100644 meson/test cases/common/171 initial c_args/meson.build create mode 100644 meson/test cases/common/171 initial c_args/test.json create mode 100644 meson/test cases/common/172 identical target name in subproject flat layout/foo.c create mode 100644 meson/test cases/common/172 identical target name in subproject flat layout/main.c create mode 100644 meson/test cases/common/172 identical target name in subproject flat layout/meson.build create mode 100644 meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/foo.c create mode 100644 meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/meson.build create mode 100644 meson/test cases/common/173 as-needed/config.h create mode 100644 meson/test cases/common/173 as-needed/libA.cpp create mode 100644 meson/test cases/common/173 as-needed/libA.h create mode 100644 meson/test cases/common/173 as-needed/libB.cpp create mode 100644 meson/test cases/common/173 as-needed/main.cpp create mode 100644 meson/test cases/common/173 as-needed/meson.build create mode 100644 meson/test cases/common/174 ndebug if-release enabled/main.c create mode 100644 meson/test cases/common/174 ndebug if-release enabled/meson.build create mode 100644 meson/test cases/common/175 ndebug if-release disabled/main.c create mode 100644 meson/test cases/common/175 ndebug if-release disabled/meson.build create mode 100644 meson/test cases/common/176 subproject version/meson.build create mode 100644 meson/test cases/common/176 subproject version/subprojects/a/meson.build create mode 100644 meson/test cases/common/177 subdir_done/meson.build create mode 100644 meson/test cases/common/178 bothlibraries/dummy.py create mode 100644 meson/test cases/common/178 bothlibraries/libfile.c create mode 100644 meson/test cases/common/178 bothlibraries/main.c create mode 100644 meson/test cases/common/178 bothlibraries/meson.build create mode 100644 meson/test cases/common/178 bothlibraries/mylib.h create mode 100644 meson/test cases/common/179 escape and unicode/file.c.in create mode 100644 meson/test cases/common/179 escape and unicode/file.py create mode 100644 meson/test cases/common/179 escape and unicode/find.py create mode 100644 meson/test cases/common/179 escape and unicode/fun.c create mode 100644 meson/test cases/common/179 escape and unicode/main.c create mode 100644 meson/test cases/common/179 escape and unicode/meson.build create mode 100644 meson/test cases/common/18 includedir/include/func.h create mode 100644 meson/test cases/common/18 includedir/meson.build create mode 100644 meson/test cases/common/18 includedir/src/func.c create mode 100644 meson/test cases/common/18 includedir/src/meson.build create mode 100644 meson/test cases/common/18 includedir/src/prog.c create mode 100644 meson/test cases/common/180 has link arg/meson.build create mode 100644 meson/test cases/common/181 same target name flat layout/foo.c create mode 100644 meson/test cases/common/181 same target name flat layout/main.c create mode 
100644 meson/test cases/common/181 same target name flat layout/meson.build create mode 100644 meson/test cases/common/181 same target name flat layout/subdir/foo.c create mode 100644 meson/test cases/common/181 same target name flat layout/subdir/meson.build create mode 100644 meson/test cases/common/182 find override/meson.build create mode 100644 meson/test cases/common/182 find override/otherdir/main.c create mode 100644 meson/test cases/common/182 find override/otherdir/main2.c create mode 100644 meson/test cases/common/182 find override/otherdir/meson.build create mode 100644 meson/test cases/common/182 find override/otherdir/source.desc create mode 100644 meson/test cases/common/182 find override/otherdir/source2.desc create mode 100755 meson/test cases/common/182 find override/subdir/converter.py create mode 100755 meson/test cases/common/182 find override/subdir/gencodegen.py.in create mode 100644 meson/test cases/common/182 find override/subdir/meson.build create mode 100644 meson/test cases/common/182 find override/subprojects/sub.wrap create mode 100644 meson/test cases/common/182 find override/subprojects/sub/meson.build create mode 100644 meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.c create mode 100644 meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.h create mode 100644 meson/test cases/common/183 partial dependency/declare_dependency/main.c create mode 100644 meson/test cases/common/183 partial dependency/declare_dependency/meson.build create mode 100644 meson/test cases/common/183 partial dependency/declare_dependency/other.c create mode 100644 meson/test cases/common/183 partial dependency/meson.build create mode 100644 meson/test cases/common/184 openmp/main.c create mode 100644 meson/test cases/common/184 openmp/main.cpp create mode 100644 meson/test cases/common/184 openmp/main.f90 create mode 100644 meson/test cases/common/184 openmp/meson.build create mode 100644 meson/test cases/common/185 same target name/file.c create mode 100644 meson/test cases/common/185 same target name/meson.build create mode 100644 meson/test cases/common/185 same target name/sub/file2.c create mode 100644 meson/test cases/common/185 same target name/sub/meson.build create mode 100755 meson/test cases/common/186 test depends/gen.py create mode 100644 meson/test cases/common/186 test depends/main.c create mode 100644 meson/test cases/common/186 test depends/meson.build create mode 100755 meson/test cases/common/186 test depends/test.py create mode 100644 meson/test cases/common/187 args flattening/meson.build create mode 100644 meson/test cases/common/188 dict/meson.build create mode 100644 meson/test cases/common/188 dict/prog.c create mode 100644 meson/test cases/common/189 check header/meson.build create mode 100644 meson/test cases/common/189 check header/ouagadougou.h create mode 100644 meson/test cases/common/19 header in file list/header.h create mode 100644 meson/test cases/common/19 header in file list/meson.build create mode 100644 meson/test cases/common/19 header in file list/prog.c create mode 100644 meson/test cases/common/190 install_mode/config.h.in create mode 100644 meson/test cases/common/190 install_mode/data_source.txt create mode 100644 meson/test cases/common/190 install_mode/foo.1 create mode 100644 meson/test cases/common/190 install_mode/meson.build create mode 100644 meson/test cases/common/190 install_mode/rootdir.h create mode 100644 meson/test cases/common/190 install_mode/runscript.sh create mode 
100644 meson/test cases/common/190 install_mode/stat.c create mode 100644 meson/test cases/common/190 install_mode/sub1/second.dat create mode 100644 meson/test cases/common/190 install_mode/sub2/stub create mode 100644 meson/test cases/common/190 install_mode/test.json create mode 100644 meson/test cases/common/190 install_mode/trivial.c create mode 100644 meson/test cases/common/191 subproject array version/meson.build create mode 100644 meson/test cases/common/191 subproject array version/subprojects/foo/meson.build create mode 100644 meson/test cases/common/192 feature option/meson.build create mode 100644 meson/test cases/common/192 feature option/meson_options.txt create mode 100644 meson/test cases/common/193 feature option disabled/meson.build create mode 100644 meson/test cases/common/193 feature option disabled/meson_options.txt create mode 100644 meson/test cases/common/194 static threads/lib1.c create mode 100644 meson/test cases/common/194 static threads/lib2.c create mode 100644 meson/test cases/common/194 static threads/meson.build create mode 100644 meson/test cases/common/194 static threads/prog.c create mode 100644 meson/test cases/common/195 generator in subdir/com/mesonbuild/genprog.py create mode 100644 meson/test cases/common/195 generator in subdir/com/mesonbuild/meson.build create mode 100644 meson/test cases/common/195 generator in subdir/com/mesonbuild/subbie.inp create mode 100644 meson/test cases/common/195 generator in subdir/com/mesonbuild/testprog.c create mode 100644 meson/test cases/common/195 generator in subdir/meson.build create mode 100644 meson/test cases/common/196 subproject with features/meson.build create mode 100644 meson/test cases/common/196 subproject with features/meson_options.txt create mode 100644 meson/test cases/common/196 subproject with features/nothing.c create mode 100644 meson/test cases/common/196 subproject with features/subprojects/auto_sub_with_missing_dep/meson.build create mode 100644 meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/meson.build create mode 100644 meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.c create mode 100644 meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.h create mode 100644 meson/test cases/common/196 subproject with features/subprojects/disabled_sub/meson.build create mode 100644 meson/test cases/common/196 subproject with features/subprojects/sub/lib/meson.build create mode 100644 meson/test cases/common/196 subproject with features/subprojects/sub/lib/sub.c create mode 100644 meson/test cases/common/196 subproject with features/subprojects/sub/lib/sub.h create mode 100644 meson/test cases/common/196 subproject with features/subprojects/sub/meson.build create mode 100644 meson/test cases/common/197 function attributes/meson.build create mode 100644 meson/test cases/common/197 function attributes/meson_options.txt create mode 100644 meson/test cases/common/197 function attributes/test.json create mode 100644 meson/test cases/common/198 broken subproject/meson.build create mode 100644 meson/test cases/common/198 broken subproject/subprojects/broken/broken.c create mode 100644 meson/test cases/common/198 broken subproject/subprojects/broken/meson.build create mode 100644 meson/test cases/common/199 argument syntax/meson.build create mode 100644 meson/test cases/common/2 cpp/VERSIONFILE create mode 100644 meson/test cases/common/2 cpp/cpp.C create mode 100644 meson/test cases/common/2 
cpp/meson.build create mode 100644 meson/test cases/common/2 cpp/something.txt create mode 100644 meson/test cases/common/2 cpp/trivial.cc create mode 100644 meson/test cases/common/20 global arg/meson.build create mode 100644 meson/test cases/common/20 global arg/prog.c create mode 100644 meson/test cases/common/20 global arg/prog.cc create mode 100644 meson/test cases/common/200 install name_prefix name_suffix/libfile.c create mode 100644 meson/test cases/common/200 install name_prefix name_suffix/meson.build create mode 100644 meson/test cases/common/200 install name_prefix name_suffix/test.json create mode 100644 meson/test cases/common/201 kwarg entry/inc/prog.h create mode 100644 meson/test cases/common/201 kwarg entry/meson.build create mode 100644 meson/test cases/common/201 kwarg entry/prog.c create mode 100644 meson/test cases/common/201 kwarg entry/test.json create mode 100644 meson/test cases/common/202 custom target build by default/docgen.py create mode 100644 meson/test cases/common/202 custom target build by default/meson.build create mode 100644 meson/test cases/common/202 custom target build by default/test.json create mode 100644 meson/test cases/common/203 find_library and headers/foo.h create mode 100644 meson/test cases/common/203 find_library and headers/meson.build create mode 100644 meson/test cases/common/204 line continuation/meson.build create mode 100644 meson/test cases/common/205 native file path override/main.cpp create mode 100644 meson/test cases/common/205 native file path override/meson.build create mode 100644 meson/test cases/common/205 native file path override/nativefile.ini create mode 100644 meson/test cases/common/205 native file path override/test.json create mode 100644 meson/test cases/common/206 tap tests/cat.c create mode 100644 meson/test cases/common/206 tap tests/issue7515.txt create mode 100644 meson/test cases/common/206 tap tests/meson.build create mode 100644 meson/test cases/common/206 tap tests/tester.c create mode 100644 meson/test cases/common/207 warning level 0/main.cpp create mode 100644 meson/test cases/common/207 warning level 0/meson.build create mode 100755 meson/test cases/common/208 link custom/custom_stlib.py create mode 100644 meson/test cases/common/208 link custom/custom_target.c create mode 100644 meson/test cases/common/208 link custom/custom_target.py create mode 100644 meson/test cases/common/208 link custom/dummy.c create mode 100644 meson/test cases/common/208 link custom/lib.c create mode 100644 meson/test cases/common/208 link custom/meson.build create mode 100644 meson/test cases/common/208 link custom/outerlib.c create mode 100644 meson/test cases/common/208 link custom/prog.c create mode 100644 meson/test cases/common/209 link custom_i single from multiple/generate_conflicting_stlibs.py create mode 100644 meson/test cases/common/209 link custom_i single from multiple/meson.build create mode 100644 meson/test cases/common/209 link custom_i single from multiple/prog.c create mode 100644 meson/test cases/common/21 target arg/func.c create mode 100644 meson/test cases/common/21 target arg/func2.c create mode 100644 meson/test cases/common/21 target arg/meson.build create mode 100644 meson/test cases/common/21 target arg/prog.cc create mode 100644 meson/test cases/common/21 target arg/prog2.cc create mode 100644 meson/test cases/common/210 link custom_i multiple from multiple/generate_stlibs.py create mode 100644 meson/test cases/common/210 link custom_i multiple from multiple/meson.build create mode 100644 
meson/test cases/common/210 link custom_i multiple from multiple/prog.c create mode 100644 meson/test cases/common/211 dependency get_variable method/meson.build create mode 100644 meson/test cases/common/212 source set configuration_data/a.c create mode 100644 meson/test cases/common/212 source set configuration_data/all.h create mode 100644 meson/test cases/common/212 source set configuration_data/f.c create mode 100644 meson/test cases/common/212 source set configuration_data/g.c create mode 100644 meson/test cases/common/212 source set configuration_data/meson.build create mode 100644 meson/test cases/common/212 source set configuration_data/nope.c create mode 100644 meson/test cases/common/212 source set configuration_data/subdir/b.c create mode 100644 meson/test cases/common/212 source set configuration_data/subdir/meson.build create mode 100644 meson/test cases/common/213 source set dictionary/a.c create mode 100644 meson/test cases/common/213 source set dictionary/all.h create mode 100644 meson/test cases/common/213 source set dictionary/f.c create mode 100644 meson/test cases/common/213 source set dictionary/g.c create mode 100644 meson/test cases/common/213 source set dictionary/meson.build create mode 100644 meson/test cases/common/213 source set dictionary/nope.c create mode 100644 meson/test cases/common/213 source set dictionary/subdir/b.c create mode 100644 meson/test cases/common/213 source set dictionary/subdir/meson.build create mode 100644 meson/test cases/common/214 source set custom target/a.c create mode 100644 meson/test cases/common/214 source set custom target/all.h create mode 100644 meson/test cases/common/214 source set custom target/cp.py create mode 100644 meson/test cases/common/214 source set custom target/f.c create mode 100644 meson/test cases/common/214 source set custom target/g.c create mode 100644 meson/test cases/common/214 source set custom target/meson.build create mode 100644 meson/test cases/common/215 source set realistic example/boards/arm/aarch64.cc create mode 100644 meson/test cases/common/215 source set realistic example/boards/arm/arm.cc create mode 100644 meson/test cases/common/215 source set realistic example/boards/arm/arm.h create mode 100644 meson/test cases/common/215 source set realistic example/boards/arm/arm32.cc create mode 100644 meson/test cases/common/215 source set realistic example/boards/arm/versatilepb.cc create mode 100644 meson/test cases/common/215 source set realistic example/boards/arm/virt.cc create mode 100644 meson/test cases/common/215 source set realistic example/boards/arm/xlnx_zcu102.cc create mode 100644 meson/test cases/common/215 source set realistic example/boards/meson.build create mode 100644 meson/test cases/common/215 source set realistic example/boards/x86/pc.cc create mode 100644 meson/test cases/common/215 source set realistic example/common.h create mode 100644 meson/test cases/common/215 source set realistic example/config/aarch64 create mode 100644 meson/test cases/common/215 source set realistic example/config/arm create mode 100644 meson/test cases/common/215 source set realistic example/config/x86 create mode 100644 meson/test cases/common/215 source set realistic example/devices/meson.build create mode 100644 meson/test cases/common/215 source set realistic example/devices/virtio-mmio.cc create mode 100644 meson/test cases/common/215 source set realistic example/devices/virtio-pci.cc create mode 100644 meson/test cases/common/215 source set realistic example/devices/virtio.cc create mode 
100644 meson/test cases/common/215 source set realistic example/devices/virtio.h create mode 100644 meson/test cases/common/215 source set realistic example/main.cc create mode 100644 meson/test cases/common/215 source set realistic example/meson.build create mode 100644 meson/test cases/common/215 source set realistic example/not-found.cc create mode 100644 meson/test cases/common/215 source set realistic example/was-found.cc create mode 100644 meson/test cases/common/215 source set realistic example/zlib.cc create mode 100644 meson/test cases/common/216 custom target input extracted objects/check_object.py create mode 100644 meson/test cases/common/216 custom target input extracted objects/libdir/meson.build create mode 100644 meson/test cases/common/216 custom target input extracted objects/libdir/source.c create mode 100644 meson/test cases/common/216 custom target input extracted objects/meson.build create mode 100644 meson/test cases/common/217 test priorities/meson.build create mode 100644 meson/test cases/common/217 test priorities/testprog.py create mode 100644 meson/test cases/common/218 include_dir dot/meson.build create mode 100644 meson/test cases/common/218 include_dir dot/rone.h create mode 100644 meson/test cases/common/218 include_dir dot/src/main.c create mode 100644 meson/test cases/common/218 include_dir dot/src/meson.build create mode 100644 meson/test cases/common/218 include_dir dot/src/rone.c create mode 100644 meson/test cases/common/219 include_type dependency/main.cpp create mode 100644 meson/test cases/common/219 include_type dependency/meson.build create mode 100644 meson/test cases/common/219 include_type dependency/pch/test.hpp create mode 100644 meson/test cases/common/219 include_type dependency/subprojects/subDep/meson.build create mode 100644 meson/test cases/common/22 object extraction/check-obj.py create mode 100644 meson/test cases/common/22 object extraction/header.h create mode 100644 meson/test cases/common/22 object extraction/lib.c create mode 100644 meson/test cases/common/22 object extraction/lib2.c create mode 100644 meson/test cases/common/22 object extraction/main.c create mode 100644 meson/test cases/common/22 object extraction/meson.build create mode 100644 meson/test cases/common/22 object extraction/src/lib.c create mode 100644 meson/test cases/common/220 fs module/meson.build create mode 100644 meson/test cases/common/220 fs module/subdir/meson.build create mode 100644 meson/test cases/common/220 fs module/subdir/subdirfile.txt create mode 100644 meson/test cases/common/220 fs module/subprojects/subbie/meson.build create mode 100644 meson/test cases/common/220 fs module/subprojects/subbie/subprojectfile.txt create mode 100644 meson/test cases/common/220 fs module/subprojects/subbie/subsub/meson.build create mode 100644 meson/test cases/common/220 fs module/subprojects/subbie/subsub/subsubfile.txt create mode 100644 meson/test cases/common/221 zlib/meson.build create mode 100644 meson/test cases/common/222 native prop/crossfile.ini create mode 100644 meson/test cases/common/222 native prop/meson.build create mode 100644 meson/test cases/common/222 native prop/nativefile.ini create mode 100644 meson/test cases/common/223 persubproject options/foo.c create mode 100644 meson/test cases/common/223 persubproject options/meson.build create mode 100644 meson/test cases/common/223 persubproject options/subprojects/sub1/foo.c create mode 100644 meson/test cases/common/223 persubproject options/subprojects/sub1/meson.build create mode 100644 
meson/test cases/common/223 persubproject options/subprojects/sub2/foo.c create mode 100644 meson/test cases/common/223 persubproject options/subprojects/sub2/meson.build create mode 100644 meson/test cases/common/223 persubproject options/test.json create mode 100644 meson/test cases/common/224 arithmetic operators/meson.build create mode 100644 meson/test cases/common/225 link language/c_linkage.cpp create mode 100644 meson/test cases/common/225 link language/c_linkage.h create mode 100644 meson/test cases/common/225 link language/lib.cpp create mode 100644 meson/test cases/common/225 link language/main.c create mode 100644 meson/test cases/common/225 link language/meson.build create mode 100644 meson/test cases/common/226 link depends indexed custom target/check_arch.py create mode 100644 meson/test cases/common/226 link depends indexed custom target/foo.c create mode 100644 meson/test cases/common/226 link depends indexed custom target/make_file.py create mode 100644 meson/test cases/common/226 link depends indexed custom target/meson.build create mode 100755 meson/test cases/common/227 very long commmand line/codegen.py create mode 100644 meson/test cases/common/227 very long commmand line/main.c create mode 100644 meson/test cases/common/227 very long commmand line/meson.build create mode 100755 meson/test cases/common/227 very long commmand line/name_gen.py create mode 100644 meson/test cases/common/228 custom_target source/a create mode 100644 meson/test cases/common/228 custom_target source/meson.build create mode 100644 meson/test cases/common/228 custom_target source/x.py create mode 100644 meson/test cases/common/229 disabler array addition/meson.build create mode 100644 meson/test cases/common/229 disabler array addition/test.c create mode 100644 meson/test cases/common/23 endian/meson.build create mode 100644 meson/test cases/common/23 endian/prog.c create mode 100644 meson/test cases/common/230 external project/app.c create mode 100644 meson/test cases/common/230 external project/func.c create mode 100644 meson/test cases/common/230 external project/func.h create mode 100755 meson/test cases/common/230 external project/libfoo/configure create mode 100644 meson/test cases/common/230 external project/libfoo/libfoo.c create mode 100644 meson/test cases/common/230 external project/libfoo/libfoo.h create mode 100644 meson/test cases/common/230 external project/libfoo/meson.build create mode 100644 meson/test cases/common/230 external project/meson.build create mode 100644 meson/test cases/common/230 external project/test.json create mode 100644 meson/test cases/common/231 subdir files/meson.build create mode 100644 meson/test cases/common/231 subdir files/subdir/meson.build create mode 100644 meson/test cases/common/231 subdir files/subdir/prog.c create mode 100644 meson/test cases/common/232 dependency allow_fallback/meson.build create mode 100644 meson/test cases/common/232 dependency allow_fallback/subprojects/foob/meson.build create mode 100644 meson/test cases/common/232 dependency allow_fallback/subprojects/foob3/meson.build create mode 100644 meson/test cases/common/233 wrap case/meson.build create mode 100644 meson/test cases/common/233 wrap case/prog.c create mode 100644 meson/test cases/common/233 wrap case/subprojects/up_down.wrap create mode 100644 meson/test cases/common/233 wrap case/subprojects/up_down/meson.build create mode 100644 meson/test cases/common/233 wrap case/subprojects/up_down/up_down.h create mode 100644 meson/test cases/common/234 
get_file_contents/.gitattributes create mode 100644 meson/test cases/common/234 get_file_contents/VERSION create mode 100644 meson/test cases/common/234 get_file_contents/meson.build create mode 100644 meson/test cases/common/234 get_file_contents/other/meson.build create mode 100644 meson/test cases/common/234 get_file_contents/utf-16-text create mode 100644 meson/test cases/common/235 invalid standard overriden to valid/main.c create mode 100644 meson/test cases/common/235 invalid standard overriden to valid/meson.build create mode 100644 meson/test cases/common/235 invalid standard overriden to valid/test.json create mode 100644 meson/test cases/common/236 proper args splitting/main.c create mode 100644 meson/test cases/common/236 proper args splitting/meson.build create mode 100644 meson/test cases/common/236 proper args splitting/test.json create mode 100644 meson/test cases/common/237 fstrings/meson.build create mode 100644 meson/test cases/common/238 dependency include_type inconsistency/bar/meson.build create mode 100644 meson/test cases/common/238 dependency include_type inconsistency/meson.build create mode 100644 meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz.wrap create mode 100644 meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz/meson.build create mode 100644 meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo.wrap create mode 100644 meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo/meson.build create mode 100644 meson/test cases/common/239 includedir violation/meson.build create mode 100644 meson/test cases/common/239 includedir violation/subprojects/sub/include/placeholder.h create mode 100644 meson/test cases/common/239 includedir violation/subprojects/sub/meson.build create mode 100644 meson/test cases/common/239 includedir violation/test.json create mode 100644 meson/test cases/common/24 library versions/lib.c create mode 100644 meson/test cases/common/24 library versions/meson.build create mode 100644 meson/test cases/common/24 library versions/subdir/meson.build create mode 100644 meson/test cases/common/24 library versions/test.json create mode 100644 meson/test cases/common/240 dependency native host == build/meson.build create mode 100644 meson/test cases/common/240 dependency native host == build/test.json create mode 100644 meson/test cases/common/241 set and get variable/meson.build create mode 100644 meson/test cases/common/241 set and get variable/test1.txt create mode 100644 meson/test cases/common/241 set and get variable/test2.txt create mode 100644 meson/test cases/common/242 custom target feed/data_source.txt create mode 100644 meson/test cases/common/242 custom target feed/meson.build create mode 100755 meson/test cases/common/242 custom target feed/my_compiler.py create mode 100644 meson/test cases/common/242 custom target feed/test.json create mode 100644 meson/test cases/common/243 escape++/meson.build create mode 100644 meson/test cases/common/243 escape++/test.c create mode 100644 meson/test cases/common/25 config subdir/include/config.h.in create mode 100644 meson/test cases/common/25 config subdir/include/meson.build create mode 100644 meson/test cases/common/25 config subdir/meson.build create mode 100644 meson/test cases/common/25 config subdir/src/meson.build create mode 100644 meson/test cases/common/25 config subdir/src/prog.c create mode 100644 meson/test cases/common/26 find program/meson.build create mode 100644 
meson/test cases/common/26 find program/print-version-with-prefix.py create mode 100644 meson/test cases/common/26 find program/print-version.py create mode 100644 meson/test cases/common/26 find program/scripts/test_subdir.py create mode 100644 meson/test cases/common/26 find program/source.in create mode 100644 meson/test cases/common/27 multiline string/meson.build create mode 100644 meson/test cases/common/28 try compile/invalid.c create mode 100644 meson/test cases/common/28 try compile/meson.build create mode 100644 meson/test cases/common/28 try compile/valid.c create mode 100644 meson/test cases/common/29 compiler id/meson.build create mode 100644 meson/test cases/common/3 static/libfile.c create mode 100644 meson/test cases/common/3 static/libfile2.c create mode 100644 meson/test cases/common/3 static/meson.build create mode 100644 meson/test cases/common/3 static/meson_options.txt create mode 100644 meson/test cases/common/30 sizeof/config.h.in create mode 100644 meson/test cases/common/30 sizeof/meson.build create mode 100644 meson/test cases/common/30 sizeof/prog.c.in create mode 100644 meson/test cases/common/31 define10/config.h.in create mode 100644 meson/test cases/common/31 define10/meson.build create mode 100644 meson/test cases/common/31 define10/prog.c create mode 100644 meson/test cases/common/32 has header/meson.build create mode 100644 meson/test cases/common/32 has header/ouagadougou.h create mode 100644 meson/test cases/common/33 run program/check-env.py create mode 100644 meson/test cases/common/33 run program/get-version.py create mode 100644 meson/test cases/common/33 run program/meson.build create mode 100644 meson/test cases/common/33 run program/scripts/hello.bat create mode 100755 meson/test cases/common/33 run program/scripts/hello.sh create mode 100644 meson/test cases/common/34 logic ops/meson.build create mode 100644 meson/test cases/common/35 string operations/meson.build create mode 100644 meson/test cases/common/36 has function/meson.build create mode 100644 meson/test cases/common/37 has member/meson.build create mode 100644 meson/test cases/common/38 alignment/meson.build create mode 100644 meson/test cases/common/39 library chain/main.c create mode 100644 meson/test cases/common/39 library chain/meson.build create mode 100644 meson/test cases/common/39 library chain/subdir/lib1.c create mode 100644 meson/test cases/common/39 library chain/subdir/meson.build create mode 100644 meson/test cases/common/39 library chain/subdir/subdir2/lib2.c create mode 100644 meson/test cases/common/39 library chain/subdir/subdir2/meson.build create mode 100644 meson/test cases/common/39 library chain/subdir/subdir3/lib3.c create mode 100644 meson/test cases/common/39 library chain/subdir/subdir3/meson.build create mode 100644 meson/test cases/common/39 library chain/test.json create mode 100644 meson/test cases/common/4 shared/libfile.c create mode 100644 meson/test cases/common/4 shared/meson.build create mode 100644 meson/test cases/common/40 options/meson.build create mode 100644 meson/test cases/common/40 options/meson_options.txt create mode 100644 meson/test cases/common/41 test args/cmd_args.c create mode 100644 meson/test cases/common/41 test args/copyfile.py create mode 100644 meson/test cases/common/41 test args/env2vars.c create mode 100644 meson/test cases/common/41 test args/envvars.c create mode 100644 meson/test cases/common/41 test args/meson.build create mode 100644 meson/test cases/common/41 test args/tester.c create mode 100755 meson/test 
cases/common/41 test args/tester.py create mode 100644 meson/test cases/common/41 test args/testfile.txt create mode 100644 meson/test cases/common/42 subproject/meson.build create mode 100644 meson/test cases/common/42 subproject/subprojects/sublib/include/subdefs.h create mode 100644 meson/test cases/common/42 subproject/subprojects/sublib/meson.build create mode 100644 meson/test cases/common/42 subproject/subprojects/sublib/simpletest.c create mode 100644 meson/test cases/common/42 subproject/subprojects/sublib/sublib.c create mode 100644 meson/test cases/common/42 subproject/test.json create mode 100644 meson/test cases/common/42 subproject/user.c create mode 100644 meson/test cases/common/43 subproject options/meson.build create mode 100644 meson/test cases/common/43 subproject options/meson_options.txt create mode 100644 meson/test cases/common/43 subproject options/subprojects/subproject/meson.build create mode 100644 meson/test cases/common/43 subproject options/subprojects/subproject/meson_options.txt create mode 100644 meson/test cases/common/44 pkgconfig-gen/dependencies/custom.c create mode 100644 meson/test cases/common/44 pkgconfig-gen/dependencies/exposed.c create mode 100644 meson/test cases/common/44 pkgconfig-gen/dependencies/internal.c create mode 100644 meson/test cases/common/44 pkgconfig-gen/dependencies/main.c create mode 100644 meson/test cases/common/44 pkgconfig-gen/dependencies/meson.build create mode 100644 meson/test cases/common/44 pkgconfig-gen/meson.build create mode 100644 meson/test cases/common/44 pkgconfig-gen/simple.c create mode 100644 meson/test cases/common/44 pkgconfig-gen/simple.h create mode 100644 meson/test cases/common/44 pkgconfig-gen/simple5.c create mode 100644 meson/test cases/common/44 pkgconfig-gen/test.json create mode 100644 meson/test cases/common/45 custom install dirs/datafile.cat create mode 100644 meson/test cases/common/45 custom install dirs/meson.build create mode 100644 meson/test cases/common/45 custom install dirs/prog.1 create mode 100644 meson/test cases/common/45 custom install dirs/prog.c create mode 100644 meson/test cases/common/45 custom install dirs/sample.h create mode 100644 meson/test cases/common/45 custom install dirs/subdir/datafile.dog create mode 100644 meson/test cases/common/45 custom install dirs/test.json create mode 100644 meson/test cases/common/46 subproject subproject/meson.build create mode 100644 meson/test cases/common/46 subproject subproject/prog.c create mode 100644 meson/test cases/common/46 subproject subproject/subprojects/a/a.c create mode 100644 meson/test cases/common/46 subproject subproject/subprojects/a/meson.build create mode 100644 meson/test cases/common/46 subproject subproject/subprojects/b/b.c create mode 100644 meson/test cases/common/46 subproject subproject/subprojects/b/meson.build create mode 100644 meson/test cases/common/46 subproject subproject/subprojects/c/meson.build create mode 100644 meson/test cases/common/47 same file name/d1/file.c create mode 100644 meson/test cases/common/47 same file name/d2/file.c create mode 100644 meson/test cases/common/47 same file name/meson.build create mode 100644 meson/test cases/common/47 same file name/prog.c create mode 100644 meson/test cases/common/48 file grabber/a.c create mode 100644 meson/test cases/common/48 file grabber/b.c create mode 100644 meson/test cases/common/48 file grabber/c.c create mode 100644 meson/test cases/common/48 file grabber/grabber.bat create mode 100755 meson/test cases/common/48 file grabber/grabber.sh 
create mode 100644 meson/test cases/common/48 file grabber/grabber2.bat create mode 100644 meson/test cases/common/48 file grabber/meson.build create mode 100644 meson/test cases/common/48 file grabber/prog.c create mode 100644 meson/test cases/common/48 file grabber/subdir/meson.build create mode 100644 meson/test cases/common/48 file grabber/subdir/suba.c create mode 100644 meson/test cases/common/48 file grabber/subdir/subb.c create mode 100644 meson/test cases/common/48 file grabber/subdir/subc.c create mode 100644 meson/test cases/common/48 file grabber/subdir/subprog.c create mode 100644 meson/test cases/common/49 custom target/data_source.txt create mode 100755 meson/test cases/common/49 custom target/depfile/dep.py create mode 100644 meson/test cases/common/49 custom target/depfile/meson.build create mode 100644 meson/test cases/common/49 custom target/meson.build create mode 100755 meson/test cases/common/49 custom target/my_compiler.py create mode 100644 meson/test cases/common/49 custom target/test.json create mode 100644 meson/test cases/common/5 linkstatic/libfile.c create mode 100644 meson/test cases/common/5 linkstatic/libfile2.c create mode 100644 meson/test cases/common/5 linkstatic/libfile3.c create mode 100644 meson/test cases/common/5 linkstatic/libfile4.c create mode 100644 meson/test cases/common/5 linkstatic/main.c create mode 100644 meson/test cases/common/5 linkstatic/meson.build create mode 100644 meson/test cases/common/50 custom target chain/data_source.txt create mode 100644 meson/test cases/common/50 custom target chain/meson.build create mode 100755 meson/test cases/common/50 custom target chain/my_compiler.py create mode 100755 meson/test cases/common/50 custom target chain/my_compiler2.py create mode 100644 meson/test cases/common/50 custom target chain/test.json create mode 100644 meson/test cases/common/50 custom target chain/usetarget/meson.build create mode 100644 meson/test cases/common/50 custom target chain/usetarget/myexe.c create mode 100755 meson/test cases/common/50 custom target chain/usetarget/subcomp.py create mode 100644 meson/test cases/common/51 run target/check-env.py create mode 100755 meson/test cases/common/51 run target/check_exists.py create mode 100755 meson/test cases/common/51 run target/configure.in create mode 100644 meson/test cases/common/51 run target/converter.py create mode 100755 meson/test cases/common/51 run target/fakeburner.py create mode 100644 meson/test cases/common/51 run target/helloprinter.c create mode 100644 meson/test cases/common/51 run target/meson.build create mode 100644 meson/test cases/common/51 run target/subdir/textprinter.py create mode 100644 meson/test cases/common/52 object generator/meson.build create mode 100755 meson/test cases/common/52 object generator/obj_generator.py create mode 100644 meson/test cases/common/52 object generator/prog.c create mode 100644 meson/test cases/common/52 object generator/source.c create mode 100644 meson/test cases/common/52 object generator/source2.c create mode 100644 meson/test cases/common/52 object generator/source3.c create mode 100755 meson/test cases/common/53 install script/customtarget.py create mode 100644 meson/test cases/common/53 install script/meson.build create mode 100755 meson/test cases/common/53 install script/myinstall.py create mode 100644 meson/test cases/common/53 install script/prog.c create mode 100644 meson/test cases/common/53 install script/src/a file.txt create mode 100644 meson/test cases/common/53 install script/src/foo.c create mode 
100644 meson/test cases/common/53 install script/src/meson.build create mode 100644 meson/test cases/common/53 install script/src/myinstall.py create mode 100644 meson/test cases/common/53 install script/test.json create mode 100755 meson/test cases/common/54 custom target source output/generator.py create mode 100644 meson/test cases/common/54 custom target source output/main.c create mode 100644 meson/test cases/common/54 custom target source output/meson.build create mode 100644 meson/test cases/common/55 exe static shared/meson.build create mode 100644 meson/test cases/common/55 exe static shared/prog.c create mode 100644 meson/test cases/common/55 exe static shared/shlib2.c create mode 100644 meson/test cases/common/55 exe static shared/stat.c create mode 100644 meson/test cases/common/55 exe static shared/stat2.c create mode 100644 meson/test cases/common/55 exe static shared/subdir/exports.h create mode 100644 meson/test cases/common/55 exe static shared/subdir/meson.build create mode 100644 meson/test cases/common/55 exe static shared/subdir/shlib.c create mode 100644 meson/test cases/common/56 array methods/a.txt create mode 100644 meson/test cases/common/56 array methods/b.txt create mode 100644 meson/test cases/common/56 array methods/c.txt create mode 100644 meson/test cases/common/56 array methods/meson.build create mode 100644 meson/test cases/common/57 custom header generator/input.def create mode 100644 meson/test cases/common/57 custom header generator/makeheader.py create mode 100644 meson/test cases/common/57 custom header generator/meson.build create mode 100644 meson/test cases/common/57 custom header generator/prog.c create mode 100644 meson/test cases/common/57 custom header generator/somefile.txt create mode 100644 meson/test cases/common/58 multiple generators/data2.dat create mode 100644 meson/test cases/common/58 multiple generators/main.cpp create mode 100644 meson/test cases/common/58 multiple generators/meson.build create mode 100755 meson/test cases/common/58 multiple generators/mygen.py create mode 100644 meson/test cases/common/58 multiple generators/subdir/data.dat create mode 100644 meson/test cases/common/58 multiple generators/subdir/meson.build create mode 100644 meson/test cases/common/59 install subdir/meson.build create mode 100644 meson/test cases/common/59 install subdir/nested_elided/sub/dircheck/ninth.dat create mode 100644 meson/test cases/common/59 install subdir/nested_elided/sub/eighth.dat create mode 100644 meson/test cases/common/59 install subdir/sub/sub1/third.dat create mode 100644 meson/test cases/common/59 install subdir/sub1/second.dat create mode 100644 meson/test cases/common/59 install subdir/sub2/dircheck/excluded-three.dat create mode 100644 meson/test cases/common/59 install subdir/sub2/excluded-three.dat create mode 100644 meson/test cases/common/59 install subdir/sub2/excluded/two.dat create mode 100644 meson/test cases/common/59 install subdir/sub2/one.dat create mode 100644 meson/test cases/common/59 install subdir/sub_elided/dircheck/fifth.dat create mode 100644 meson/test cases/common/59 install subdir/sub_elided/fourth.dat create mode 100644 meson/test cases/common/59 install subdir/subdir/meson.build create mode 100644 meson/test cases/common/59 install subdir/subdir/sub1/data1.dat create mode 100644 meson/test cases/common/59 install subdir/subdir/sub1/sub2/data2.dat create mode 100644 meson/test cases/common/59 install subdir/subdir/sub_elided/dircheck/seventh.dat create mode 100644 meson/test cases/common/59 install 
subdir/subdir/sub_elided/sixth.dat create mode 100644 meson/test cases/common/59 install subdir/test.json create mode 100644 meson/test cases/common/6 linkshared/cpplib.cpp create mode 100644 meson/test cases/common/6 linkshared/cpplib.h create mode 100644 meson/test cases/common/6 linkshared/cppmain.cpp create mode 100644 meson/test cases/common/6 linkshared/libfile.c create mode 100644 meson/test cases/common/6 linkshared/main.c create mode 100644 meson/test cases/common/6 linkshared/meson.build create mode 100644 meson/test cases/common/6 linkshared/test.json create mode 100644 meson/test cases/common/60 foreach/meson.build create mode 100644 meson/test cases/common/60 foreach/prog1.c create mode 100644 meson/test cases/common/60 foreach/prog2.c create mode 100644 meson/test cases/common/60 foreach/prog3.c create mode 100644 meson/test cases/common/60 foreach/test.json create mode 100644 meson/test cases/common/61 number arithmetic/meson.build create mode 100644 meson/test cases/common/62 string arithmetic/meson.build create mode 100644 meson/test cases/common/63 array arithmetic/meson.build create mode 100644 meson/test cases/common/64 arithmetic bidmas/meson.build create mode 100644 meson/test cases/common/65 build always/main.c create mode 100644 meson/test cases/common/65 build always/meson.build create mode 100644 meson/test cases/common/65 build always/version.c.in create mode 100644 meson/test cases/common/65 build always/version.h create mode 100755 meson/test cases/common/65 build always/version_gen.py create mode 100644 meson/test cases/common/66 vcstag/meson.build create mode 100644 meson/test cases/common/66 vcstag/tagprog.c create mode 100644 meson/test cases/common/66 vcstag/vcstag.c.in create mode 100644 meson/test cases/common/67 modules/meson.build create mode 100644 meson/test cases/common/67 modules/meson_options.txt create mode 100644 meson/test cases/common/68 should fail/failing.c create mode 100644 meson/test cases/common/68 should fail/meson.build create mode 100644 meson/test cases/common/69 configure file in custom target/inc/confdata.in create mode 100644 meson/test cases/common/69 configure file in custom target/inc/meson.build create mode 100644 meson/test cases/common/69 configure file in custom target/meson.build create mode 100644 meson/test cases/common/69 configure file in custom target/src/meson.build create mode 100644 meson/test cases/common/69 configure file in custom target/src/mycompiler.py create mode 100644 meson/test cases/common/7 mixed/func.c create mode 100644 meson/test cases/common/7 mixed/main.cc create mode 100644 meson/test cases/common/7 mixed/meson.build create mode 100644 meson/test cases/common/70 external test program/meson.build create mode 100755 meson/test cases/common/70 external test program/mytest.py create mode 100755 meson/test cases/common/71 ctarget dependency/gen1.py create mode 100755 meson/test cases/common/71 ctarget dependency/gen2.py create mode 100644 meson/test cases/common/71 ctarget dependency/input.dat create mode 100644 meson/test cases/common/71 ctarget dependency/meson.build create mode 100644 meson/test cases/common/72 shared subproject/a.c create mode 100644 meson/test cases/common/72 shared subproject/meson.build create mode 100644 meson/test cases/common/72 shared subproject/subprojects/B/b.c create mode 100644 meson/test cases/common/72 shared subproject/subprojects/B/meson.build create mode 100644 meson/test cases/common/72 shared subproject/subprojects/C/c.c create mode 100644 meson/test 
cases/common/72 shared subproject/subprojects/C/meson.build create mode 100644 meson/test cases/common/73 shared subproject 2/a.c create mode 100644 meson/test cases/common/73 shared subproject 2/meson.build create mode 100644 meson/test cases/common/73 shared subproject 2/subprojects/B/b.c create mode 100644 meson/test cases/common/73 shared subproject 2/subprojects/B/meson.build create mode 100644 meson/test cases/common/73 shared subproject 2/subprojects/C/c.c create mode 100644 meson/test cases/common/73 shared subproject 2/subprojects/C/meson.build create mode 100644 meson/test cases/common/74 file object/lib.c create mode 100644 meson/test cases/common/74 file object/meson.build create mode 100644 meson/test cases/common/74 file object/prog.c create mode 100644 meson/test cases/common/74 file object/subdir1/lib.c create mode 100644 meson/test cases/common/74 file object/subdir1/meson.build create mode 100644 meson/test cases/common/74 file object/subdir1/prog.c create mode 100644 meson/test cases/common/74 file object/subdir2/lib.c create mode 100644 meson/test cases/common/74 file object/subdir2/meson.build create mode 100644 meson/test cases/common/74 file object/subdir2/prog.c create mode 100644 meson/test cases/common/75 custom subproject dir/a.c create mode 100644 meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/b.c create mode 100644 meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/meson.build create mode 100644 meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/c.c create mode 100644 meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/meson.build create mode 100644 meson/test cases/common/75 custom subproject dir/meson.build create mode 100644 meson/test cases/common/76 has type/meson.build create mode 100644 meson/test cases/common/77 extract from nested subdir/meson.build create mode 100644 meson/test cases/common/77 extract from nested subdir/src/first/lib_first.c create mode 100644 meson/test cases/common/77 extract from nested subdir/src/first/meson.build create mode 100644 meson/test cases/common/77 extract from nested subdir/src/meson.build create mode 100644 meson/test cases/common/77 extract from nested subdir/tst/first/exe_first.c create mode 100644 meson/test cases/common/77 extract from nested subdir/tst/first/meson.build create mode 100644 meson/test cases/common/77 extract from nested subdir/tst/meson.build create mode 100644 meson/test cases/common/78 internal dependency/meson.build create mode 100644 meson/test cases/common/78 internal dependency/proj1/include/proj1.h create mode 100644 meson/test cases/common/78 internal dependency/proj1/meson.build create mode 100644 meson/test cases/common/78 internal dependency/proj1/proj1f1.c create mode 100644 meson/test cases/common/78 internal dependency/proj1/proj1f2.c create mode 100644 meson/test cases/common/78 internal dependency/proj1/proj1f3.c create mode 100644 meson/test cases/common/78 internal dependency/src/main.c create mode 100644 meson/test cases/common/78 internal dependency/src/meson.build create mode 100644 meson/test cases/common/79 same basename/exe1.c create mode 100644 meson/test cases/common/79 same basename/exe2.c create mode 100644 meson/test cases/common/79 same basename/lib.c create mode 100644 meson/test cases/common/79 same basename/meson.build create mode 100644 meson/test cases/common/79 same basename/sharedsub/meson.build create mode 100644 meson/test cases/common/79 same 
basename/staticsub/meson.build create mode 100755 meson/test cases/common/8 install/gendir.py create mode 100644 meson/test cases/common/8 install/meson.build create mode 100644 meson/test cases/common/8 install/prog.c create mode 100644 meson/test cases/common/8 install/stat.c create mode 100644 meson/test cases/common/8 install/test.json create mode 100644 meson/test cases/common/80 declare dep/entity/entity.h create mode 100644 meson/test cases/common/80 declare dep/entity/entity1.c create mode 100644 meson/test cases/common/80 declare dep/entity/entity2.c create mode 100644 meson/test cases/common/80 declare dep/entity/meson.build create mode 100644 meson/test cases/common/80 declare dep/main.c create mode 100644 meson/test cases/common/80 declare dep/meson.build create mode 100644 meson/test cases/common/81 extract all/extractor.h create mode 100644 meson/test cases/common/81 extract all/four.c create mode 100644 meson/test cases/common/81 extract all/meson.build create mode 100644 meson/test cases/common/81 extract all/one.c create mode 100644 meson/test cases/common/81 extract all/prog.c create mode 100644 meson/test cases/common/81 extract all/three.c create mode 100644 meson/test cases/common/81 extract all/two.c create mode 100644 meson/test cases/common/82 add language/meson.build create mode 100644 meson/test cases/common/82 add language/prog.c create mode 100644 meson/test cases/common/82 add language/prog.cc create mode 100644 meson/test cases/common/83 identical target name in subproject/bar.c create mode 100644 meson/test cases/common/83 identical target name in subproject/meson.build create mode 100644 meson/test cases/common/83 identical target name in subproject/subprojects/foo/bar.c create mode 100644 meson/test cases/common/83 identical target name in subproject/subprojects/foo/meson.build create mode 100644 meson/test cases/common/83 identical target name in subproject/subprojects/foo/true.py create mode 100644 meson/test cases/common/83 identical target name in subproject/true.py create mode 100644 meson/test cases/common/84 plusassign/meson.build create mode 100644 meson/test cases/common/85 skip subdir/meson.build create mode 100644 meson/test cases/common/85 skip subdir/subdir1/meson.build create mode 100644 meson/test cases/common/85 skip subdir/subdir1/subdir2/meson.build create mode 100644 meson/test cases/common/86 private include/meson.build create mode 100755 meson/test cases/common/86 private include/stlib/compiler.py create mode 100644 meson/test cases/common/86 private include/stlib/foo1.def create mode 100644 meson/test cases/common/86 private include/stlib/foo2.def create mode 100644 meson/test cases/common/86 private include/stlib/meson.build create mode 100644 meson/test cases/common/86 private include/user/libuser.c create mode 100644 meson/test cases/common/86 private include/user/meson.build create mode 100644 meson/test cases/common/87 default options/meson.build create mode 100644 meson/test cases/common/87 default options/subprojects/sub1/meson.build create mode 100644 meson/test cases/common/87 default options/subprojects/sub1/meson_options.txt create mode 100644 meson/test cases/common/88 dep fallback/gensrc.py create mode 100644 meson/test cases/common/88 dep fallback/meson.build create mode 100644 meson/test cases/common/88 dep fallback/subprojects/boblib/bob.c create mode 100644 meson/test cases/common/88 dep fallback/subprojects/boblib/bob.h create mode 100644 meson/test cases/common/88 dep fallback/subprojects/boblib/genbob.py create mode 
100644 meson/test cases/common/88 dep fallback/subprojects/boblib/meson.build create mode 100644 meson/test cases/common/88 dep fallback/subprojects/dummylib/meson.build create mode 100644 meson/test cases/common/88 dep fallback/tester.c create mode 100644 meson/test cases/common/89 default library/ef.cpp create mode 100644 meson/test cases/common/89 default library/ef.h create mode 100644 meson/test cases/common/89 default library/eftest.cpp create mode 100644 meson/test cases/common/89 default library/meson.build create mode 100644 meson/test cases/common/9 header install/meson.build create mode 100644 meson/test cases/common/9 header install/rootdir.h create mode 100644 meson/test cases/common/9 header install/sub/fileheader.h create mode 100644 meson/test cases/common/9 header install/sub/meson.build create mode 100644 meson/test cases/common/9 header install/subdir.h create mode 100644 meson/test cases/common/9 header install/test.json create mode 100644 meson/test cases/common/9 header install/vanishing_subdir/meson.build create mode 100644 meson/test cases/common/9 header install/vanishing_subdir/vanished.h create mode 100644 meson/test cases/common/90 gen extra/meson.build create mode 100644 meson/test cases/common/90 gen extra/name.dat create mode 100644 meson/test cases/common/90 gen extra/name.l create mode 100644 meson/test cases/common/90 gen extra/plain.c create mode 100755 meson/test cases/common/90 gen extra/srcgen.py create mode 100644 meson/test cases/common/90 gen extra/srcgen2.py create mode 100644 meson/test cases/common/90 gen extra/srcgen3.py create mode 100644 meson/test cases/common/90 gen extra/upper.c create mode 100644 meson/test cases/common/91 benchmark/delayer.c create mode 100644 meson/test cases/common/91 benchmark/meson.build create mode 100644 meson/test cases/common/92 test workdir/meson.build create mode 100644 meson/test cases/common/92 test workdir/opener.c create mode 100755 meson/test cases/common/92 test workdir/subdir/checker.py create mode 100644 meson/test cases/common/92 test workdir/subdir/meson.build create mode 100644 meson/test cases/common/93 suites/exe1.c create mode 100644 meson/test cases/common/93 suites/exe2.c create mode 100644 meson/test cases/common/93 suites/meson.build create mode 100644 meson/test cases/common/93 suites/subprojects/sub/meson.build create mode 100644 meson/test cases/common/93 suites/subprojects/sub/sub1.c create mode 100644 meson/test cases/common/93 suites/subprojects/sub/sub2.c create mode 100644 meson/test cases/common/94 threads/meson.build create mode 100644 meson/test cases/common/94 threads/threadprog.c create mode 100644 meson/test cases/common/94 threads/threadprog.cpp create mode 100644 meson/test cases/common/95 manygen/depuser.c create mode 100644 meson/test cases/common/95 manygen/meson.build create mode 100644 meson/test cases/common/95 manygen/subdir/funcinfo.def create mode 100755 meson/test cases/common/95 manygen/subdir/manygen.py create mode 100644 meson/test cases/common/95 manygen/subdir/meson.build create mode 100644 meson/test cases/common/96 stringdef/meson.build create mode 100644 meson/test cases/common/96 stringdef/stringdef.c create mode 100644 meson/test cases/common/97 find program path/meson.build create mode 100755 meson/test cases/common/97 find program path/program.py create mode 100644 meson/test cases/common/98 subproject subdir/meson.build create mode 100644 meson/test cases/common/98 subproject subdir/prog.c create mode 100644 meson/test cases/common/98 subproject 
subdir/subprojects/sub/lib/meson.build create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.c create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.h create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub/meson.build create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_implicit.wrap create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson.build create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson_options.txt create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/foo.h create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/packagefiles/subsubsub-1.0.zip create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap create mode 100644 meson/test cases/common/98 subproject subdir/subprojects/sub_novar/meson.build create mode 100644 meson/test cases/common/98 subproject subdir/test.json create mode 100644 meson/test cases/common/99 postconf/meson.build create mode 100644 meson/test cases/common/99 postconf/postconf.py create mode 100644 meson/test cases/common/99 postconf/prog.c create mode 100644 meson/test cases/common/99 postconf/raw.dat create mode 100644 meson/test cases/csharp/1 basic/meson.build create mode 100644 meson/test cases/csharp/1 basic/prog.cs create mode 100644 meson/test cases/csharp/1 basic/test.json create mode 100644 meson/test cases/csharp/1 basic/text.cs create mode 100644 meson/test cases/csharp/2 library/helper.cs create mode 100644 meson/test cases/csharp/2 library/meson.build create mode 100644 meson/test cases/csharp/2 library/prog.cs create mode 100644 meson/test cases/csharp/2 library/test.json create mode 100644 meson/test cases/csharp/3 resource/TestRes.resx create mode 100644 meson/test cases/csharp/3 resource/meson.build create mode 100644 meson/test cases/csharp/3 resource/resprog.cs create mode 100644 meson/test cases/csharp/4 external dep/hello.txt create mode 100644 meson/test cases/csharp/4 external dep/meson.build create mode 100644 meson/test cases/csharp/4 external dep/prog.cs create mode 100644 meson/test cases/csharp/4 external dep/test.json create mode 100644 meson/test cases/cuda/1 simple/meson.build create mode 100644 meson/test cases/cuda/1 simple/prog.cu create mode 100644 meson/test cases/cuda/10 cuda dependency/c/meson.build create mode 100644 meson/test cases/cuda/10 cuda dependency/c/prog.c create mode 100644 meson/test cases/cuda/10 cuda dependency/cpp/meson.build create mode 100644 meson/test cases/cuda/10 cuda dependency/cpp/prog.cc create mode 100644 meson/test cases/cuda/10 cuda dependency/meson.build create mode 100644 meson/test cases/cuda/10 cuda dependency/modules/meson.build create mode 100644 meson/test cases/cuda/10 cuda dependency/modules/prog.cc create mode 100644 meson/test cases/cuda/10 cuda dependency/version_reqs/meson.build create mode 100644 meson/test cases/cuda/10 cuda dependency/version_reqs/prog.cc create mode 100644 meson/test cases/cuda/11 cuda dependency (nvcc)/meson.build create mode 100644 meson/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build create mode 100644 meson/test cases/cuda/11 cuda 
dependency (nvcc)/modules/prog.cu create mode 100644 meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/meson.build create mode 100644 meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/prog.cu create mode 100644 meson/test cases/cuda/12 cuda dependency (mixed)/kernel.cu create mode 100644 meson/test cases/cuda/12 cuda dependency (mixed)/meson.build create mode 100644 meson/test cases/cuda/12 cuda dependency (mixed)/prog.cpp create mode 100644 meson/test cases/cuda/13 cuda compiler setting/meson.build create mode 100644 meson/test cases/cuda/13 cuda compiler setting/nativefile.ini create mode 100644 meson/test cases/cuda/13 cuda compiler setting/prog.cu create mode 100644 meson/test cases/cuda/14 cuda has header symbol/meson.build create mode 100644 meson/test cases/cuda/15 sanitizer/meson.build create mode 100644 meson/test cases/cuda/15 sanitizer/prog.cu create mode 100644 meson/test cases/cuda/16 multistd/main.cu create mode 100644 meson/test cases/cuda/16 multistd/meson.build create mode 100644 meson/test cases/cuda/2 split/lib.cu create mode 100644 meson/test cases/cuda/2 split/main.cpp create mode 100644 meson/test cases/cuda/2 split/meson.build create mode 100644 meson/test cases/cuda/2 split/static/lib.cu create mode 100644 meson/test cases/cuda/2 split/static/libsta.cu create mode 100644 meson/test cases/cuda/2 split/static/main_static.cpp create mode 100644 meson/test cases/cuda/2 split/static/meson.build create mode 100644 meson/test cases/cuda/3 cudamodule/meson.build create mode 100644 meson/test cases/cuda/3 cudamodule/prog.cu create mode 100644 meson/test cases/cuda/4 shared/main.cu create mode 100644 meson/test cases/cuda/4 shared/meson.build create mode 100644 meson/test cases/cuda/4 shared/shared/kernels.cu create mode 100644 meson/test cases/cuda/4 shared/shared/kernels.h create mode 100644 meson/test cases/cuda/4 shared/shared/meson.build create mode 100644 meson/test cases/cuda/5 threads/main.cu create mode 100644 meson/test cases/cuda/5 threads/meson.build create mode 100644 meson/test cases/cuda/5 threads/shared/kernels.cu create mode 100644 meson/test cases/cuda/5 threads/shared/kernels.h create mode 100644 meson/test cases/cuda/5 threads/shared/meson.build create mode 100644 meson/test cases/cuda/6 std/main.cu create mode 100644 meson/test cases/cuda/6 std/meson.build create mode 100644 meson/test cases/cuda/7 static vs runtime/main.cu create mode 100644 meson/test cases/cuda/7 static vs runtime/meson.build create mode 100644 meson/test cases/cuda/8 release/main.cu create mode 100644 meson/test cases/cuda/8 release/meson.build create mode 100644 meson/test cases/cuda/9 optimize for space/main.cu create mode 100644 meson/test cases/cuda/9 optimize for space/meson.build create mode 100755 meson/test cases/cython/1 basic/cytest.py create mode 100644 meson/test cases/cython/1 basic/libdir/cstorer.pxd create mode 100644 meson/test cases/cython/1 basic/libdir/meson.build create mode 100644 meson/test cases/cython/1 basic/libdir/storer.c create mode 100644 meson/test cases/cython/1 basic/libdir/storer.h create mode 100644 meson/test cases/cython/1 basic/libdir/storer.pyx create mode 100644 meson/test cases/cython/1 basic/meson.build create mode 100644 meson/test cases/cython/2 generated sources/configure.pyx.in create mode 100644 meson/test cases/cython/2 generated sources/g.in create mode 100644 meson/test cases/cython/2 generated sources/gen.py create mode 100755 meson/test cases/cython/2 generated sources/generator.py create mode 100644 meson/test 
cases/cython/2 generated sources/libdir/gen.py create mode 100644 meson/test cases/cython/2 generated sources/libdir/meson.build create mode 100644 meson/test cases/cython/2 generated sources/meson.build create mode 100644 meson/test cases/cython/2 generated sources/test.py create mode 100644 meson/test cases/d/1 simple/app.d create mode 100644 meson/test cases/d/1 simple/meson.build create mode 100644 meson/test cases/d/1 simple/test.json create mode 100644 meson/test cases/d/1 simple/utils.d create mode 100644 meson/test cases/d/10 d cpp/cppmain.cpp create mode 100644 meson/test cases/d/10 d cpp/dmain.d create mode 100644 meson/test cases/d/10 d cpp/libfile.cpp create mode 100644 meson/test cases/d/10 d cpp/libfile.d create mode 100644 meson/test cases/d/10 d cpp/meson.build create mode 100644 meson/test cases/d/11 dub/meson.build create mode 100644 meson/test cases/d/11 dub/test.d create mode 100644 meson/test cases/d/2 static library/app.d create mode 100644 meson/test cases/d/2 static library/libstuff.d create mode 100644 meson/test cases/d/2 static library/meson.build create mode 100644 meson/test cases/d/2 static library/test.json create mode 100644 meson/test cases/d/3 shared library/app.d create mode 100644 meson/test cases/d/3 shared library/libstuff.d create mode 100644 meson/test cases/d/3 shared library/libstuff.di create mode 100644 meson/test cases/d/3 shared library/lld-test.py create mode 100644 meson/test cases/d/3 shared library/meson.build create mode 100644 meson/test cases/d/3 shared library/sub/libstuff.d create mode 100644 meson/test cases/d/3 shared library/sub/meson.build create mode 100644 meson/test cases/d/3 shared library/test.json create mode 100644 meson/test cases/d/4 library versions/lib.d create mode 100644 meson/test cases/d/4 library versions/meson.build create mode 100644 meson/test cases/d/4 library versions/test.json create mode 100644 meson/test cases/d/5 mixed/app.d create mode 100644 meson/test cases/d/5 mixed/libstuff.c create mode 100644 meson/test cases/d/5 mixed/meson.build create mode 100644 meson/test cases/d/5 mixed/test.json create mode 100644 meson/test cases/d/6 unittest/app.d create mode 100644 meson/test cases/d/6 unittest/meson.build create mode 100644 meson/test cases/d/6 unittest/second_unit.d create mode 100644 meson/test cases/d/6 unittest/test.json create mode 100644 meson/test cases/d/7 multilib/app.d create mode 100644 meson/test cases/d/7 multilib/meson.build create mode 100644 meson/test cases/d/7 multilib/say1.d create mode 100644 meson/test cases/d/7 multilib/say1.di create mode 100644 meson/test cases/d/7 multilib/say2.d create mode 100644 meson/test cases/d/7 multilib/say2.di create mode 100644 meson/test cases/d/7 multilib/test.json create mode 100644 meson/test cases/d/8 has multi arguments/meson.build create mode 100644 meson/test cases/d/9 features/app.d create mode 100644 meson/test cases/d/9 features/data/food.txt create mode 100644 meson/test cases/d/9 features/data/people.txt create mode 100644 meson/test cases/d/9 features/extra.d create mode 100644 meson/test cases/d/9 features/meson.build create mode 100644 meson/test cases/failing build/1 vala c werror/meson.build create mode 100644 meson/test cases/failing build/1 vala c werror/prog.vala create mode 100644 meson/test cases/failing build/1 vala c werror/unused-var.c create mode 100644 meson/test cases/failing build/2 hidden symbol/bob.c create mode 100644 meson/test cases/failing build/2 hidden symbol/bob.h create mode 100644 meson/test cases/failing build/2 
hidden symbol/bobuser.c create mode 100644 meson/test cases/failing build/2 hidden symbol/meson.build create mode 100644 meson/test cases/failing build/3 pch disabled/c/meson.build create mode 100644 meson/test cases/failing build/3 pch disabled/c/pch/prog.h create mode 100644 meson/test cases/failing build/3 pch disabled/c/pch/prog_pch.c create mode 100644 meson/test cases/failing build/3 pch disabled/c/prog.c create mode 100644 meson/test cases/failing build/3 pch disabled/meson.build create mode 100644 meson/test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp create mode 100644 meson/test cases/failing build/4 cmake subproject isolation/main.cpp create mode 100644 meson/test cases/failing build/4 cmake subproject isolation/meson.build create mode 100644 meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt create mode 100644 meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp create mode 100644 meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp create mode 100644 meson/test cases/failing build/5 failed pickled/false.py create mode 100644 meson/test cases/failing build/5 failed pickled/meson.build create mode 100644 meson/test cases/failing test/1 trivial/main.c create mode 100644 meson/test cases/failing test/1 trivial/meson.build create mode 100644 meson/test cases/failing test/2 signal/main.c create mode 100644 meson/test cases/failing test/2 signal/meson.build create mode 100644 meson/test cases/failing test/3 ambiguous/main.c create mode 100644 meson/test cases/failing test/3 ambiguous/meson.build create mode 100755 meson/test cases/failing test/3 ambiguous/test_runner.sh create mode 100644 meson/test cases/failing test/4 hard error/main.c create mode 100644 meson/test cases/failing test/4 hard error/meson.build create mode 100644 meson/test cases/failing test/5 tap tests/meson.build create mode 100644 meson/test cases/failing test/5 tap tests/tester.c create mode 100644 meson/test cases/failing test/5 tap tests/tester_with_status.c create mode 100644 meson/test cases/failing test/6 xpass/meson.build create mode 100644 meson/test cases/failing test/6 xpass/xpass.c create mode 100644 meson/test cases/failing/1 project not first/meson.build create mode 100644 meson/test cases/failing/1 project not first/prog.c create mode 100644 meson/test cases/failing/1 project not first/test.json create mode 100644 meson/test cases/failing/10 out of bounds/meson.build create mode 100644 meson/test cases/failing/10 out of bounds/test.json create mode 100644 meson/test cases/failing/100 no lang/main.c create mode 100644 meson/test cases/failing/100 no lang/meson.build create mode 100644 meson/test cases/failing/100 no lang/test.json create mode 100644 meson/test cases/failing/101 no glib-compile-resources/meson.build create mode 100644 meson/test cases/failing/101 no glib-compile-resources/test.json create mode 100644 meson/test cases/failing/101 no glib-compile-resources/trivial.gresource.xml create mode 100644 meson/test cases/failing/102 number in combo/meson.build create mode 100644 meson/test cases/failing/102 number in combo/nativefile.ini create mode 100644 meson/test cases/failing/102 number in combo/test.json create mode 100644 meson/test cases/failing/103 bool in combo/meson.build create mode 100644 meson/test cases/failing/103 bool in combo/meson_options.txt create mode 100644 meson/test cases/failing/103 bool in combo/nativefile.ini create mode 100644 
meson/test cases/failing/103 bool in combo/test.json create mode 100644 meson/test cases/failing/104 compiler no lang/meson.build create mode 100644 meson/test cases/failing/104 compiler no lang/test.json create mode 100644 meson/test cases/failing/105 no fallback/meson.build create mode 100644 meson/test cases/failing/105 no fallback/subprojects/foob/meson.build create mode 100644 meson/test cases/failing/105 no fallback/test.json create mode 100644 meson/test cases/failing/106 feature require/meson.build create mode 100644 meson/test cases/failing/106 feature require/meson_options.txt create mode 100644 meson/test cases/failing/106 feature require/test.json create mode 100644 meson/test cases/failing/107 no build get_external_property/meson.build create mode 100644 meson/test cases/failing/107 no build get_external_property/test.json create mode 100644 meson/test cases/failing/108 enter subdir twice/meson.build create mode 100644 meson/test cases/failing/108 enter subdir twice/sub/meson.build create mode 100644 meson/test cases/failing/108 enter subdir twice/test.json create mode 100644 meson/test cases/failing/109 invalid fstring/meson.build create mode 100644 meson/test cases/failing/109 invalid fstring/test.json create mode 100644 meson/test cases/failing/11 object arithmetic/meson.build create mode 100644 meson/test cases/failing/11 object arithmetic/test.json create mode 100644 meson/test cases/failing/110 invalid fstring/meson.build create mode 100644 meson/test cases/failing/110 invalid fstring/test.json create mode 100644 meson/test cases/failing/111 compiler argument checking/meson.build create mode 100644 meson/test cases/failing/111 compiler argument checking/test.json create mode 100644 meson/test cases/failing/112 empty fallback/meson.build create mode 100644 meson/test cases/failing/112 empty fallback/subprojects/foo/meson.build create mode 100644 meson/test cases/failing/112 empty fallback/test.json create mode 100644 meson/test cases/failing/113 cmake executable dependency/meson.build create mode 100644 meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/CMakeLists.txt create mode 100644 meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/main.c create mode 100644 meson/test cases/failing/113 cmake executable dependency/test.json create mode 100644 meson/test cases/failing/114 allow_fallback with fallback/meson.build create mode 100644 meson/test cases/failing/114 allow_fallback with fallback/test.json create mode 100644 meson/test cases/failing/12 string arithmetic/meson.build create mode 100644 meson/test cases/failing/12 string arithmetic/test.json create mode 100644 meson/test cases/failing/13 array arithmetic/meson.build create mode 100644 meson/test cases/failing/13 array arithmetic/test.json create mode 100644 meson/test cases/failing/14 invalid option name/meson.build create mode 100644 meson/test cases/failing/14 invalid option name/meson_options.txt create mode 100644 meson/test cases/failing/14 invalid option name/test.json create mode 100644 meson/test cases/failing/15 kwarg before arg/meson.build create mode 100644 meson/test cases/failing/15 kwarg before arg/prog.c create mode 100644 meson/test cases/failing/15 kwarg before arg/test.json create mode 100644 meson/test cases/failing/16 extract from subproject/main.c create mode 100644 meson/test cases/failing/16 extract from subproject/meson.build create mode 100644 meson/test cases/failing/16 extract from subproject/subprojects/sub_project/meson.build create mode 
100644 meson/test cases/failing/16 extract from subproject/subprojects/sub_project/sub_lib.c create mode 100644 meson/test cases/failing/16 extract from subproject/test.json create mode 100644 meson/test cases/failing/17 same target/file.c create mode 100644 meson/test cases/failing/17 same target/meson.build create mode 100644 meson/test cases/failing/17 same target/test.json create mode 100644 meson/test cases/failing/18 wrong plusassign/meson.build create mode 100644 meson/test cases/failing/18 wrong plusassign/test.json create mode 100644 meson/test cases/failing/19 target clash/clash.c create mode 100644 meson/test cases/failing/19 target clash/meson.build create mode 100644 meson/test cases/failing/19 target clash/test.json create mode 100644 meson/test cases/failing/2 missing file/meson.build create mode 100644 meson/test cases/failing/2 missing file/test.json create mode 100644 meson/test cases/failing/20 version/meson.build create mode 100644 meson/test cases/failing/20 version/test.json create mode 100644 meson/test cases/failing/21 subver/meson.build create mode 100644 meson/test cases/failing/21 subver/subprojects/foo/meson.build create mode 100644 meson/test cases/failing/21 subver/test.json create mode 100644 meson/test cases/failing/22 assert/meson.build create mode 100644 meson/test cases/failing/22 assert/test.json create mode 100644 meson/test cases/failing/23 rel testdir/meson.build create mode 100644 meson/test cases/failing/23 rel testdir/simple.c create mode 100644 meson/test cases/failing/23 rel testdir/test.json create mode 100644 meson/test cases/failing/24 int conversion/meson.build create mode 100644 meson/test cases/failing/24 int conversion/test.json create mode 100644 meson/test cases/failing/25 badlang/meson.build create mode 100644 meson/test cases/failing/25 badlang/test.json create mode 100644 meson/test cases/failing/26 output subdir/foo.in create mode 100644 meson/test cases/failing/26 output subdir/meson.build create mode 100644 meson/test cases/failing/26 output subdir/subdir/dummy.txt create mode 100644 meson/test cases/failing/26 output subdir/test.json create mode 100644 meson/test cases/failing/27 noprog use/meson.build create mode 100644 meson/test cases/failing/27 noprog use/test.json create mode 100644 meson/test cases/failing/28 no crossprop/meson.build create mode 100644 meson/test cases/failing/28 no crossprop/test.json create mode 100644 meson/test cases/failing/29 nested ternary/meson.build create mode 100644 meson/test cases/failing/29 nested ternary/test.json create mode 100644 meson/test cases/failing/3 missing subdir/meson.build create mode 100644 meson/test cases/failing/3 missing subdir/test.json create mode 100644 meson/test cases/failing/30 invalid man extension/foo.a1 create mode 100644 meson/test cases/failing/30 invalid man extension/meson.build create mode 100644 meson/test cases/failing/30 invalid man extension/test.json create mode 100644 meson/test cases/failing/31 no man extension/foo create mode 100644 meson/test cases/failing/31 no man extension/meson.build create mode 100644 meson/test cases/failing/31 no man extension/test.json create mode 100644 meson/test cases/failing/32 exe static shared/meson.build create mode 100644 meson/test cases/failing/32 exe static shared/prog.c create mode 100644 meson/test cases/failing/32 exe static shared/shlib2.c create mode 100644 meson/test cases/failing/32 exe static shared/stat.c create mode 100644 meson/test cases/failing/32 exe static shared/test.json create mode 100644 meson/test 
[create mode 100644/100755 entries for the newly added files under "meson/test cases/", covering the failing, fortran, fpga, frameworks, java, keyval, linuxlike, nasm, native, objc, objcpp, osx, python, python3, rewrite, rust, and swift test suites; one entry per file, full list omitted]
meson/test cases/swift/7 modulemap subdir/mylib/mylib.h create mode 100644 meson/test cases/unit/1 soname/CMakeLists.txt create mode 100644 meson/test cases/unit/1 soname/meson.build create mode 100644 meson/test cases/unit/1 soname/versioned.c create mode 100644 meson/test cases/unit/10 build_rpath/meson.build create mode 100644 meson/test cases/unit/10 build_rpath/prog.c create mode 100644 meson/test cases/unit/10 build_rpath/prog.cc create mode 100644 meson/test cases/unit/10 build_rpath/sub/meson.build create mode 100644 meson/test cases/unit/10 build_rpath/sub/stuff.c create mode 100644 meson/test cases/unit/100 rlib linkage/lib2.rs create mode 100644 meson/test cases/unit/100 rlib linkage/main.rs create mode 100644 meson/test cases/unit/100 rlib linkage/meson.build create mode 100644 meson/test cases/unit/11 cross prog/meson.build create mode 100755 meson/test cases/unit/11 cross prog/some_cross_tool.py create mode 100755 meson/test cases/unit/11 cross prog/sometool.py create mode 100644 meson/test cases/unit/12 promote/meson.build create mode 100644 meson/test cases/unit/12 promote/subprojects/s1/meson.build create mode 100644 meson/test cases/unit/12 promote/subprojects/s1/s1.c create mode 100644 meson/test cases/unit/12 promote/subprojects/s1/subprojects/s3/meson.build create mode 100644 meson/test cases/unit/12 promote/subprojects/s1/subprojects/s3/s3.c create mode 100644 meson/test cases/unit/12 promote/subprojects/s1/subprojects/scommon/meson.build create mode 100644 meson/test cases/unit/12 promote/subprojects/s1/subprojects/scommon/scommon_broken.c create mode 100644 meson/test cases/unit/12 promote/subprojects/s2/meson.build create mode 100644 meson/test cases/unit/12 promote/subprojects/s2/s2.c create mode 100644 meson/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap create mode 100644 meson/test cases/unit/12 promote/subprojects/s2/subprojects/scommon/meson.build create mode 100644 meson/test cases/unit/12 promote/subprojects/s2/subprojects/scommon/scommon_ok.c create mode 100644 meson/test cases/unit/13 reconfigure/meson.build create mode 100644 meson/test cases/unit/14 testsetup selection/main.c create mode 100644 meson/test cases/unit/14 testsetup selection/meson.build create mode 100644 meson/test cases/unit/14 testsetup selection/subprojects/bar/bar.c create mode 100644 meson/test cases/unit/14 testsetup selection/subprojects/bar/meson.build create mode 100644 meson/test cases/unit/14 testsetup selection/subprojects/foo/foo.c create mode 100644 meson/test cases/unit/14 testsetup selection/subprojects/foo/meson.build create mode 100644 meson/test cases/unit/15 prebuilt object/main.c create mode 100644 meson/test cases/unit/15 prebuilt object/meson.build create mode 100644 meson/test cases/unit/15 prebuilt object/source.c create mode 100644 meson/test cases/unit/16 prebuilt static/libdir/best.c create mode 100644 meson/test cases/unit/16 prebuilt static/libdir/best.h create mode 100644 meson/test cases/unit/16 prebuilt static/libdir/meson.build create mode 100644 meson/test cases/unit/16 prebuilt static/main.c create mode 100644 meson/test cases/unit/16 prebuilt static/meson.build create mode 100644 meson/test cases/unit/17 prebuilt shared/alexandria.c create mode 100644 meson/test cases/unit/17 prebuilt shared/alexandria.h create mode 100644 meson/test cases/unit/17 prebuilt shared/another_visitor.c create mode 100644 meson/test cases/unit/17 prebuilt shared/meson.build create mode 100644 meson/test cases/unit/17 prebuilt shared/patron.c create mode 
100644 meson/test cases/unit/18 pkgconfig static/foo.c create mode 100644 meson/test cases/unit/18 pkgconfig static/foo.pc.in create mode 100644 meson/test cases/unit/18 pkgconfig static/include/foo.h create mode 100644 meson/test cases/unit/18 pkgconfig static/main.c create mode 100644 meson/test cases/unit/18 pkgconfig static/meson.build create mode 100644 meson/test cases/unit/19 array option/meson.build create mode 100644 meson/test cases/unit/19 array option/meson_options.txt create mode 100644 meson/test cases/unit/2 testsetups/buggy.c create mode 100755 meson/test cases/unit/2 testsetups/envcheck.py create mode 100644 meson/test cases/unit/2 testsetups/impl.c create mode 100644 meson/test cases/unit/2 testsetups/impl.h create mode 100644 meson/test cases/unit/2 testsetups/meson.build create mode 100644 meson/test cases/unit/20 subproj dep variables/meson.build create mode 100644 meson/test cases/unit/20 subproj dep variables/subprojects/failingsubproj/meson.build create mode 100644 meson/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build create mode 100644 meson/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap create mode 100644 meson/test cases/unit/20 subproj dep variables/subprojects/somesubproj/meson.build create mode 100644 meson/test cases/unit/21 exit status/meson.build create mode 100644 meson/test cases/unit/22 warning location/a.c create mode 100644 meson/test cases/unit/22 warning location/b.c create mode 100644 meson/test cases/unit/22 warning location/conf.in create mode 100644 meson/test cases/unit/22 warning location/main.c create mode 100644 meson/test cases/unit/22 warning location/meson.build create mode 100644 meson/test cases/unit/22 warning location/sub/c.c create mode 100644 meson/test cases/unit/22 warning location/sub/d.c create mode 100644 meson/test cases/unit/22 warning location/sub/meson.build create mode 100644 meson/test cases/unit/22 warning location/sub/sub.c create mode 100644 meson/test cases/unit/23 unfound pkgconfig/meson.build create mode 100644 meson/test cases/unit/23 unfound pkgconfig/some.c create mode 100644 meson/test cases/unit/24 compiler run_command/meson.build create mode 100644 meson/test cases/unit/25 non-permitted kwargs/meson.build create mode 100644 meson/test cases/unit/26 install umask/datafile.cat create mode 100644 meson/test cases/unit/26 install umask/meson.build create mode 100644 meson/test cases/unit/26 install umask/myinstall.py create mode 100644 meson/test cases/unit/26 install umask/prog.1 create mode 100644 meson/test cases/unit/26 install umask/prog.c create mode 100644 meson/test cases/unit/26 install umask/sample.h create mode 100644 meson/test cases/unit/26 install umask/subdir/datafile.dog create mode 100755 meson/test cases/unit/26 install umask/subdir/sayhello create mode 100644 meson/test cases/unit/27 pkgconfig usage/dependee/meson.build create mode 100644 meson/test cases/unit/27 pkgconfig usage/dependee/pkguser.c create mode 100644 meson/test cases/unit/27 pkgconfig usage/dependency/meson.build create mode 100644 meson/test cases/unit/27 pkgconfig usage/dependency/pkgdep.c create mode 100644 meson/test cases/unit/27 pkgconfig usage/dependency/pkgdep.h create mode 100644 meson/test cases/unit/27 pkgconfig usage/dependency/privatelib.c create mode 100644 meson/test cases/unit/28 ndebug if-release/main.c create mode 100644 meson/test cases/unit/28 ndebug if-release/meson.build create mode 100644 meson/test cases/unit/29 guessed linker 
dependencies/exe/app.c create mode 100644 meson/test cases/unit/29 guessed linker dependencies/exe/meson.build create mode 100644 meson/test cases/unit/29 guessed linker dependencies/lib/lib.c create mode 100644 meson/test cases/unit/29 guessed linker dependencies/lib/meson.build create mode 100644 meson/test cases/unit/29 guessed linker dependencies/lib/meson_options.txt create mode 100644 meson/test cases/unit/3 subproject defaults/meson.build create mode 100644 meson/test cases/unit/3 subproject defaults/meson_options.txt create mode 100644 meson/test cases/unit/3 subproject defaults/subprojects/foob/meson.build create mode 100644 meson/test cases/unit/3 subproject defaults/subprojects/foob/meson_options.txt create mode 100644 meson/test cases/unit/30 shared_mod linking/libfile.c create mode 100644 meson/test cases/unit/30 shared_mod linking/main.c create mode 100644 meson/test cases/unit/30 shared_mod linking/meson.build create mode 100644 meson/test cases/unit/31 forcefallback/meson.build create mode 100644 meson/test cases/unit/31 forcefallback/subprojects/notzlib/meson.build create mode 100644 meson/test cases/unit/31 forcefallback/subprojects/notzlib/notzlib.c create mode 100644 meson/test cases/unit/31 forcefallback/subprojects/notzlib/notzlib.h create mode 100644 meson/test cases/unit/31 forcefallback/test_not_zlib.c create mode 100644 meson/test cases/unit/32 pkgconfig use libraries/app/app.c create mode 100644 meson/test cases/unit/32 pkgconfig use libraries/app/meson.build create mode 100644 meson/test cases/unit/32 pkgconfig use libraries/lib/liba.c create mode 100644 meson/test cases/unit/32 pkgconfig use libraries/lib/libb.c create mode 100644 meson/test cases/unit/32 pkgconfig use libraries/lib/meson.build create mode 100644 meson/test cases/unit/33 cross file overrides always args/meson.build create mode 100644 meson/test cases/unit/33 cross file overrides always args/test.c create mode 100644 meson/test cases/unit/33 cross file overrides always args/ubuntu-armhf-overrides.txt create mode 100644 meson/test cases/unit/34 command line/meson.build create mode 100644 meson/test cases/unit/34 command line/meson_options.txt create mode 100644 meson/test cases/unit/34 command line/subprojects/subp/meson.build create mode 100644 meson/test cases/unit/34 command line/subprojects/subp/meson_options.txt create mode 100644 meson/test cases/unit/35 dist script/meson.build create mode 100644 meson/test cases/unit/35 dist script/prog.c create mode 100755 meson/test cases/unit/35 dist script/replacer.py create mode 100644 meson/test cases/unit/35 dist script/subprojects/sub/dist-script.py create mode 100644 meson/test cases/unit/35 dist script/subprojects/sub/meson.build create mode 100644 meson/test cases/unit/35 dist script/subprojects/sub/meson_options.txt create mode 100644 meson/test cases/unit/35 dist script/subprojects/sub/prog.c create mode 100644 meson/test cases/unit/36 exe_wrapper behaviour/broken-cross.txt create mode 100644 meson/test cases/unit/36 exe_wrapper behaviour/meson.build create mode 100644 meson/test cases/unit/36 exe_wrapper behaviour/meson_options.txt create mode 100644 meson/test cases/unit/36 exe_wrapper behaviour/prog.c create mode 100644 meson/test cases/unit/37 mixed command line args/meson.build create mode 100644 meson/test cases/unit/37 mixed command line args/meson_options.txt create mode 100644 meson/test cases/unit/38 pkgconfig format/meson.build create mode 100644 meson/test cases/unit/38 pkgconfig format/somelib.c create mode 100644 meson/test 
cases/unit/38 pkgconfig format/someret.c create mode 100755 meson/test cases/unit/39 python extmodule/blaster.py create mode 100644 meson/test cases/unit/39 python extmodule/ext/meson.build create mode 100644 meson/test cases/unit/39 python extmodule/ext/tachyon_module.c create mode 100644 meson/test cases/unit/39 python extmodule/meson.build create mode 100644 meson/test cases/unit/39 python extmodule/meson_options.txt create mode 100644 meson/test cases/unit/4 suite selection/failing_test.c create mode 100644 meson/test cases/unit/4 suite selection/meson.build create mode 100644 meson/test cases/unit/4 suite selection/subprojects/subprjfail/failing_test.c create mode 100644 meson/test cases/unit/4 suite selection/subprojects/subprjfail/meson.build create mode 100644 meson/test cases/unit/4 suite selection/subprojects/subprjmix/failing_test.c create mode 100644 meson/test cases/unit/4 suite selection/subprojects/subprjmix/meson.build create mode 100644 meson/test cases/unit/4 suite selection/subprojects/subprjmix/successful_test.c create mode 100644 meson/test cases/unit/4 suite selection/subprojects/subprjsucc/meson.build create mode 100644 meson/test cases/unit/4 suite selection/subprojects/subprjsucc/successful_test.c create mode 100644 meson/test cases/unit/4 suite selection/successful_test.c create mode 100644 meson/test cases/unit/40 external, internal library rpath/built library/bar.c create mode 100644 meson/test cases/unit/40 external, internal library rpath/built library/meson.build create mode 100644 meson/test cases/unit/40 external, internal library rpath/built library/meson_options.txt create mode 100644 meson/test cases/unit/40 external, internal library rpath/built library/prog.c create mode 100644 meson/test cases/unit/40 external, internal library rpath/external library/bar.c create mode 100644 meson/test cases/unit/40 external, internal library rpath/external library/faa.c create mode 100644 meson/test cases/unit/40 external, internal library rpath/external library/foo.c create mode 100644 meson/test cases/unit/40 external, internal library rpath/external library/meson.build create mode 100644 meson/test cases/unit/41 featurenew subprojects/meson.build create mode 100644 meson/test cases/unit/41 featurenew subprojects/subprojects/bar/meson.build create mode 100644 meson/test cases/unit/41 featurenew subprojects/subprojects/baz/meson.build create mode 100644 meson/test cases/unit/41 featurenew subprojects/subprojects/foo/meson.build create mode 100644 meson/test cases/unit/42 rpath order/meson.build create mode 100644 meson/test cases/unit/42 rpath order/myexe.c create mode 100644 meson/test cases/unit/42 rpath order/subprojects/sub1/lib.c create mode 100644 meson/test cases/unit/42 rpath order/subprojects/sub1/meson.build create mode 100644 meson/test cases/unit/42 rpath order/subprojects/sub2/lib.c create mode 100644 meson/test cases/unit/42 rpath order/subprojects/sub2/meson.build create mode 100644 meson/test cases/unit/43 dep order/lib1.c create mode 100644 meson/test cases/unit/43 dep order/lib2.c create mode 100644 meson/test cases/unit/43 dep order/meson.build create mode 100644 meson/test cases/unit/43 dep order/myexe.c create mode 100644 meson/test cases/unit/44 promote wrap/meson.build create mode 100644 meson/test cases/unit/44 promote wrap/subprojects/s1/meson.build create mode 100644 meson/test cases/unit/44 promote wrap/subprojects/s1/subprojects/ambiguous/meson.build create mode 100644 meson/test cases/unit/44 promote wrap/subprojects/s2/meson.build 
create mode 100644 meson/test cases/unit/44 promote wrap/subprojects/s2/subprojects/ambiguous.wrap create mode 100644 meson/test cases/unit/45 vscpp17/main.cpp create mode 100644 meson/test cases/unit/45 vscpp17/meson.build create mode 100755 meson/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py create mode 100644 meson/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc create mode 100644 meson/test cases/unit/46 native dep pkgconfig var/meson.build create mode 100644 meson/test cases/unit/46 native dep pkgconfig var/meson_options.txt create mode 100644 meson/test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc create mode 100644 meson/test cases/unit/47 native file binary/meson.build create mode 100644 meson/test cases/unit/47 native file binary/meson_options.txt create mode 100644 meson/test cases/unit/48 reconfigure/main.c create mode 100644 meson/test cases/unit/48 reconfigure/meson.build create mode 100644 meson/test cases/unit/48 reconfigure/meson_options.txt create mode 100644 meson/test cases/unit/48 reconfigure/subprojects/sub1/meson.build create mode 100644 meson/test cases/unit/49 testsetup default/envcheck.py create mode 100644 meson/test cases/unit/49 testsetup default/meson.build create mode 100644 meson/test cases/unit/5 compiler detection/compiler wrapper.py create mode 100644 meson/test cases/unit/5 compiler detection/meson.build create mode 100644 meson/test cases/unit/5 compiler detection/trivial.c create mode 100644 meson/test cases/unit/5 compiler detection/trivial.cc create mode 100644 meson/test cases/unit/5 compiler detection/trivial.m create mode 100644 meson/test cases/unit/5 compiler detection/trivial.mm create mode 100644 meson/test cases/unit/50 pkgconfig csharp library/meson.build create mode 100644 meson/test cases/unit/50 pkgconfig csharp library/somelib.cs create mode 100644 meson/test cases/unit/51 noncross options/meson.build create mode 100644 meson/test cases/unit/51 noncross options/prog.c create mode 100644 meson/test cases/unit/51 noncross options/ylib.pc create mode 100644 meson/test cases/unit/52 ldflagdedup/bob.c create mode 100644 meson/test cases/unit/52 ldflagdedup/meson.build create mode 100644 meson/test cases/unit/52 ldflagdedup/prog.c create mode 100644 meson/test cases/unit/53 pkgconfig static link order/meson.build create mode 100644 meson/test cases/unit/54 clang-format/.clang-format create mode 100644 meson/test cases/unit/54 clang-format/dummydir.h/dummy.dat create mode 100644 meson/test cases/unit/54 clang-format/header_expected_h create mode 100644 meson/test cases/unit/54 clang-format/header_orig_h create mode 100644 meson/test cases/unit/54 clang-format/meson.build create mode 100644 meson/test cases/unit/54 clang-format/prog_expected_c create mode 100644 meson/test cases/unit/54 clang-format/prog_orig_c create mode 100644 meson/test cases/unit/55 introspect buildoptions/subprojects/projectBad/meson.build create mode 100644 meson/test cases/unit/55 introspect buildoptions/subprojects/projectBad/meson_options.txt create mode 100644 meson/test cases/unit/56 dedup compiler libs/app/app.c create mode 100644 meson/test cases/unit/56 dedup compiler libs/app/meson.build create mode 100644 meson/test cases/unit/56 dedup compiler libs/liba/liba.c create mode 100644 meson/test cases/unit/56 dedup compiler libs/liba/liba.h create mode 100644 meson/test cases/unit/56 dedup compiler libs/liba/meson.build create mode 100644 meson/test cases/unit/56 dedup compiler libs/libb/libb.c create 
mode 100644 meson/test cases/unit/56 dedup compiler libs/libb/libb.h create mode 100644 meson/test cases/unit/56 dedup compiler libs/libb/meson.build create mode 100644 meson/test cases/unit/56 dedup compiler libs/meson.build create mode 100644 meson/test cases/unit/57 introspection/cp.py create mode 100644 meson/test cases/unit/57 introspection/meson.build create mode 100644 meson/test cases/unit/57 introspection/meson_options.txt create mode 100644 meson/test cases/unit/57 introspection/sharedlib/meson.build create mode 100644 meson/test cases/unit/57 introspection/sharedlib/shared.cpp create mode 100644 meson/test cases/unit/57 introspection/sharedlib/shared.hpp create mode 100644 meson/test cases/unit/57 introspection/staticlib/meson.build create mode 100644 meson/test cases/unit/57 introspection/staticlib/static.c create mode 100644 meson/test cases/unit/57 introspection/staticlib/static.h create mode 100644 meson/test cases/unit/57 introspection/t1.cpp create mode 100644 meson/test cases/unit/57 introspection/t2.cpp create mode 100644 meson/test cases/unit/57 introspection/t3.cpp create mode 100644 meson/test cases/unit/58 pkg_config_path option/build_extra_path/totally_made_up_dep.pc create mode 100644 meson/test cases/unit/58 pkg_config_path option/host_extra_path/totally_made_up_dep.pc create mode 100644 meson/test cases/unit/58 pkg_config_path option/meson.build create mode 100644 meson/test cases/unit/59 introspect buildoptions/c_compiler.py create mode 100644 meson/test cases/unit/59 introspect buildoptions/main.c create mode 100644 meson/test cases/unit/59 introspect buildoptions/meson.build create mode 100644 meson/test cases/unit/59 introspect buildoptions/meson_options.txt create mode 100644 meson/test cases/unit/59 introspect buildoptions/subprojects/evilFile.txt create mode 100644 meson/test cases/unit/59 introspect buildoptions/subprojects/projectA/meson.build create mode 100644 meson/test cases/unit/59 introspect buildoptions/subprojects/projectA/meson_options.txt create mode 100644 meson/test cases/unit/59 introspect buildoptions/subprojects/projectBad/meson.build create mode 100644 meson/test cases/unit/59 introspect buildoptions/subprojects/projectBad/meson_options.txt create mode 100644 meson/test cases/unit/6 std override/meson.build create mode 100644 meson/test cases/unit/6 std override/prog11.cpp create mode 100644 meson/test cases/unit/6 std override/prog98.cpp create mode 100644 meson/test cases/unit/6 std override/progp.cpp create mode 100644 meson/test cases/unit/60 native file override/crossfile create mode 100644 meson/test cases/unit/60 native file override/crossfile2 create mode 100644 meson/test cases/unit/60 native file override/meson.build create mode 100644 meson/test cases/unit/60 native file override/meson_options.txt create mode 100644 meson/test cases/unit/60 native file override/nativefile create mode 100755 meson/test cases/unit/61 identity cross/build_wrapper.py create mode 100755 meson/test cases/unit/61 identity cross/host_wrapper.py create mode 100644 meson/test cases/unit/61 identity cross/meson.build create mode 100644 meson/test cases/unit/61 identity cross/stuff.h create mode 100644 meson/test cases/unit/62 pkgconfig relative paths/pkgconfig/librelativepath.pc create mode 100644 meson/test cases/unit/63 cmake_prefix_path/meson.build create mode 100644 meson/test cases/unit/63 cmake_prefix_path/prefix/lib/cmake/mesontest/mesontest-config.cmake create mode 100644 meson/test cases/unit/64 cmake parser/meson.build create mode 100644 
meson/test cases/unit/64 cmake parser/prefix/lib/cmake/mesontest/mesontest-config.cmake create mode 100644 meson/test cases/unit/65 alias target/main.c create mode 100644 meson/test cases/unit/65 alias target/meson.build create mode 100644 meson/test cases/unit/66 static archive stripping/app/appA.c create mode 100644 meson/test cases/unit/66 static archive stripping/app/appB.c create mode 100644 meson/test cases/unit/66 static archive stripping/app/meson.build create mode 100644 meson/test cases/unit/66 static archive stripping/lib/libA.c create mode 100644 meson/test cases/unit/66 static archive stripping/lib/libA.h create mode 100644 meson/test cases/unit/66 static archive stripping/lib/libB.c create mode 100644 meson/test cases/unit/66 static archive stripping/lib/libB.h create mode 100644 meson/test cases/unit/66 static archive stripping/lib/meson.build create mode 100644 meson/test cases/unit/67 static link/lib/func1.c create mode 100644 meson/test cases/unit/67 static link/lib/func10.c create mode 100644 meson/test cases/unit/67 static link/lib/func11.c create mode 100644 meson/test cases/unit/67 static link/lib/func12.c create mode 100644 meson/test cases/unit/67 static link/lib/func14.c create mode 100644 meson/test cases/unit/67 static link/lib/func15.c create mode 100644 meson/test cases/unit/67 static link/lib/func16.c create mode 100644 meson/test cases/unit/67 static link/lib/func17.c create mode 100644 meson/test cases/unit/67 static link/lib/func18.c create mode 100644 meson/test cases/unit/67 static link/lib/func19.c create mode 100644 meson/test cases/unit/67 static link/lib/func2.c create mode 100644 meson/test cases/unit/67 static link/lib/func3.c create mode 100644 meson/test cases/unit/67 static link/lib/func4.c create mode 100644 meson/test cases/unit/67 static link/lib/func5.c create mode 100644 meson/test cases/unit/67 static link/lib/func6.c create mode 100644 meson/test cases/unit/67 static link/lib/func7.c create mode 100644 meson/test cases/unit/67 static link/lib/func8.c create mode 100644 meson/test cases/unit/67 static link/lib/func9.c create mode 100644 meson/test cases/unit/67 static link/lib/meson.build create mode 100644 meson/test cases/unit/67 static link/meson.build create mode 100644 meson/test cases/unit/67 static link/test1.c create mode 100644 meson/test cases/unit/67 static link/test2.c create mode 100644 meson/test cases/unit/67 static link/test3.c create mode 100644 meson/test cases/unit/67 static link/test4.c create mode 100644 meson/test cases/unit/67 static link/test5.c create mode 100644 meson/test cases/unit/68 test env value/meson.build create mode 100755 meson/test cases/unit/68 test env value/test.py create mode 100644 meson/test cases/unit/69 clang-tidy/.clang-tidy create mode 100644 meson/test cases/unit/69 clang-tidy/cttest.cpp create mode 100644 meson/test cases/unit/69 clang-tidy/dummydir.h/dummy.dat create mode 100644 meson/test cases/unit/69 clang-tidy/meson.build create mode 100644 meson/test cases/unit/7 run installed/foo/foo.c create mode 100644 meson/test cases/unit/7 run installed/foo/meson.build create mode 100644 meson/test cases/unit/7 run installed/meson.build create mode 100644 meson/test cases/unit/7 run installed/prog.c create mode 100644 meson/test cases/unit/70 cross/crossfile.in create mode 100644 meson/test cases/unit/70 cross/meson.build create mode 100644 meson/test cases/unit/70 cross/meson_options.txt create mode 100755 meson/test cases/unit/71 cross test passed/exewrapper.py create mode 100644 meson/test 
cases/unit/71 cross test passed/meson.build create mode 100644 meson/test cases/unit/71 cross test passed/meson_options.txt create mode 100644 meson/test cases/unit/71 cross test passed/script.py create mode 100644 meson/test cases/unit/71 cross test passed/src/main.c create mode 100644 meson/test cases/unit/72 summary/meson.build create mode 100644 meson/test cases/unit/72 summary/meson_options.txt create mode 100644 meson/test cases/unit/72 summary/subprojects/sub/meson.build create mode 100644 meson/test cases/unit/72 summary/subprojects/sub2/meson.build create mode 100644 meson/test cases/unit/72 summary/subprojects/sub2/subprojects/subsub/meson.build create mode 100644 meson/test cases/unit/73 wrap file url/meson.build create mode 100644 meson/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz create mode 100644 meson/test cases/unit/73 wrap file url/subprojects/foo.tar.xz create mode 100644 meson/test cases/unit/74 dep files/foo.c create mode 100644 meson/test cases/unit/74 dep files/meson.build create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/client/client.c create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/client/meson.build create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/val1/meson.build create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/val1/val1.c create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/val1/val1.h create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/val2/meson.build create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/val2/val2.c create mode 100644 meson/test cases/unit/75 pkgconfig prefixes/val2/val2.h create mode 100644 meson/test cases/unit/76 subdir libdir/meson.build create mode 100644 meson/test cases/unit/76 subdir libdir/subprojects/flub/meson.build create mode 100644 meson/test cases/unit/77 as link whole/bar.c create mode 100644 meson/test cases/unit/77 as link whole/foo.c create mode 100644 meson/test cases/unit/77 as link whole/meson.build create mode 100644 meson/test cases/unit/78 nostdlib/meson.build create mode 100644 meson/test cases/unit/78 nostdlib/prog.c create mode 100644 meson/test cases/unit/78 nostdlib/subprojects/mylibc/libc.c create mode 100644 meson/test cases/unit/78 nostdlib/subprojects/mylibc/meson.build create mode 100644 meson/test cases/unit/78 nostdlib/subprojects/mylibc/stdio.h create mode 100644 meson/test cases/unit/78 nostdlib/subprojects/mylibc/stubstart.s create mode 100644 meson/test cases/unit/79 user options for subproject/.gitignore create mode 100644 meson/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore create mode 100644 meson/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build create mode 100644 meson/test cases/unit/8 -L -l order/first.pc create mode 100644 meson/test cases/unit/8 -L -l order/meson.build create mode 100644 meson/test cases/unit/8 -L -l order/prog.c create mode 100644 meson/test cases/unit/8 -L -l order/second.pc create mode 100644 meson/test cases/unit/80 global-rpath/meson.build create mode 100644 meson/test cases/unit/80 global-rpath/rpathified.cpp create mode 100644 meson/test cases/unit/80 global-rpath/yonder/meson.build create mode 100644 meson/test cases/unit/80 global-rpath/yonder/yonder.cpp create mode 100644 meson/test cases/unit/80 global-rpath/yonder/yonder.h create mode 100644 meson/test cases/unit/81 wrap-git/meson.build create mode 100644 meson/test cases/unit/81 
wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build create mode 100644 meson/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c create mode 100644 meson/test cases/unit/82 meson version compare/meson.build create mode 100644 meson/test cases/unit/82 meson version compare/subprojects/foo/meson.build create mode 100644 meson/test cases/unit/83 cross only introspect/meson.build create mode 100644 meson/test cases/unit/84 change option choices/meson.build create mode 100644 meson/test cases/unit/84 change option choices/meson_options.1.txt create mode 100644 meson/test cases/unit/84 change option choices/meson_options.2.txt create mode 100644 meson/test cases/unit/85 nested subproject regenerate depends/main.c create mode 100644 meson/test cases/unit/85 nested subproject regenerate depends/meson.build create mode 100644 meson/test cases/unit/85 nested subproject regenerate depends/subprojects/sub1/meson.build create mode 100644 meson/test cases/unit/85 nested subproject regenerate depends/subprojects/sub2/CMakeLists.txt create mode 100644 meson/test cases/unit/86 cpp modules/main.cpp create mode 100644 meson/test cases/unit/86 cpp modules/meson.build create mode 100644 meson/test cases/unit/86 cpp modules/src0.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src1.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src2.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src3.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src4.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src5.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src6.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src7.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src8.ixx create mode 100644 meson/test cases/unit/86 cpp modules/src9.ixx create mode 100644 meson/test cases/unit/87 prelinking/file1.c create mode 100644 meson/test cases/unit/87 prelinking/file2.c create mode 100644 meson/test cases/unit/87 prelinking/file3.c create mode 100644 meson/test cases/unit/87 prelinking/file4.c create mode 100644 meson/test cases/unit/87 prelinking/main.c create mode 100644 meson/test cases/unit/87 prelinking/meson.build create mode 100644 meson/test cases/unit/87 prelinking/private_header.h create mode 100644 meson/test cases/unit/87 prelinking/public_header.h create mode 100644 meson/test cases/unit/88 run native test/main.c create mode 100644 meson/test cases/unit/88 run native test/meson.build create mode 100644 meson/test cases/unit/89 multiple envvars/meson.build create mode 100644 meson/test cases/unit/89 multiple envvars/prog.c create mode 100644 meson/test cases/unit/89 multiple envvars/prog.cpp create mode 100644 meson/test cases/unit/9 d dedup/meson.build create mode 100644 meson/test cases/unit/9 d dedup/prog.c create mode 100644 meson/test cases/unit/90 pkgconfig build rpath order/dummy.pc create mode 100644 meson/test cases/unit/90 pkgconfig build rpath order/meson.build create mode 100644 meson/test cases/unit/90 pkgconfig build rpath order/prog.c create mode 100644 meson/test cases/unit/90 pkgconfig build rpath order/prog.cc create mode 100644 meson/test cases/unit/90 pkgconfig build rpath order/sub/meson.build create mode 100644 meson/test cases/unit/90 pkgconfig build rpath order/sub/stuff.c create mode 100644 meson/test cases/unit/91 devenv/main.c create mode 100644 meson/test cases/unit/91 devenv/meson.build create mode 100644 meson/test cases/unit/91 devenv/subprojects/sub/foo.c create mode 100644 
meson/test cases/unit/91 devenv/subprojects/sub/meson.build create mode 100755 meson/test cases/unit/91 devenv/test-devenv.py create mode 100644 meson/test cases/unit/92 install skip subprojects/foo.c create mode 100644 meson/test cases/unit/92 install skip subprojects/foo.dat create mode 100644 meson/test cases/unit/92 install skip subprojects/foo.h create mode 100644 meson/test cases/unit/92 install skip subprojects/foo/foofile create mode 100644 meson/test cases/unit/92 install skip subprojects/meson.build create mode 100644 meson/test cases/unit/92 install skip subprojects/subprojects/bar/bar.c create mode 100644 meson/test cases/unit/92 install skip subprojects/subprojects/bar/bar.dat create mode 100644 meson/test cases/unit/92 install skip subprojects/subprojects/bar/bar.h create mode 100644 meson/test cases/unit/92 install skip subprojects/subprojects/bar/bar/barfile create mode 100644 meson/test cases/unit/92 install skip subprojects/subprojects/bar/meson.build create mode 100644 meson/test cases/unit/93 new subproject in configured project/meson.build create mode 100644 meson/test cases/unit/93 new subproject in configured project/meson_options.txt create mode 100644 meson/test cases/unit/93 new subproject in configured project/subprojects/sub/foo.c create mode 100644 meson/test cases/unit/93 new subproject in configured project/subprojects/sub/meson.build create mode 100644 meson/test cases/unit/94 clangformat/.clang-format create mode 100644 meson/test cases/unit/94 clangformat/.clang-format-ignore create mode 100644 meson/test cases/unit/94 clangformat/.clang-format-include create mode 100644 meson/test cases/unit/94 clangformat/meson.build create mode 100644 meson/test cases/unit/94 clangformat/not-included/badformat.cpp create mode 100644 meson/test cases/unit/94 clangformat/src/badformat.c create mode 100644 meson/test cases/unit/94 clangformat/src/badformat.cpp create mode 100644 meson/test cases/unit/95 custominc/easytogrepfor/genh.py create mode 100644 meson/test cases/unit/95 custominc/easytogrepfor/meson.build create mode 100644 meson/test cases/unit/95 custominc/helper.c create mode 100644 meson/test cases/unit/95 custominc/meson.build create mode 100644 meson/test cases/unit/95 custominc/prog.c create mode 100644 meson/test cases/unit/95 custominc/prog2.c create mode 100644 meson/test cases/unit/96 implicit force fallback/meson.build create mode 100644 meson/test cases/unit/96 implicit force fallback/subprojects/something/meson.build create mode 100644 meson/test cases/unit/97 link full name/.gitignore create mode 100644 meson/test cases/unit/97 link full name/libtestprovider/meson.build create mode 100644 meson/test cases/unit/97 link full name/libtestprovider/provider.c create mode 100644 meson/test cases/unit/97 link full name/proguser/meson.build create mode 100644 meson/test cases/unit/97 link full name/proguser/receiver.c create mode 100644 meson/test cases/vala/1 basic/meson.build create mode 100644 meson/test cases/vala/1 basic/prog.vala create mode 100644 meson/test cases/vala/10 mixed sources/c/foo.c create mode 100644 meson/test cases/vala/10 mixed sources/c/meson.build create mode 100644 meson/test cases/vala/10 mixed sources/c/writec.py create mode 100644 meson/test cases/vala/10 mixed sources/meson.build create mode 100644 meson/test cases/vala/10 mixed sources/vala/bar.vala create mode 100644 meson/test cases/vala/11 generated vapi/libbar/bar.c create mode 100644 meson/test cases/vala/11 generated vapi/libbar/bar.h create mode 100644 meson/test 
cases/vala/11 generated vapi/libbar/meson.build create mode 100644 meson/test cases/vala/11 generated vapi/libfoo/foo.c create mode 100644 meson/test cases/vala/11 generated vapi/libfoo/foo.h create mode 100644 meson/test cases/vala/11 generated vapi/libfoo/meson.build create mode 100644 meson/test cases/vala/11 generated vapi/main.vala create mode 100644 meson/test cases/vala/11 generated vapi/meson.build create mode 100644 meson/test cases/vala/11 generated vapi/test.json create mode 100644 meson/test cases/vala/12 custom output/bar.vala create mode 100644 meson/test cases/vala/12 custom output/foo.vala create mode 100644 meson/test cases/vala/12 custom output/meson.build create mode 100644 meson/test cases/vala/13 find library/meson.build create mode 100644 meson/test cases/vala/13 find library/test.vala create mode 100644 meson/test cases/vala/14 target glib version and gresources/gres/meson.build create mode 100644 meson/test cases/vala/14 target glib version and gresources/gres/test-resources.xml create mode 100644 meson/test cases/vala/14 target glib version and gresources/gres/test.ui create mode 100644 meson/test cases/vala/14 target glib version and gresources/meson.build create mode 100644 meson/test cases/vala/14 target glib version and gresources/test.vala create mode 100644 meson/test cases/vala/15 static vapi in source tree/meson.build create mode 100644 meson/test cases/vala/15 static vapi in source tree/test.vala create mode 100644 meson/test cases/vala/15 static vapi in source tree/vapi/config.vapi create mode 100644 meson/test cases/vala/16 mixed dependence/app.vala create mode 100644 meson/test cases/vala/16 mixed dependence/meson.build create mode 100644 meson/test cases/vala/16 mixed dependence/mixer-glue.c create mode 100644 meson/test cases/vala/16 mixed dependence/mixer.vala create mode 100644 meson/test cases/vala/17 plain consumer/app.c create mode 100644 meson/test cases/vala/17 plain consumer/badger.vala create mode 100644 meson/test cases/vala/17 plain consumer/meson.build create mode 100644 meson/test cases/vala/18 vapi consumed twice/app.vala create mode 100644 meson/test cases/vala/18 vapi consumed twice/beer.vala create mode 100644 meson/test cases/vala/18 vapi consumed twice/meson.build create mode 100644 meson/test cases/vala/18 vapi consumed twice/person.vala create mode 100644 meson/test cases/vala/19 genie/meson.build create mode 100644 meson/test cases/vala/19 genie/prog.gs create mode 100644 meson/test cases/vala/2 multiple files/class1.vala create mode 100644 meson/test cases/vala/2 multiple files/class2.vala create mode 100644 meson/test cases/vala/2 multiple files/main.vala create mode 100644 meson/test cases/vala/2 multiple files/meson.build create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/c_test_one.c create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/c_test_two.c create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/init.gs create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/meson.build create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/test_one.gs create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/test_two.gs create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/vala_test_one.vala create mode 100644 meson/test cases/vala/20 genie multiple mixed sources/vala_test_two.vala create mode 100644 meson/test cases/vala/21 type module/foo.vala create mode 100644 meson/test cases/vala/21 type 
module/meson.build create mode 100644 meson/test cases/vala/21 type module/plugin-bar.vala create mode 100644 meson/test cases/vala/21 type module/plugin-module.vala create mode 100644 meson/test cases/vala/21 type module/plugin.vala create mode 100644 meson/test cases/vala/22 same target in directories/Subdir/Subdir2/Test.vala create mode 100644 meson/test cases/vala/22 same target in directories/Subdir/Test.vala create mode 100644 meson/test cases/vala/22 same target in directories/Subdir2/Test.vala create mode 100644 meson/test cases/vala/22 same target in directories/Test.vala create mode 100644 meson/test cases/vala/22 same target in directories/meson.build create mode 100644 meson/test cases/vala/22 same target in directories/prog.vala create mode 100644 meson/test cases/vala/23 thread flags/meson.build create mode 100644 meson/test cases/vala/23 thread flags/prog.vala create mode 100644 meson/test cases/vala/24 export dynamic shared module/app.vala create mode 100644 meson/test cases/vala/24 export dynamic shared module/meson.build create mode 100644 meson/test cases/vala/24 export dynamic shared module/module.vala create mode 100644 meson/test cases/vala/25 extract_all_objects/meson.build create mode 100644 meson/test cases/vala/25 extract_all_objects/prog.vala create mode 100644 meson/test cases/vala/26 vala and asm/meson.build create mode 100644 meson/test cases/vala/26 vala and asm/prog.vala create mode 100644 meson/test cases/vala/26 vala and asm/retval-arm.S create mode 100644 meson/test cases/vala/26 vala and asm/retval-x86.S create mode 100644 meson/test cases/vala/26 vala and asm/retval-x86_64.S create mode 100644 meson/test cases/vala/26 vala and asm/symbol-underscore.h create mode 100644 meson/test cases/vala/3 dep/gioprog.vala create mode 100644 meson/test cases/vala/3 dep/meson.build create mode 100644 meson/test cases/vala/4 config/config.vapi create mode 100644 meson/test cases/vala/4 config/meson-something-else.vapi create mode 100644 meson/test cases/vala/4 config/meson.build create mode 100644 meson/test cases/vala/4 config/prog.vala create mode 100644 meson/test cases/vala/5 target glib/GLib.Thread.vala create mode 100644 meson/test cases/vala/5 target glib/meson.build create mode 100644 meson/test cases/vala/5 target glib/retcode.c create mode 100644 meson/test cases/vala/6 static library/meson.build create mode 100644 meson/test cases/vala/6 static library/mylib.vala create mode 100644 meson/test cases/vala/6 static library/prog.vala create mode 100644 meson/test cases/vala/6 static library/test.json create mode 100644 meson/test cases/vala/7 shared library/lib/meson.build create mode 100644 meson/test cases/vala/7 shared library/lib/mylib.vala create mode 100644 meson/test cases/vala/7 shared library/meson.build create mode 100644 meson/test cases/vala/7 shared library/prog/meson.build create mode 100644 meson/test cases/vala/7 shared library/prog/prog.vala create mode 100644 meson/test cases/vala/7 shared library/test.json create mode 100644 meson/test cases/vala/8 generated sources/dependency-generated/enum-types.c.template create mode 100644 meson/test cases/vala/8 generated sources/dependency-generated/enum-types.h.template create mode 100644 meson/test cases/vala/8 generated sources/dependency-generated/enums.h create mode 100644 meson/test cases/vala/8 generated sources/dependency-generated/lib.vala create mode 100644 meson/test cases/vala/8 generated sources/dependency-generated/main.vala create mode 100644 meson/test cases/vala/8 generated 
sources/dependency-generated/meson.build create mode 100644 meson/test cases/vala/8 generated sources/dependency-generated/null.c create mode 100644 meson/test cases/vala/8 generated sources/meson.build create mode 100644 meson/test cases/vala/8 generated sources/onlygen/maingen.in create mode 100644 meson/test cases/vala/8 generated sources/onlygen/meson.build create mode 100644 meson/test cases/vala/8 generated sources/src/config.vala.in create mode 100644 meson/test cases/vala/8 generated sources/src/copy_file.py create mode 100644 meson/test cases/vala/8 generated sources/src/meson.build create mode 100644 meson/test cases/vala/8 generated sources/src/returncode.in create mode 100644 meson/test cases/vala/8 generated sources/src/test.vala create mode 100644 meson/test cases/vala/8 generated sources/src/write_wrapper.py create mode 100644 meson/test cases/vala/8 generated sources/test.json create mode 100644 meson/test cases/vala/8 generated sources/tools/meson.build create mode 100644 meson/test cases/vala/9 gir/foo.vala create mode 100644 meson/test cases/vala/9 gir/meson.build create mode 100644 meson/test cases/vala/9 gir/test.json create mode 100644 meson/test cases/warning/1 version for string div/a/b.c create mode 100644 meson/test cases/warning/1 version for string div/meson.build create mode 100644 meson/test cases/warning/1 version for string div/test.json create mode 100644 meson/test cases/warning/2 languages missing native/meson.build create mode 100644 meson/test cases/warning/2 languages missing native/test.json create mode 100644 meson/test cases/warning/3 fallback consistency/meson.build create mode 100644 meson/test cases/warning/3 fallback consistency/subprojects/sub/meson.build create mode 100644 meson/test cases/warning/3 fallback consistency/test.json create mode 100644 meson/test cases/warning/4 fallback consistency/meson.build create mode 100644 meson/test cases/warning/4 fallback consistency/subprojects/sub/meson.build create mode 100644 meson/test cases/warning/4 fallback consistency/test.json create mode 100644 meson/test cases/warning/5 fallback consistency/meson.build create mode 100644 meson/test cases/warning/5 fallback consistency/subprojects/foo.wrap create mode 100644 meson/test cases/warning/5 fallback consistency/subprojects/foo/meson.build create mode 100644 meson/test cases/warning/5 fallback consistency/test.json create mode 100644 meson/test cases/wasm/1 basic/hello.c create mode 100644 meson/test cases/wasm/1 basic/hello.cpp create mode 100644 meson/test cases/wasm/1 basic/hello.html create mode 100644 meson/test cases/wasm/1 basic/meson.build create mode 100644 meson/test cases/wasm/2 threads/meson.build create mode 100644 meson/test cases/wasm/2 threads/threads.c create mode 100644 meson/test cases/wasm/2 threads/threads.cpp create mode 100644 meson/test cases/windows/1 basic/meson.build create mode 100644 meson/test cases/windows/1 basic/prog.c create mode 100644 meson/test cases/windows/1 basic/test.json create mode 100644 meson/test cases/windows/10 vs module defs generated custom target/meson.build create mode 100644 meson/test cases/windows/10 vs module defs generated custom target/prog.c create mode 100755 meson/test cases/windows/10 vs module defs generated custom target/subdir/make_def.py create mode 100644 meson/test cases/windows/10 vs module defs generated custom target/subdir/meson.build create mode 100644 meson/test cases/windows/10 vs module defs generated custom target/subdir/somedll.c create mode 100644 meson/test 
cases/windows/11 exe implib/meson.build create mode 100644 meson/test cases/windows/11 exe implib/prog.c create mode 100644 meson/test cases/windows/11 exe implib/test.json create mode 100644 meson/test cases/windows/12 resources with custom targets/meson.build create mode 100644 meson/test cases/windows/12 resources with custom targets/prog.c create mode 100755 meson/test cases/windows/12 resources with custom targets/res/gen-res.py create mode 100644 meson/test cases/windows/12 resources with custom targets/res/meson.build create mode 100644 meson/test cases/windows/12 resources with custom targets/res/myres.rc.in create mode 100644 meson/test cases/windows/12 resources with custom targets/res/myres_static.rc create mode 100644 meson/test cases/windows/12 resources with custom targets/res/resource.h create mode 100644 meson/test cases/windows/12 resources with custom targets/res/sample.ico create mode 100644 meson/test cases/windows/13 test argument extra paths/exe/main.c create mode 100644 meson/test cases/windows/13 test argument extra paths/exe/meson.build create mode 100644 meson/test cases/windows/13 test argument extra paths/lib/foo.c create mode 100644 meson/test cases/windows/13 test argument extra paths/lib/foo.h create mode 100644 meson/test cases/windows/13 test argument extra paths/lib/meson.build create mode 100644 meson/test cases/windows/13 test argument extra paths/meson.build create mode 100644 meson/test cases/windows/13 test argument extra paths/test/meson.build create mode 100644 meson/test cases/windows/13 test argument extra paths/test/test_run_exe.py create mode 100755 meson/test cases/windows/14 resources with custom target depend_files/ico/gen-ico.py create mode 100644 meson/test cases/windows/14 resources with custom target depend_files/ico/meson.build create mode 100644 meson/test cases/windows/14 resources with custom target depend_files/ico/sample.ico.in create mode 100644 meson/test cases/windows/14 resources with custom target depend_files/meson.build create mode 100644 meson/test cases/windows/14 resources with custom target depend_files/prog.c create mode 100644 meson/test cases/windows/14 resources with custom target depend_files/res/meson.build create mode 100644 meson/test cases/windows/14 resources with custom target depend_files/res/myres.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/a/meson.build create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/a/rsrc.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/b/meson.build create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/b/rsrc.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/c/meson.build create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/c/rsrc.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe3/meson.build create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_dll/main.c create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_dll/version.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_exe/main.c create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_exe/version.rc create mode 100644 meson/test cases/windows/15 resource scripts with 
duplicate filenames/exe4/meson.build create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_dll/main.c create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_dll/version.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_exe/main.c create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_exe/version.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/meson.build create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/rsrc.rc create mode 100644 meson/test cases/windows/15 resource scripts with duplicate filenames/verify.c create mode 100644 meson/test cases/windows/16 gui app/console_prog.c create mode 100644 meson/test cases/windows/16 gui app/dummy.c create mode 100644 meson/test cases/windows/16 gui app/gui_app_tester.py create mode 100644 meson/test cases/windows/16 gui app/gui_prog.c create mode 100644 meson/test cases/windows/16 gui app/meson.build create mode 100644 meson/test cases/windows/17 msvc ndebug/main.cpp create mode 100644 meson/test cases/windows/17 msvc ndebug/meson.build create mode 100644 meson/test cases/windows/2 winmain/meson.build create mode 100644 meson/test cases/windows/2 winmain/prog.c create mode 100644 meson/test cases/windows/3 cpp/meson.build create mode 100644 meson/test cases/windows/3 cpp/prog.cpp create mode 100644 meson/test cases/windows/4 winmaincpp/meson.build create mode 100644 meson/test cases/windows/4 winmaincpp/prog.cpp create mode 100644 meson/test cases/windows/5 resources/inc/meson.build create mode 100644 meson/test cases/windows/5 resources/inc/resource/resource.h create mode 100644 meson/test cases/windows/5 resources/meson.build create mode 100644 meson/test cases/windows/5 resources/prog.c create mode 100644 meson/test cases/windows/5 resources/res/dummy.c create mode 100644 meson/test cases/windows/5 resources/res/meson.build create mode 100644 meson/test cases/windows/5 resources/res/myres.rc create mode 100644 meson/test cases/windows/5 resources/res/sample.ico create mode 100644 meson/test cases/windows/6 vs module defs/meson.build create mode 100644 meson/test cases/windows/6 vs module defs/prog.c create mode 100644 meson/test cases/windows/6 vs module defs/subdir/meson.build create mode 100644 meson/test cases/windows/6 vs module defs/subdir/somedll.c create mode 100644 meson/test cases/windows/6 vs module defs/subdir/somedll.def create mode 100644 meson/test cases/windows/7 dll versioning/copyfile.py create mode 100644 meson/test cases/windows/7 dll versioning/exe.orig.c create mode 100644 meson/test cases/windows/7 dll versioning/lib.c create mode 100644 meson/test cases/windows/7 dll versioning/meson.build create mode 100644 meson/test cases/windows/7 dll versioning/test.json create mode 100644 meson/test cases/windows/8 find program/meson.build create mode 100644 meson/test cases/windows/8 find program/test-script create mode 100644 meson/test cases/windows/8 find program/test-script-ext.py create mode 100644 meson/test cases/windows/9 vs module defs generated/meson.build create mode 100644 meson/test cases/windows/9 vs module defs generated/prog.c create mode 100644 meson/test cases/windows/9 vs module defs generated/subdir/meson.build create mode 100644 meson/test cases/windows/9 vs module defs generated/subdir/somedll.c create mode 100644 meson/test cases/windows/9 vs module 
defs generated/subdir/somedll.def.in create mode 100755 meson/tools/ac_converter.py create mode 100755 meson/tools/boost_names.py create mode 100755 meson/tools/build_website.py create mode 100755 meson/tools/cmake2meson.py create mode 100644 meson/tools/copy_files.py create mode 100755 meson/tools/dircondenser.py create mode 100755 meson/tools/gen_data.py create mode 100755 meson/tools/regenerate_docs.py create mode 100755 meson/tools/run_with_cov.py (limited to 'meson') diff --git a/meson/.editorconfig b/meson/.editorconfig new file mode 100644 index 000000000..c2dd5d06a --- /dev/null +++ b/meson/.editorconfig @@ -0,0 +1,27 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_style = space + +[*.[ch]] +indent_size = 4 + +[*.cpp] +indent_size = 4 + +[*.py] +indent_size = 4 + +[*.vala] +indent_size = 4 + +[*.xml] +indent_size = 2 + +[meson.build] +indent_size = 2 + +[*.json] +indent_size = 2 diff --git a/meson/.flake8 b/meson/.flake8 new file mode 100644 index 000000000..61876015f --- /dev/null +++ b/meson/.flake8 @@ -0,0 +1,31 @@ +[flake8] +ignore = + # E241: multiple spaces after ':' + E241, + # E251: unexpected spaces around keyword / parameter equals + E251, + # E261: at least two spaces before inline comment + E261, + # E265: block comment should start with '# ' + E265, + # E501: line too long + E501, + # E302: expected 2 blank lines, found 1 + E302, + # E305: expected 2 blank lines after class or function definition, found 1 + E305, + # E401: multiple imports on one line + E401, + # E266: too many leading '#' for block comment + E266, + # E402: module level import not at top of file + E402, + # E731: do not assign a lambda expression, use a def (too many false positives) + E731, + # E741: ambiguous variable name 'l' + E741, + # W504: line break after binary operator + W504, + # A003: builtin class attribute + A003 +max-line-length = 120 diff --git a/meson/.gitattributes b/meson/.gitattributes new file mode 100644 index 000000000..7fd80e241 --- /dev/null +++ b/meson/.gitattributes @@ -0,0 +1,5 @@ +.gitignore export-ignore +.gitattributes export-ignore +* text eol=lf +*.png binary +*.icns binary diff --git a/meson/.github/ISSUE_TEMPLATE/bug_report.md b/meson/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..4bc3fe6e0 --- /dev/null +++ b/meson/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,25 @@ +--- +name: Bug report +about: Meson bug report +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Please include your `meson.build` files, preferably as a minimal toy example showing the issue. +You may need to create simple source code files (don't include private/proprietary code). + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**system parameters** +* Is this a [cross build](https://mesonbuild.com/Cross-compilation.html) or just a plain native build (for the same computer)? +* what operating system (e.g. MacOS Catalina, Windows 10, CentOS 8.0, Ubuntu 18.04, etc.) +* what Python version are you using e.g. 
3.8.0 +* what `meson --version` +* what `ninja --version` if it's a Ninja build diff --git a/meson/.github/codecov.yml b/meson/.github/codecov.yml new file mode 100644 index 000000000..bfdc9877d --- /dev/null +++ b/meson/.github/codecov.yml @@ -0,0 +1,8 @@ +coverage: + status: + project: + default: + informational: true + patch: + default: + informational: true diff --git a/meson/.github/workflows/cygwin.yml b/meson/.github/workflows/cygwin.yml new file mode 100644 index 000000000..224f82d41 --- /dev/null +++ b/meson/.github/workflows/cygwin.yml @@ -0,0 +1,96 @@ +name: cygwin + +concurrency: + group: cygwin-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/cygwin.yml" + - "run*tests.py" + pull_request: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/cygwin.yml" + - "run*tests.py" + +jobs: + test: + runs-on: windows-latest + name: ${{ matrix.NAME }} + strategy: + fail-fast: false + matrix: + include: + - NAME: gccx64ninja + ARCH: x86_64 + + steps: + # cache should be saved on failure, but the action doesn't support that + # https://github.com/actions/cache/issues/92 + - uses: actions/cache@v1 + with: + # should use 'pip3 cache dir' to discover this path + path: C:\cygwin\home\runneradmin\.cache\pip + key: cygwin-pip-${{ github.run_number }} + restore-keys: cygwin-pip- + + - run: git config --global core.autocrlf input + + - uses: actions/checkout@v2 + + - uses: cygwin/cygwin-install-action@master + with: + platform: ${{ matrix.ARCH }} + packages: | + cmake + gcc-fortran + gcc-objc++ + gcc-objc + git + gobject-introspection + gtk-doc + libboost-devel + libglib2.0-devel + libgtk3-devel + libxml2-devel + libxslt-devel + ninja + python2-devel + python3-devel + python3-libxml2 + python3-libxslt + python38-pip + python38-wheel + vala + zlib-devel + + - name: Run pip + run: | + export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32 + python3 -m pip --disable-pip-version-check install gcovr jsonschema pefile pytest pytest-xdist coverage codecov + shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}' + + - name: Run tests + run: | + export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32 + python3 ./tools/run_with_cov.py run_tests.py --backend=ninja + env: + # Cygwin's static boost installation is broken (some static library + # variants such as boost_thread are not present) + SKIP_STATIC_BOOST: 1 + shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}' + + - uses: actions/upload-artifact@v2 + with: + name: ${{ matrix.NAME }} + path: meson-test-run.* + # test log should be saved on failure + if: ${{ !cancelled() }} + + - name: Upload coverage report + run: ./ci/upload_cov.sh "${{ matrix.NAME }}" diff --git a/meson/.github/workflows/file_format.yml b/meson/.github/workflows/file_format.yml new file mode 100644 index 000000000..278fb297d --- /dev/null +++ b/meson/.github/workflows/file_format.yml @@ -0,0 +1,17 @@ +name: File format check + +on: [push, pull_request] + +concurrency: + group: file_fmt-${{ github.head_ref }} + cancel-in-progress: true + +jobs: + format: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + - run: python3 ./run_format_tests.py diff --git a/meson/.github/workflows/images.yml b/meson/.github/workflows/images.yml new file mode 100644 index 000000000..9c11f4d2d --- /dev/null +++ b/meson/.github/workflows/images.yml @@ 
-0,0 +1,62 @@ +name: CI image builder + +concurrency: + group: img_builder-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + branches: + - master + paths: + - 'ci/ciimage/**' + - '.github/workflows/images.yml' + + pull_request: + branches: + - master + paths: + - 'ci/ciimage/**' + - '.github/workflows/images.yml' + + # Rebuild the images every week (Sunday) + schedule: + - cron: '0 0 * * 0' + +jobs: + build: + env: + HAS_DOCKER_CREDENTIALS: ${{ secrets.DOCKER_PASSWORD != '' }} + name: ${{ matrix.cfg.name }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + cfg: + - { name: Arch Linux, id: arch } + - { name: CUDA (on Arch), id: cuda } + - { name: Fedora, id: fedora } + - { name: OpenSUSE, id: opensuse } + - { name: Ubuntu Bionic, id: bionic } + - { name: Ubuntu Rolling, id: ubuntu-rolling } + steps: + - uses: actions/checkout@v2 + + # Login to dockerhub + - name: Docker login + if: (github.event_name == 'push' || github.event_name == 'schedule') && env.HAS_DOCKER_CREDENTIALS == 'true' + uses: azure/docker-login@v1 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + # Build and test + - name: Building the ${{ matrix.cfg.id }} image + run: ./ci/ciimage/build.py -t build ${{ matrix.cfg.id }} + - name: Testing the ${{ matrix.cfg.id }} image + run: ./ci/ciimage/build.py -t test ${{ matrix.cfg.id }} + + # Publish + - name: Push the ${{ matrix.cfg.id }} image + run: docker push mesonbuild/${{ matrix.cfg.id }} + if: (github.event_name == 'push' || github.event_name == 'schedule') && env.HAS_DOCKER_CREDENTIALS == 'true' diff --git a/meson/.github/workflows/lint_mypy.yml b/meson/.github/workflows/lint_mypy.yml new file mode 100644 index 000000000..195d733de --- /dev/null +++ b/meson/.github/workflows/lint_mypy.yml @@ -0,0 +1,47 @@ +name: LintMypy + +concurrency: + group: mypy-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + paths: + - "**.py" + - ".github/workflows/lint_mypy.yml" + pull_request: + paths: + - "**.py" + - ".github/workflows/lint_mypy.yml" + +jobs: + + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + # pylint version constraint can be removed when https://github.com/PyCQA/pylint/issues/3524 is resolved + - run: python -m pip install pylint==2.4.4 + - run: pylint mesonbuild + + custom_lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + - run: python ./run_custom_lint.py + + mypy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + - run: python -m pip install mypy + - run: python run_mypy.py diff --git a/meson/.github/workflows/macos.yml b/meson/.github/workflows/macos.yml new file mode 100644 index 000000000..90fc56b61 --- /dev/null +++ b/meson/.github/workflows/macos.yml @@ -0,0 +1,98 @@ +name: macos + +concurrency: + group: macos-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/macos.yml" + - "run_unittests.py" + pull_request: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/macos.yml" + - "run_unittests.py" + +jobs: + unittests-appleclang: + runs-on: macos-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + - run: | + python -m pip install --upgrade pip + python -m 
pip install pytest pytest-xdist jsonschema coverage codecov + - run: brew install pkg-config ninja llvm qt@5 + - env: + CPPFLAGS: "-I/usr/local/include" + LDFLAGS: "-L/usr/local/lib" + MESON_UNIT_TEST_BACKEND: ninja + # These cannot evaluate anything, so we cannot set PATH or SDKROOT here + run: | + export SDKROOT="$(xcodebuild -version -sdk macosx Path)" + export PATH="$HOME/tools:/usr/local/opt/qt@5/bin:$PATH:$(brew --prefix llvm)/bin" + export PKG_CONFIG_PATH="/usr/local/opt/qt@5/lib/pkgconfig:$PKG_CONFIG_PATH" + ./tools/run_with_cov.py ./run_unittests.py + - name: Upload coverage report + run: ./ci/upload_cov.sh "appleclang [unit tests]" + + + project-tests-appleclang: + runs-on: macos-latest + strategy: + matrix: + unity: ["on", "off"] + steps: + - uses: actions/checkout@v2 + # use python3 from homebrew because it is a valid framework, unlike the actions one: + # https://github.com/actions/setup-python/issues/58 + - run: brew install pkg-config ninja llvm qt@5 boost ldc hdf5 openmpi lapack scalapack sdl2 python3 + - run: | + python3 -m pip install --upgrade setuptools + python3 -m pip install --upgrade pip + python3 -m pip install cython coverage codecov + - env: + CPPFLAGS: "-I/usr/local/include" + LDFLAGS: "-L/usr/local/lib" + MESON_ARGS: --unity=${{ matrix.unity }} + CI: 1 + # These cannot evaluate anything, so we cannot set PATH or SDKROOT here + run: | + export SDKROOT="$(xcodebuild -version -sdk macosx Path)" + export PATH="$HOME/tools:/usr/local/opt/qt@5/bin:$PATH:$(brew --prefix llvm)/bin" + export PKG_CONFIG_PATH="/usr/local/opt/qt@5/lib/pkgconfig:$PKG_CONFIG_PATH" + ./tools/run_with_cov.py ./run_project_tests.py --backend=ninja + - name: Upload coverage report + run: ./ci/upload_cov.sh "appleclang [project tests; unity=${{ matrix.unity }}]" + + Qt4macos: + runs-on: macos-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + - run: python -m pip install -e . 
+ - run: brew install pkg-config ninja gcc + - run: brew tap cartr/qt4 + - run: brew install qt@4 + - run: meson setup "test cases/frameworks/4 qt" build -Drequired=qt4 + - run: meson compile -C build + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: Qt4_Mac_build + path: build/meson-logs/meson-log.txt + - run: meson test -C build -v + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: Qt4_Mac_test + path: build/meson-logs/testlog.txt diff --git a/meson/.github/workflows/msys2.yml b/meson/.github/workflows/msys2.yml new file mode 100644 index 000000000..f0cfa5d28 --- /dev/null +++ b/meson/.github/workflows/msys2.yml @@ -0,0 +1,104 @@ +name: msys2 + +concurrency: + group: msys2-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/msys2.yml" + - "run_unittests.py" + pull_request: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/msys2.yml" + - "run_unittests.py" + +jobs: + test: + runs-on: windows-2019 + name: ${{ matrix.NAME }} + strategy: + fail-fast: false + matrix: + include: + - NAME: gccx86ninja + MSYSTEM: MINGW32 + MSYS2_ARCH: i686 + MSYS2_CURSES: ncurses + COMPILER: gcc + TOOLCHAIN: toolchain + - NAME: gccx64ninja + MSYSTEM: MINGW64 + MSYS2_ARCH: x86_64 + MSYS2_CURSES: pdcurses + COMPILER: gcc + TOOLCHAIN: toolchain + - NAME: clangx64ninja + MSYSTEM: MINGW64 + MSYS2_ARCH: x86_64 + MSYS2_CURSES: + COMPILER: clang + TOOLCHAIN: clang + + defaults: + run: + shell: msys2 {0} + + steps: + - uses: actions/checkout@v2 + + - uses: msys2/setup-msys2@v2 + with: + msystem: ${{ matrix.MSYSTEM }} + update: true + install: >- + base-devel + git + mercurial + lcov + mingw-w64-${{ matrix.MSYS2_ARCH }}-cmake + mingw-w64-${{ matrix.MSYS2_ARCH }}-glib2 + mingw-w64-${{ matrix.MSYS2_ARCH }}-libxml2 + mingw-w64-${{ matrix.MSYS2_ARCH }}-ninja + mingw-w64-${{ matrix.MSYS2_ARCH }}-pkg-config + mingw-w64-${{ matrix.MSYS2_ARCH }}-python2 + mingw-w64-${{ matrix.MSYS2_ARCH }}-python + mingw-w64-${{ matrix.MSYS2_ARCH }}-python-lxml + mingw-w64-${{ matrix.MSYS2_ARCH }}-python-setuptools + mingw-w64-${{ matrix.MSYS2_ARCH }}-python-pip + mingw-w64-${{ matrix.MSYS2_ARCH }}-${{ matrix.TOOLCHAIN }} + + - name: Install dependencies + run: | + python3 -m pip --disable-pip-version-check install gcovr jsonschema pefile pytest pytest-xdist coverage codecov + + - name: Run Tests + run: | + export BOOST_ROOT= + export PATHEXT="$PATHEXT;.py" + + if [[ '${{ matrix.COMPILER }}' == 'clang' ]]; then + export CC=clang + export CXX=clang++ + export OBJC=clang + export OBJCXX=clang++ + fi + + if [[ "${{ matrix.MSYS2_CURSES }}" != "" ]]; then + pacman --noconfirm --needed -S mingw-w64-${{ matrix.MSYS2_ARCH }}-${{ matrix.MSYS2_CURSES }} + fi + + MSYSTEM= python3 ./tools/run_with_cov.py run_tests.py --backend=ninja + + - uses: actions/upload-artifact@v2 + with: + name: ${{ matrix.NAME }} + path: meson-test-run.* + + - name: Upload coverage report + run: ./ci/upload_cov.sh "${{ matrix.NAME }}" diff --git a/meson/.github/workflows/nonative.yml b/meson/.github/workflows/nonative.yml new file mode 100644 index 000000000..4adffbe14 --- /dev/null +++ b/meson/.github/workflows/nonative.yml @@ -0,0 +1,38 @@ +name: Cross-only compilation environment + +concurrency: + group: nonative-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + branches: + - master + # Stable branches such as 0.56 or the eventual 1.0 + - '[0-9]+.[0-9]+' + paths: + - "mesonbuild/**" + - "test cases/**" + - 
".github/workflows/nonative.yml" + - "run*tests.py" + pull_request: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/nonative.yml" + - "run*tests.py" + +jobs: + cross-only-armhf: + runs-on: ubuntu-latest + container: mesonbuild/eoan:latest + steps: + - run: | + apt-get -y purge clang gcc gdc + apt-get -y autoremove + python3 -m pip install coverage codecov + - uses: actions/checkout@v2 + - name: Run tests + run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./tools/run_with_cov.py ./run_tests.py $CI_ARGS --cross ubuntu-armhf.json --cross-only' + - name: Upload coverage report + run: ./ci/upload_cov.sh "Ubuntu nonnative" diff --git a/meson/.github/workflows/os_comp.yml b/meson/.github/workflows/os_comp.yml new file mode 100644 index 000000000..fd716ce8d --- /dev/null +++ b/meson/.github/workflows/os_comp.yml @@ -0,0 +1,129 @@ +name: linux + +concurrency: + group: linux-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + branches: + - master + # Stable branches such as 0.56 or the eventual 1.0 + - '[0-9]+.[0-9]+' + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/images.yml" + - ".github/workflows/os_comp.yml" + - "run_unittests.py" + pull_request: + paths: + - "mesonbuild/**" + - "test cases/**" + - ".github/workflows/images.yml" + - ".github/workflows/os_comp.yml" + - "run_unittests.py" + +jobs: + arch: + name: ${{ matrix.cfg.name }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + cfg: + - { name: Arch Linux, id: arch } + - { name: CUDA (on Arch), id: cuda } + - { name: Fedora, id: fedora } + - { name: OpenSUSE, id: opensuse } + - { name: Ubuntu Bionic, id: bionic } + container: mesonbuild/${{ matrix.cfg.id }}:latest + env: + MESON_CI_JOBNAME: linux-${{ matrix.cfg.id }}-gcc + + steps: + - uses: actions/checkout@v2 + - name: Run tests + # All environment variables are stored inside the docker image in /ci/env_vars.sh + # They are defined in the `env` section in each image.json. 
CI_ARGS should be set + via the `args` array in the image.json + run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./tools/run_with_cov.py ./run_tests.py $CI_ARGS' + - name: Upload coverage report + run: ./ci/upload_cov.sh "OS Comp [${{ matrix.cfg.name }}]" + + ubuntu-rolling: + name: 'Ubuntu Rolling' + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + cfg: + - CC: 'gcc' + CXX: 'g++' + - MESON_ARGS: '--unity=on -Ddefault_library=static' + RUN_TESTS_ARGS: '--no-unittests' + CC: 'gcc' + CXX: 'g++' + - MESON_ARGS: '-Ddefault_library=both' + RUN_TESTS_ARGS: '--no-unittests' + CC: 'gcc' + CXX: 'g++' + - CC: 'clang' + CXX: 'clang++' + - MESON_ARGS: '--unity=on' + RUN_TESTS_ARGS: '--no-unittests' + CC: 'clang' + CXX: 'clang++' + - RUN_TESTS_ARGS: '--cross ubuntu-armhf.json --cross linux-mingw-w64-64bit.json' + MESON_RSP_THRESHOLD: '0' + CC: 'gcc' + CXX: 'g++' + - RUN_TESTS_ARGS: '--cross ubuntu-armhf.json --cross linux-mingw-w64-64bit.json' + MESON_ARGS: '--unity=on' + CC: 'gcc' + CXX: 'g++' + + env: + MESON_CI_JOBNAME_UPDATE: linux-${{ github.job }}-${{ matrix.cfg.CC }} + + container: + image: mesonbuild/ubuntu-rolling + env: + MESON_RSP_THRESHOLD: ${{ matrix.cfg.MESON_RSP_THRESHOLD }} + MESON_ARGS: ${{ matrix.cfg.MESON_ARGS }} + RUN_TESTS_ARGS: ${{ matrix.cfg.RUN_TESTS_ARGS }} + CC: ${{ matrix.cfg.CC }} + CXX: ${{ matrix.cfg.CXX }} + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Run tests + shell: bash + run: | + export HOME=/root + + export CC=$CC + export CXX=$CXX + export OBJC=$CC + export OBJCXX=$CXX + export PATH=$(pwd)/tools:$PATH + if test "$MESON_RSP_THRESHOLD" != "" + then + export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD + fi + + source /ci/env_vars.sh + export MESON_CI_JOBNAME=$MESON_CI_JOBNAME_UPDATE + + update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix + update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix + update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix + update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix + + ./tools/run_with_cov.py ./run_tests.py $RUN_TESTS_ARGS -- $MESON_ARGS + + - name: Upload coverage report + run: ./ci/upload_cov.sh "Ubuntu [${{ matrix.cfg.CC }} ${{ matrix.cfg.RUN_TESTS_ARGS }} ${{ matrix.cfg.MESON_ARGS }}]" diff --git a/meson/.github/workflows/unusedargs_missingreturn.yml b/meson/.github/workflows/unusedargs_missingreturn.yml new file mode 100644 index 000000000..f5cf76c23 --- /dev/null +++ b/meson/.github/workflows/unusedargs_missingreturn.yml @@ -0,0 +1,74 @@ +name: UnusedMissingReturn +# this workflow checks for +# * unused input arguments +# * missing return values +# * strict prototypes +# some users have default configs that will needlessly fail Meson self-tests due to this syntax.
+env: + CFLAGS: "-Werror=unused-parameter -Werror=return-type -Werror=strict-prototypes" + CPPFLAGS: "-Werror=unused-parameter -Werror=return-type" + FFLAGS: "-fimplicit-none" + +concurrency: + group: unusedargs-${{ github.head_ref }} + cancel-in-progress: true + +on: + push: + paths: + - ".github/workflows/unusedargs_missingreturn.yml" + - "test cases/cmake/**" + - "test cases/common/**" + - "test cases/fortran/**" + - "test cases/linuxlike/**" + - "test cases/objc/**" + - "test cases/objcpp/**" + - "test cases/windows/**" + + pull_request: + paths: + - ".github/workflows/unusedargs_missingreturn.yml" + - "test cases/cmake/**" + - "test cases/common/**" + - "test cases/fortran/**" + - "test cases/linuxlike/**" + - "test cases/objc/**" + - "test cases/objcpp/**" + - "test cases/windows/**" + +jobs: + + linux: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + - name: Install Compilers + run: | + sudo apt update -yq + sudo apt install -yq --no-install-recommends g++ gfortran ninja-build gobjc gobjc++ + python -m pip install coverage codecov + - run: ./tools/run_with_cov.py run_project_tests.py --only cmake common fortran platform-linux "objective c" "objective c++" + - name: Upload coverage report + run: ./ci/upload_cov.sh "UnusedMissingReturn" + + windows: + runs-on: windows-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + + - run: pip install ninja pefile coverage codecov + + - run: python ./tools/run_with_cov.py run_project_tests.py --only platform-windows + env: + CC: gcc + CXX: g++ + FC: gfortran + + - name: Upload coverage report + run: ./ci/upload_cov.sh "UnusedMissingReturn Windows" diff --git a/meson/.github/workflows/website.yml b/meson/.github/workflows/website.yml new file mode 100644 index 000000000..efb7fed7e --- /dev/null +++ b/meson/.github/workflows/website.yml @@ -0,0 +1,47 @@ +name: Update website + +concurrency: + group: mesonbuild.com + # We do NOT want `cancel-in-progress` here since only one website job + # should run at a time to avoid upload race conditions.
+ +on: + push: + branches: + - master + paths: + - docs/** + workflow_dispatch: + +jobs: + update_website: + env: + HAS_SSH_KEY: ${{ secrets.WEBSITE_PRIV_KEY != '' }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install package + run: | + sudo apt-get -y install python3-pip ninja-build libjson-glib-dev + pip install meson hotdoc + - name: Setup SSH Keys and known_hosts + env: + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + run: | + ssh-agent -a $SSH_AUTH_SOCK > /dev/null + ssh-add - <<< "${{ secrets.WEBSITE_PRIV_KEY }}" + if: env.HAS_SSH_KEY == 'true' + - name: Build website + run: | + git config --global user.name "github-actions" + git config --global user.email "github-actions@github.com" + cd docs + meson setup _build + ninja -C _build + - name: Update website + env: + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + run: | + cd docs + ninja -C _build upload + if: env.HAS_SSH_KEY == 'true' diff --git a/meson/.gitignore b/meson/.gitignore new file mode 100644 index 000000000..8ff5e7869 --- /dev/null +++ b/meson/.gitignore @@ -0,0 +1,34 @@ +.mypy_cache/ +.pytest_cache/ +/.project +/.pydevproject +/.settings +/.cproject +/.idea +/.vscode + +__pycache__ +/.coverage/ +/.coveragerc +/install dir +/work area + +/meson-test-run.txt +/meson-test-run.xml +/meson-cross-test-run.txt +/meson-cross-test-run.xml + +.DS_Store +*~ +*.swp +packagecache +/MANIFEST +/build +/dist +/meson.egg-info + +/docs/built_docs +/docs/hotdoc-private* + +*.pyc +/*venv* diff --git a/meson/.lgtm.yml b/meson/.lgtm.yml new file mode 100644 index 000000000..62d8cb420 --- /dev/null +++ b/meson/.lgtm.yml @@ -0,0 +1,4 @@ +extraction: + python: + python_setup: + version: 3 \ No newline at end of file diff --git a/meson/.mailmap b/meson/.mailmap new file mode 100644 index 000000000..8298afb0b --- /dev/null +++ b/meson/.mailmap @@ -0,0 +1,10 @@ +Alexandre Foley AlexandreFoley +Igor Gnatenko Igor Gnatenko +Jussi Pakkanen Jussi Pakkanen +Jussi Pakkanen jpakkane +Nirbheek Chauhan Nirbheek Chauhan +Nicolas Schneider Nicolas Schneider +Patrick Griffis TingPing +Thibault Saunier Thibault Saunier +Thibault Saunier Saunier Thibault +Tim-Philipp Müller Tim-Philipp Müller diff --git a/meson/.mypy.ini b/meson/.mypy.ini new file mode 100644 index 000000000..2ee1e5973 --- /dev/null +++ b/meson/.mypy.ini @@ -0,0 +1,23 @@ +[mypy] +strict_optional = False +show_error_context = False +show_column_numbers = True +ignore_missing_imports = True + +follow_imports = silent +warn_redundant_casts = True +warn_unused_ignores = True +warn_return_any = True +# warn_unreachable = True +disallow_untyped_calls = True +disallow_untyped_defs = True +disallow_incomplete_defs = True +disallow_untyped_decorators = True +no_implicit_optional = True +strict_equality = True +check_untyped_defs = True +# disallow_any_expr = True +# disallow_any_decorated = True +# disallow_any_explicit = True +# disallow_any_generics = True +# disallow_subclassing_any = True diff --git a/meson/.pylintrc b/meson/.pylintrc new file mode 100644 index 000000000..bfa6d2fc7 --- /dev/null +++ b/meson/.pylintrc @@ -0,0 +1,26 @@ +[MASTER] +jobs=0 + +[REPORTS] +score=no + +[MESSAGES CONTROL] +disable=all +enable= + abstract-class-instantiated, + assert-on-tuple, + bad-indentation, + compare-to-zero, + dangerous-default-value, + deprecated-lambda, + len-as-condition, + literal-comparison, + missing-kwoa, + mixed-indentation, + no-value-for-parameter, + redundant-keyword-arg, + singleton-comparison, + too-many-function-args, + unexpected-keyword-arg, + unreachable, + bare-except diff 
--git a/meson/CODEOWNERS b/meson/CODEOWNERS new file mode 100644 index 000000000..b051ece9d --- /dev/null +++ b/meson/CODEOWNERS @@ -0,0 +1,9 @@ +* @jpakkane +/mesonbuild/modules/pkgconfig.py @xclaesse +/mesonbuild/modules/cmake.py @mensinda +/mesonbuild/modules/unstable_external_project.py @xclaesse +/mesonbuild/modules/unstable_rust.py @dcbaker +/mesonbuild/ast/ @mensinda +/mesonbuild/cmake/ @mensinda +/mesonbuild/compilers/ @dcbaker +/mesonbuild/linkers.py @dcbaker diff --git a/meson/COPYING b/meson/COPYING new file mode 100644 index 000000000..d64569567 --- /dev/null +++ b/meson/COPYING @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/meson/MANIFEST.in b/meson/MANIFEST.in new file mode 100644 index 000000000..c79182ba4 --- /dev/null +++ b/meson/MANIFEST.in @@ -0,0 +1,20 @@ +graft test?cases +graft manual?tests +graft cross +graft data +graft graphics +graft man +graft tools +graft packaging + +include contributing.md +include COPYING +include README.md +include run_cross_test.py +include run_tests.py +include run_unittests.py +include run_meson_command_tests.py +include run_project_tests.py +include ghwt.py +include __main__.py +include meson.py diff --git a/meson/README.md b/meson/README.md new file mode 100644 index 000000000..c08c6ffb9 --- /dev/null +++ b/meson/README.md @@ -0,0 +1,93 @@ +

+Meson® is a project to create the best possible next-generation +build system. + +#### Status + +[![PyPI](https://img.shields.io/pypi/v/meson.svg)](https://pypi.python.org/pypi/meson) +[![Build Status](https://dev.azure.com/jussi0947/jussi/_apis/build/status/mesonbuild.meson)](https://dev.azure.com/jussi0947/jussi/_build/latest?definitionId=1) +[![Codecov](https://codecov.io/gh/mesonbuild/meson/coverage.svg?branch=master)](https://codecov.io/gh/mesonbuild/meson/branch/master) +[![Code Quality: Python](https://img.shields.io/lgtm/grade/python/g/mesonbuild/meson.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mesonbuild/meson/context:python) +[![Total Alerts](https://img.shields.io/lgtm/alerts/g/mesonbuild/meson.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/mesonbuild/meson/alerts) + +#### Dependencies + + - [Python](https://python.org) (version 3.6 or newer) + - [Ninja](https://ninja-build.org) (version 1.8.2 or newer) + +#### Installing from source + +Meson is available on [PyPI](https://pypi.python.org/pypi/meson), so +it can be installed with `pip3 install meson`. The exact command to +type to install with `pip` can vary between systems; be sure to use +the Python 3 version of `pip`. + +If you wish you can install it locally with the standard Python command: + +```console +python3 -m pip install meson +``` + +For builds using Ninja, Ninja can be downloaded directly from the Ninja +[GitHub release page](https://github.com/ninja-build/ninja/releases) +or via [PyPI](https://pypi.python.org/pypi/ninja): + +```console +python3 -m pip install ninja +``` + +More on installing Meson can be found at the +[getting meson page](https://mesonbuild.com/Getting-meson.html). + +#### Running + +Meson requires that you have a source directory and a build directory +and that these two are different. In your source root must exist a +file called `meson.build`. To generate the build system run this +command: + +`meson setup <source directory> <build directory>` + +Depending on how you obtained Meson the command might also be called +`meson.py` instead of plain `meson`. In the rest of this document we +are going to use the latter form. + +You can omit either of the two directories, and Meson will substitute +the current directory and autodetect what you mean. This allows you to +do things like this: + +```console +cd <source root> +meson setup builddir +``` + +To compile, cd into your build directory and type `ninja`. To run unit +tests, type `ninja test`. + +More on running Meson build system commands can be found at the +[running meson page](https://mesonbuild.com/Running-Meson.html) +or by typing `meson --help`. + +#### Contributing + +We love code contributions. See the [contribution +page](https://mesonbuild.com/Contributing.html) on the website for +details. + + +#### IRC + +The channel to use is `#mesonbuild` either via Matrix ([web +interface][matrix_web]) or [OFTC IRC][oftc_irc]. + +[matrix_web]: https://app.element.io/#/room/#mesonbuild:matrix.org +[oftc_irc]: https://www.oftc.net/ + +#### Further info + +More information about the Meson build system can be found at the +[project's home page](https://mesonbuild.com). + +Meson is a registered trademark of ***Jussi Pakkanen***.
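As a companion to the Running section: a minimal sketch of the `meson.build` that section assumes must exist at the source root. The project name `demo` and the source file `main.c` are illustrative placeholders, not taken from this patch.

```meson
# Hypothetical minimal meson.build; names are illustrative.
project('demo', 'c')            # declare the project and the language it uses
executable('demo', 'main.c')    # build one executable from main.c
```

With such a file in place, `meson setup builddir`, `meson compile -C builddir` and `meson test -C builddir` configure, build and run the tests, respectively.
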
diff --git a/meson/azure-pipelines.yml b/meson/azure-pipelines.yml new file mode 100644 index 000000000..ca835c639 --- /dev/null +++ b/meson/azure-pipelines.yml @@ -0,0 +1,94 @@ +name: $(BuildID) + +trigger: + branches: + include: + - 'master' + # Release branches + - '0.*' + paths: + include: + - 'mesonbuild' + - 'test cases' + - 'azure-pipelines.yml' + - 'ci/azure-steps.yml' + - 'run_project_tests.py' + - 'run_tests.py' + - 'run_unittests.py' +pr: + branches: + include: + - '*' + paths: + include: + - 'mesonbuild' + - 'test cases' + - 'azure-pipelines.yml' + - 'ci/azure-steps.yml' + - 'run_project_tests.py' + - 'run_tests.py' + - 'run_unittests.py' + +variables: + CI: 1 + SOURCE_VERSION: $(Build.SourceVersion) + +jobs: + +- job: vs2017 + timeoutInMinutes: 120 + pool: + vmImage: VS2017-Win2016 + + strategy: + matrix: + vc2017x86ninja: + arch: x86 + compiler: msvc2017 + backend: ninja + MESON_RSP_THRESHOLD: 0 + vc2017x64vs: + arch: x64 + compiler: msvc2017 + backend: vs2017 + clangclx64ninja: + arch: x64 + compiler: clang-cl + backend: ninja + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.6' + addToPath: true + architecture: 'x64' + - template: ci/azure-steps.yml + +- job: vs2019 + timeoutInMinutes: 120 + pool: + vmImage: windows-2019 + + strategy: + matrix: + vc2019x64ninja: + arch: x64 + compiler: msvc2019 + backend: ninja + vc2019x64vs: + arch: x64 + compiler: msvc2019 + backend: vs2019 + vc2019arm64ninjacross: + arch: arm64 + compiler: msvc2019 + backend: ninja + extraargs: --cross arm64cl.txt --cross-only + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + addToPath: true + architecture: 'x64' + - template: ci/azure-steps.yml diff --git a/meson/ci/azure-steps.yml b/meson/ci/azure-steps.yml new file mode 100644 index 000000000..233bbfa36 --- /dev/null +++ b/meson/ci/azure-steps.yml @@ -0,0 +1,23 @@ +steps: +- task: PowerShell@2 + inputs: + targetType: 'filePath' + filePath: .\ci\run.ps1 + +- task: PublishTestResults@2 + inputs: + testResultsFiles: meson-test-run.xml + testRunTitle: $(System.JobName) + publishRunAttachments: true + condition: not(canceled()) + +- task: CopyFiles@2 + inputs: + contents: 'meson-test-run.*' + targetFolder: $(Build.ArtifactStagingDirectory) + condition: not(canceled()) + +- task: PublishBuildArtifacts@1 + inputs: + artifactName: $(System.JobName) + condition: not(canceled()) diff --git a/meson/ci/ciimage/.gitignore b/meson/ci/ciimage/.gitignore new file mode 100644 index 000000000..cff1864bf --- /dev/null +++ b/meson/ci/ciimage/.gitignore @@ -0,0 +1,3 @@ +/build_* +/test_* +/user.sh diff --git a/meson/ci/ciimage/arch/image.json b/meson/ci/ciimage/arch/image.json new file mode 100644 index 000000000..4a399d69e --- /dev/null +++ b/meson/ci/ciimage/arch/image.json @@ -0,0 +1,7 @@ +{ + "base_image": "archlinux:latest", + "env": { + "CI": "1", + "MESON_CI_JOBNAME": "linux-arch-gcc" + } +} diff --git a/meson/ci/ciimage/arch/install.sh b/meson/ci/ciimage/arch/install.sh new file mode 100755 index 000000000..72816ab95 --- /dev/null +++ b/meson/ci/ciimage/arch/install.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +set -e + +source /ci/common.sh + +# Inspired by https://github.com/greyltc/docker-archlinux-aur/blob/master/add-aur.sh + +pkgs=( + python python-pip + ninja make git sudo fakeroot autoconf automake patch + libelf gcc gcc-fortran gcc-objc vala rust bison flex cython go dlang-dmd + mono boost qt5-base gtkmm3 gtest gmock protobuf wxgtk2 gobject-introspection + itstool gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz + 
doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools + libwmf valgrind cmake netcdf-fortran openmpi nasm gnustep-base gettext + python-lxml hotdoc rust-bindgen qt6-base qt6-tools + # cuda +) + +aur_pkgs=(scalapack) +cleanup_pkgs=(go) + +AUR_USER=docker +PACMAN_OPTS='--needed --noprogressbar --noconfirm' + +# Patch config files +sed -i 's/#Color/Color/g' /etc/pacman.conf +sed -i 's,#MAKEFLAGS="-j2",MAKEFLAGS="-j$(nproc)",g' /etc/makepkg.conf +sed -i "s,PKGEXT='.pkg.tar.zst',PKGEXT='.pkg.tar',g" /etc/makepkg.conf + +# Install packages +pacman -Syu $PACMAN_OPTS "${pkgs[@]}" +install_python_packages + +# Setup the user +useradd -m $AUR_USER +echo "${AUR_USER}:" | chpasswd -e +echo "$AUR_USER ALL = NOPASSWD: ALL" >> /etc/sudoers + +# Install yay +su $AUR_USER -c 'cd; git clone https://aur.archlinux.org/yay.git' +su $AUR_USER -c 'cd; cd yay; makepkg' +pushd /home/$AUR_USER/yay/ +pacman -U *.pkg.tar --noprogressbar --noconfirm +popd +rm -rf /home/$AUR_USER/yay + +# Install yay deps +su $AUR_USER -c "yay -S $PACMAN_OPTS ${aur_pkgs[*]}" + +# cleanup +pacman -Rs --noconfirm "${cleanup_pkgs[@]}" +su $AUR_USER -c "yes | yay -Scc" diff --git a/meson/ci/ciimage/bionic/image.json b/meson/ci/ciimage/bionic/image.json new file mode 100644 index 000000000..5df709e7e --- /dev/null +++ b/meson/ci/ciimage/bionic/image.json @@ -0,0 +1,8 @@ +{ + "base_image": "ubuntu:bionic", + "env": { + "CI": "1", + "DC": "gdc", + "MESON_CI_JOBNAME": "linux-bionic-gcc" + } +} diff --git a/meson/ci/ciimage/bionic/install.sh b/meson/ci/ciimage/bionic/install.sh new file mode 100755 index 000000000..4dea73ba8 --- /dev/null +++ b/meson/ci/ciimage/bionic/install.sh @@ -0,0 +1,60 @@ +#!/bin/bash + +set -e + +source /ci/common.sh + +export DEBIAN_FRONTEND=noninteractive +export LANG='C.UTF-8' +export DC=gdc + +pkgs=( + python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev + wget unzip cmake doxygen + clang + pkg-config-arm-linux-gnueabihf + qt4-linguist-tools qt5-default qtbase5-private-dev + python-dev + libomp-dev + llvm lcov + ldc + libclang-dev + libgcrypt20-dev + libgpgme-dev + libhdf5-dev openssh-server + libboost-python-dev libboost-regex-dev + libblocksruntime-dev + libperl-dev libscalapack-mpi-dev libncurses-dev +) + +boost_pkgs=(atomic chrono date-time filesystem log regex serialization system test thread) + +sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list" +apt-get -y update +apt-get -y upgrade +apt-get -y install eatmydata + +# Base stuff +eatmydata apt-get -y build-dep meson + +# Add boost packages +for i in "${boost_pkgs[@]}"; do + for j in "1.62.0" "1.65.1"; do + pkgs+=("libboost-${i}${j}") + done +done + +# packages +eatmydata apt-get -y install "${pkgs[@]}" + +install_python_packages + +# Install the ninja 0.10 +wget https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-linux.zip +unzip ninja-linux.zip -d /ci + +# cleanup +apt-get -y remove ninja-build +apt-get -y clean +apt-get -y autoclean +rm ninja-linux.zip diff --git a/meson/ci/ciimage/build.py b/meson/ci/ciimage/build.py new file mode 100755 index 000000000..1e1f23811 --- /dev/null +++ b/meson/ci/ciimage/build.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python3 + +import json +import argparse +import stat +import textwrap +import shutil +import subprocess +from tempfile import TemporaryDirectory +from pathlib import Path +import typing as T + +image_namespace = 'mesonbuild' + +image_def_file = 'image.json' +install_script = 'install.sh' + +class ImageDef: + def __init__(self, image_dir: Path) -> None: + path = 
image_dir / image_def_file + data = json.loads(path.read_text(encoding='utf-8')) + + assert isinstance(data, dict) + assert all([x in data for x in ['base_image', 'env']]) + assert isinstance(data['base_image'], str) + assert isinstance(data['env'], dict) + + self.base_image: str = data['base_image'] + self.args: T.List[str] = data.get('args', []) + self.env: T.Dict[str, str] = data['env'] + +class BuilderBase(): + def __init__(self, data_dir: Path, temp_dir: Path) -> None: + self.data_dir = data_dir + self.temp_dir = temp_dir + + self.common_sh = self.data_dir.parent / 'common.sh' + self.common_sh = self.common_sh.resolve(strict=True) + self.validate_data_dir() + + self.image_def = ImageDef(self.data_dir) + + self.docker = shutil.which('docker') + self.git = shutil.which('git') + if self.docker is None: + raise RuntimeError('Unable to find docker') + if self.git is None: + raise RuntimeError('Unable to find git') + + def validate_data_dir(self) -> None: + files = [ + self.data_dir / image_def_file, + self.data_dir / install_script, + ] + if not self.data_dir.exists(): + raise RuntimeError(f'{self.data_dir.as_posix()} does not exist') + for i in files: + if not i.exists(): + raise RuntimeError(f'{i.as_posix()} does not exist') + if not i.is_file(): + raise RuntimeError(f'{i.as_posix()} is not a regular file') + +class Builder(BuilderBase): + def gen_bashrc(self) -> None: + out_file = self.temp_dir / 'env_vars.sh' + out_data = '' + + # run_tests.py parameters + self.image_def.env['CI_ARGS'] = ' '.join(self.image_def.args) + + for key, val in self.image_def.env.items(): + out_data += f'export {key}="{val}"\n' + + # Also add /ci to PATH + out_data += 'export PATH="/ci:$PATH"\n' + + out_file.write_text(out_data, encoding='utf-8') + + # make it executable + mode = out_file.stat().st_mode + out_file.chmod(mode | stat.S_IEXEC) + + def gen_dockerfile(self) -> None: + out_file = self.temp_dir / 'Dockerfile' + out_data = textwrap.dedent(f'''\ + FROM {self.image_def.base_image} + + ADD install.sh /ci/install.sh + ADD common.sh /ci/common.sh + ADD env_vars.sh /ci/env_vars.sh + RUN /ci/install.sh + ''') + + out_file.write_text(out_data, encoding='utf-8') + + def do_build(self) -> None: + # copy files + for i in self.data_dir.iterdir(): + shutil.copy(str(i), str(self.temp_dir)) + shutil.copy(str(self.common_sh), str(self.temp_dir)) + + self.gen_bashrc() + self.gen_dockerfile() + + cmd_git = [self.git, 'rev-parse', '--short', 'HEAD'] + res = subprocess.run(cmd_git, cwd=self.data_dir, stdout=subprocess.PIPE) + if res.returncode != 0: + raise RuntimeError('Failed to get the current commit hash') + commit_hash = res.stdout.decode().strip() + + cmd = [ + self.docker, 'build', + '-t', f'{image_namespace}/{self.data_dir.name}:latest', + '-t', f'{image_namespace}/{self.data_dir.name}:{commit_hash}', + '--pull', + self.temp_dir.as_posix(), + ] + if subprocess.run(cmd).returncode != 0: + raise RuntimeError('Failed to build the docker image') + +class ImageTester(BuilderBase): + def __init__(self, data_dir: Path, temp_dir: Path, ci_root: Path) -> None: + super().__init__(data_dir, temp_dir) + self.meson_root = ci_root.parent.parent.resolve() + + def gen_dockerfile(self) -> None: + out_file = self.temp_dir / 'Dockerfile' + out_data = textwrap.dedent(f'''\ + FROM {image_namespace}/{self.data_dir.name} + + ADD meson /meson + ''') + + out_file.write_text(out_data, encoding='utf-8') + + def copy_meson(self) -> None: + shutil.copytree( + self.meson_root, + self.temp_dir / 'meson', + ignore=shutil.ignore_patterns( + 
'.git', + '*_cache', + '__pycache__', + # 'work area', + self.temp_dir.name, + ) + ) + + def do_test(self, tty: bool = False) -> None: + self.copy_meson() + self.gen_dockerfile() + + try: + build_cmd = [ + self.docker, 'build', + '-t', 'meson_test_image', + self.temp_dir.as_posix(), + ] + if subprocess.run(build_cmd).returncode != 0: + raise RuntimeError('Failed to build the test docker image') + + test_cmd = [] + if tty: + test_cmd = [ + self.docker, 'run', '--rm', '-t', '-i', 'meson_test_image', + '/bin/bash', '-c', '' + + 'cd meson;' + + 'source /ci/env_vars.sh;' + + f'echo -e "\\n\\nInteractive test shell in the {image_namespace}/{self.data_dir.name} container with the current meson tree";' + + 'echo -e "The file ci/ciimage/user.sh will be sourced if it exists to enable user specific configurations";' + + 'echo -e "Run the following command to run all CI tests: ./run_tests.py $CI_ARGS\\n\\n";' + + '[ -f ci/ciimage/user.sh ] && exec /bin/bash --init-file ci/ciimage/user.sh;' + + 'exec /bin/bash;' + ] + else: + test_cmd = [ + self.docker, 'run', '--rm', '-t', 'meson_test_image', + '/bin/bash', '-c', 'source /ci/env_vars.sh; cd meson; ./run_tests.py $CI_ARGS' + ] + + if subprocess.run(test_cmd).returncode != 0 and not tty: + raise RuntimeError('Running tests failed') + finally: + cleanup_cmd = [self.docker, 'rmi', '-f', 'meson_test_image'] + subprocess.run(cleanup_cmd).returncode + +class ImageTTY(BuilderBase): + def __init__(self, data_dir: Path, temp_dir: Path, ci_root: Path) -> None: + super().__init__(data_dir, temp_dir) + self.meson_root = ci_root.parent.parent.resolve() + + def do_run(self) -> None: + try: + tty_cmd = [ + self.docker, 'run', + '--name', 'meson_test_container', '-t', '-i', '-v', f'{self.meson_root.as_posix()}:/meson', + f'{image_namespace}/{self.data_dir.name}', + '/bin/bash', '-c', '' + + 'cd meson;' + + 'source /ci/env_vars.sh;' + + f'echo -e "\\n\\nInteractive test shell in the {image_namespace}/{self.data_dir.name} container with the current meson tree";' + + 'echo -e "The file ci/ciimage/user.sh will be sourced if it exists to enable user specific configurations";' + + 'echo -e "Run the following command to run all CI tests: ./run_tests.py $CI_ARGS\\n\\n";' + + '[ -f ci/ciimage/user.sh ] && exec /bin/bash --init-file ci/ciimage/user.sh;' + + 'exec /bin/bash;' + ] + subprocess.run(tty_cmd).returncode != 0 + finally: + cleanup_cmd = [self.docker, 'rm', '-f', 'meson_test_container'] + subprocess.run(cleanup_cmd).returncode + + +def main() -> None: + parser = argparse.ArgumentParser(description='Meson CI image builder') + parser.add_argument('what', type=str, help='Which image to build / test') + parser.add_argument('-t', '--type', choices=['build', 'test', 'testTTY', 'TTY'], help='What to do', required=True) + + args = parser.parse_args() + + ci_root = Path(__file__).parent + ci_data = ci_root / args.what + + with TemporaryDirectory(prefix=f'{args.type}_{args.what}_', dir=ci_root) as td: + ci_build = Path(td) + print(f'Build dir: {ci_build}') + + if args.type == 'build': + builder = Builder(ci_data, ci_build) + builder.do_build() + elif args.type == 'test': + tester = ImageTester(ci_data, ci_build, ci_root) + tester.do_test() + elif args.type == 'testTTY': + tester = ImageTester(ci_data, ci_build, ci_root) + tester.do_test(tty=True) + elif args.type == 'TTY': + tester = ImageTTY(ci_data, ci_build, ci_root) + tester.do_run() + +if __name__ == '__main__': + main() diff --git a/meson/ci/ciimage/common.sh b/meson/ci/ciimage/common.sh new file mode 100644 index 
000000000..707b7515f --- /dev/null +++ b/meson/ci/ciimage/common.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +### +### Common functions for CI builder files. +### All functions can be accessed in install.sh via: +### +### $ source /ci/common.sh +### + +set -e +set -x + +base_python_pkgs=( + pytest + pytest-xdist + coverage + codecov + jsonschema +) + +python_pkgs=( + cython + gobject + PyGObject + lxml + gcovr +) + +dub_fetch() { + set +e + for (( i=1; i<=24; ++i )); do + dub fetch "$@" + (( $? == 0 )) && break + + echo "Dub Fetch failed. Retrying in $((i*5))s" + sleep $((i*5)) + done + set -e +} + +install_minimal_python_packages() { + python3 -m pip install "${base_python_pkgs[@]}" $* +} + +install_python_packages() { + python3 -m pip install "${base_python_pkgs[@]}" "${python_pkgs[@]}" $* +} diff --git a/meson/ci/ciimage/cuda/image.json b/meson/ci/ciimage/cuda/image.json new file mode 100644 index 000000000..f422723c1 --- /dev/null +++ b/meson/ci/ciimage/cuda/image.json @@ -0,0 +1,8 @@ +{ + "base_image": "archlinux:latest", + "args": ["--only", "cuda"], + "env": { + "CI": "1", + "MESON_CI_JOBNAME": "linux-cuda-gcc" + } +} diff --git a/meson/ci/ciimage/cuda/install.sh b/meson/ci/ciimage/cuda/install.sh new file mode 100755 index 000000000..0d412e00c --- /dev/null +++ b/meson/ci/ciimage/cuda/install.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +set -e + +source /ci/common.sh + +pkgs=( + python python-pip + ninja gcc gcc-objc git cmake + cuda zlib pkgconf +) + +PACMAN_OPTS='--needed --noprogressbar --noconfirm' + +pacman -Syu $PACMAN_OPTS "${pkgs[@]}" +install_minimal_python_packages + +# Manually remove cache to avoid GitHub space restrictions +rm -rf /var/cache/pacman + +echo "source /etc/profile.d/cuda.sh" >> /ci/env_vars.sh diff --git a/meson/ci/ciimage/fedora/image.json b/meson/ci/ciimage/fedora/image.json new file mode 100644 index 000000000..c6fdc9e28 --- /dev/null +++ b/meson/ci/ciimage/fedora/image.json @@ -0,0 +1,8 @@ +{ + "base_image": "fedora:latest", + "env": { + "CI": "1", + "SKIP_STATIC_BOOST": "1", + "MESON_CI_JOBNAME": "linux-fedora-gcc" + } +} diff --git a/meson/ci/ciimage/fedora/install.sh b/meson/ci/ciimage/fedora/install.sh new file mode 100755 index 000000000..df1d853cd --- /dev/null +++ b/meson/ci/ciimage/fedora/install.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +set -e + +source /ci/common.sh + +pkgs=( + python python-pip python3-devel + ninja-build make git autoconf automake patch + elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-objc++ vala rust bison flex ldc libasan libasan-static + mono-core boost-devel gtkmm30 gtest-devel gmock-devel protobuf-devel wxGTK3-devel gobject-introspection + boost-python3-devel + itstool gtk3-devel java-latest-openjdk-devel gtk-doc llvm-devel clang-devel SDL2-devel graphviz-devel zlib zlib-devel zlib-static + #hdf5-openmpi-devel hdf5-devel netcdf-openmpi-devel netcdf-devel netcdf-fortran-openmpi-devel netcdf-fortran-devel scalapack-openmpi-devel + doxygen vulkan-devel vulkan-validation-layers-devel openssh mercurial gtk-sharp2-devel libpcap-devel gpgme-devel + qt5-qtbase-devel qt5-qttools-devel qt5-linguist qt5-qtbase-private-devel + libwmf-devel valgrind cmake openmpi-devel nasm gnustep-base-devel gettext-devel ncurses-devel + libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel libgcrypt-devel +) + +# Sys update +dnf -y upgrade + +# Install deps +dnf -y install "${pkgs[@]}" +install_python_packages hotdoc + +# Cleanup +dnf -y clean all diff --git a/meson/ci/ciimage/opensuse/image.json b/meson/ci/ciimage/opensuse/image.json new file mode 100644 
index 000000000..6609aa08d --- /dev/null +++ b/meson/ci/ciimage/opensuse/image.json @@ -0,0 +1,9 @@ +{ + "base_image": "opensuse/tumbleweed:latest", + "env": { + "CI": "1", + "SKIP_STATIC_BOOST": "1", + "SINGLE_DUB_COMPILER": "1", + "MESON_CI_JOBNAME": "linux-opensuse-gcc" + } +} diff --git a/meson/ci/ciimage/opensuse/install.sh b/meson/ci/ciimage/opensuse/install.sh new file mode 100755 index 000000000..41cb96192 --- /dev/null +++ b/meson/ci/ciimage/opensuse/install.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +set -e + +source /ci/common.sh + +pkgs=( + python3-pip python3 python3-devel + ninja make git autoconf automake patch libjpeg-devel + elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl lcov + mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel + itstool gtk3-devel java-15-openjdk-devel gtk-doc llvm-devel clang-devel libSDL2-devel graphviz-devel zlib-devel zlib-devel-static + #hdf5-devel netcdf-devel libscalapack2-openmpi3-devel libscalapack2-gnu-openmpi3-hpc-devel openmpi3-devel + doxygen vulkan-devel vulkan-validationlayers openssh mercurial gtk-sharp3-complete gtk-sharp2-complete libpcap-devel libgpgme-devel + libqt5-qtbase-devel libqt5-qttools-devel libqt5-linguist libqt5-qtbase-private-headers-devel + libwmf-devel valgrind cmake nasm gnustep-base-devel gettext-tools gettext-runtime gettext-csharp ncurses-devel + libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel + boost-devel libboost_date_time-devel libboost_filesystem-devel libboost_locale-devel libboost_system-devel + libboost_test-devel libboost_log-devel libboost_regex-devel + libboost_python3-devel libboost_regex-devel +) + +# Sys update +zypper --non-interactive patch --with-update --with-optional +zypper --non-interactive update + +# Install deps +zypper install -y "${pkgs[@]}" +install_python_packages hotdoc + +echo 'export PKG_CONFIG_PATH="/usr/lib64/mpi/gcc/openmpi3/lib64/pkgconfig:$PKG_CONFIG_PATH"' >> /ci/env_vars.sh + +# dmd is very special on OpenSUSE (as in the packages do not work) +# see https://bugzilla.opensuse.org/show_bug.cgi?id=1162408 +curl -fsS https://dlang.org/install.sh | bash -s dmd | tee dmd_out.txt +cat dmd_out.txt | grep source | sed 's/^[^`]*`//g' | sed 's/`.*//g' >> /ci/env_vars.sh +chmod +x /ci/env_vars.sh + +source /ci/env_vars.sh + +dub_fetch urld +dub build urld --compiler=dmd +dub_fetch dubtestproject +dub build dubtestproject:test1 --compiler=dmd +dub build dubtestproject:test2 --compiler=dmd + +# Cleanup +zypper --non-interactive clean --all diff --git a/meson/ci/ciimage/ubuntu-rolling/image.json b/meson/ci/ciimage/ubuntu-rolling/image.json new file mode 100644 index 000000000..f9f068d5a --- /dev/null +++ b/meson/ci/ciimage/ubuntu-rolling/image.json @@ -0,0 +1,8 @@ +{ + "base_image": "ubuntu:rolling", + "env": { + "CI": "1", + "DC": "gdc", + "MESON_CI_JOBNAME": "linux-ubuntu-rolling-gcc" + } +} diff --git a/meson/ci/ciimage/ubuntu-rolling/install.sh b/meson/ci/ciimage/ubuntu-rolling/install.sh new file mode 100755 index 000000000..770fd8531 --- /dev/null +++ b/meson/ci/ciimage/ubuntu-rolling/install.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +set -e + +source /ci/common.sh + +export DEBIAN_FRONTEND=noninteractive +export LANG='C.UTF-8' +export DC=gdc + +pkgs=( + python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev + wget unzip + qt5-qmake qtbase5-dev qtchooser qtbase5-dev-tools clang + pkg-config-arm-linux-gnueabihf + libomp-dev + llvm lcov + dub ldc + mingw-w64 mingw-w64-tools nim + libclang-dev + 
libgcrypt20-dev + libgpgme-dev + libhdf5-dev + libboost-python-dev libboost-regex-dev + libblocksruntime-dev + libperl-dev + liblapack-dev libscalapack-mpi-dev + bindgen +) + +sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list" +apt-get -y update +apt-get -y upgrade +apt-get -y install eatmydata + +# Base stuff +eatmydata apt-get -y build-dep meson + +# packages +eatmydata apt-get -y install "${pkgs[@]}" +eatmydata apt-get -y install --no-install-recommends wine-stable # Wine is special + +install_python_packages hotdoc + +# dub stuff +dub_fetch urld +dub build urld --compiler=gdc +dub_fetch dubtestproject +dub build dubtestproject:test1 --compiler=ldc2 +dub build dubtestproject:test2 --compiler=ldc2 + +# cleanup +apt-get -y clean +apt-get -y autoclean diff --git a/meson/ci/ciimage/ubuntu-rolling/test.sh b/meson/ci/ciimage/ubuntu-rolling/test.sh new file mode 100755 index 000000000..f6956bb60 --- /dev/null +++ b/meson/ci/ciimage/ubuntu-rolling/test.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e + +testFN() { + set +e + false +} + +testFN +false +exit 0 diff --git a/meson/ci/run.ps1 b/meson/ci/run.ps1 new file mode 100644 index 000000000..5f256858d --- /dev/null +++ b/meson/ci/run.ps1 @@ -0,0 +1,106 @@ +python ./skip_ci.py --base-branch-env=SYSTEM_PULLREQUEST_TARGETBRANCH --is-pull-env=SYSTEM_PULLREQUEST_PULLREQUESTID --base-branch-origin +if ($LastExitCode -ne 0) { + exit 0 +} + +# remove Chocolately, MinGW, Strawberry Perl from path, so we don't find gcc/gfortran and try to use it +# remove PostgreSQL from path so we don't pickup a broken zlib from it +$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';' + +if ($env:arch -eq 'x64') { + # Rust puts its shared stdlib in a secret place, but it is needed to run tests. + $env:Path += ";$HOME/.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin" +} elseif ($env:arch -eq 'x86') { + # Switch to the x86 Rust toolchain + rustup default stable-i686-pc-windows-msvc + # Rust puts its shared stdlib in a secret place, but it is needed to run tests. + $env:Path += ";$HOME/.rustup/toolchains/stable-i686-pc-windows-msvc/bin" + # Need 32-bit Python for tests that need the Python dependency + $env:Path = "C:\hostedtoolcache\windows\Python\3.6.8\x86;C:\hostedtoolcache\windows\Python\3.6.8\x86\Scripts;$env:Path" +} + +# Set the CI env var for the meson test framework +$env:CI = '1' + +# download and install prerequisites +function DownloadFile([String] $Source, [String] $Destination) { + $retries = 10 + echo "Downloading $Source" + for ($i = 1; $i -le $retries; $i++) { + try { + (New-Object net.webclient).DownloadFile($Source, $Destination) + break # succeeded + } catch [net.WebException] { + if ($i -eq $retries) { + throw # fail on last retry + } + $backoff = (10 * $i) # backoff 10s, 20s, 30s... + echo ('{0}: {1}' -f $Source, $_.Exception.Message) + echo ('Retrying in {0}s...' 
-f $backoff) + Start-Sleep -m ($backoff * 1000) + } + } +} + + +if (($env:backend -eq 'ninja') -and ($env:arch -ne 'arm64')) { $dmd = $true } else { $dmd = $false } + +DownloadFile -Source https://github.com/mesonbuild/cidata/releases/download/ci3/ci_data.zip -Destination $env:AGENT_WORKFOLDER\ci_data.zip +echo "Extracting ci_data.zip" +Expand-Archive $env:AGENT_WORKFOLDER\ci_data.zip -DestinationPath $env:AGENT_WORKFOLDER\ci_data +& "$env:AGENT_WORKFOLDER\ci_data\install.ps1" -Arch $env:arch -Compiler $env:compiler -Boost $true -DMD $dmd + + +echo "=== PATH BEGIN ===" +echo ($env:Path).Replace(';',"`n") +echo "=== PATH END ===" +echo "" + +$progs = @("python","ninja","pkg-config","cl","rc","link") +foreach ($prog in $progs) { + echo "" + echo "Locating ${prog}:" + where.exe $prog +} + +echo "" +echo "Ninja / MSBuld version:" +if ($env:backend -eq 'ninja') { + ninja --version +} else { + MSBuild /version +} + +echo "" +echo "Python version:" +python --version + +# Needed for running unit tests in parallel. +echo "" +python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist jsonschema coverage + +echo "" +echo "=== Start running tests ===" +# Starting from VS2019 Powershell(?) will fail the test run +# if it prints anything to stderr. Python's test runner +# does that by default so we need to forward it. +cmd /c "python 2>&1 ./tools/run_with_cov.py run_tests.py --backend $env:backend $env:extraargs" + +$result = $LastExitCode + +echo "" +echo "" +echo "=== Gathering coverage report ===" +echo "" + +python3 -m coverage combine +python3 -m coverage xml +python3 -m coverage report + +# Currently codecov.py does not handle Azure, use this fork of a fork to get it +# working without requireing a token +git clone https://github.com/mensinda/codecov-python +python3 -m pip install --ignore-installed ./codecov-python +python3 -m codecov -f .coverage/coverage.xml -n "VS$env:compiler $env:arch $env:backend" -c $env:SOURCE_VERSION + +exit $result diff --git a/meson/ci/upload_cov.sh b/meson/ci/upload_cov.sh new file mode 100755 index 000000000..089641b47 --- /dev/null +++ b/meson/ci/upload_cov.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +echo "Combining coverage reports..." +coverage combine + +echo "Generating XML report..." +coverage xml + +echo "Printing report" +coverage report + +echo "Uploading to codecov..." +codecov -f .coverage/coverage.xml -n "$1" diff --git a/meson/ci/usercustomize.py b/meson/ci/usercustomize.py new file mode 100644 index 000000000..72421ba0d --- /dev/null +++ b/meson/ci/usercustomize.py @@ -0,0 +1,19 @@ +# Copyright 2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
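[Editor's note] The ci/upload_cov.sh helper above simply chains the three coverage steps (combine, xml, report) and forwards its single argument to codecov as the upload name. A usage sketch; the job name is illustrative, and coverage/codecov are assumed to be installed via the base_python_pkgs set from common.sh:

    # Run from the directory where the coverage data files were written,
    # after the test suite has been driven through tools/run_with_cov.py.
    ./ci/upload_cov.sh "Ubuntu x86_64 ninja"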
+ +# This script is used by coverage (see tools/run_with_cov.py) to enable coverage +# reports in python subprocesses + +import coverage +coverage.process_startup() diff --git a/meson/contributing.md b/meson/contributing.md new file mode 100644 index 000000000..3d1f4bc3d --- /dev/null +++ b/meson/contributing.md @@ -0,0 +1,8 @@ +## Contributing to the Meson build system + +Thank you for your interest in participating to the development! +A large fraction of Meson is contributed by people outside +the core team and we are *excited* to see what you do. + +**Contribution instructions can be found on the website** + @ https://mesonbuild.com/Contributing.html diff --git a/meson/cross/arm64cl.txt b/meson/cross/arm64cl.txt new file mode 100644 index 000000000..f22fca8a8 --- /dev/null +++ b/meson/cross/arm64cl.txt @@ -0,0 +1,17 @@ +[binaries] +c = 'cl' +cpp = 'cl' +ar = 'lib' +windres = 'rc' + +[built-in options] +c_args = ['-DWINAPI_FAMILY=WINAPI_FAMILY_APP'] +c_link_args = ['-APPCONTAINER', 'WindowsApp.lib'] +cpp_args = ['-DWINAPI_FAMILY=WINAPI_FAMILY_APP'] +cpp_link_args = ['-APPCONTAINER', 'WindowsApp.lib'] + +[host_machine] +system = 'windows' +cpu_family = 'aarch64' +cpu = 'armv8' +endian = 'little' diff --git a/meson/cross/armcc.txt b/meson/cross/armcc.txt new file mode 100644 index 000000000..ae65c9e5e --- /dev/null +++ b/meson/cross/armcc.txt @@ -0,0 +1,20 @@ +# This file assumes that path to the arm compiler toolchain is added +# to the environment(PATH) variable, so that Meson can find +# the armcc, armlink and armar while building. +[binaries] +c = 'armcc' +cpp = 'armcc' +ar = 'armar' +strip = 'armar' + +[built-in options] +# The '--cpu' option with the appropriate target type should be mentioned +# to cross compile c/c++ code with armcc,. +c_args = ['--cpu=Cortex-M0plus'] +cpp_args = ['--cpu=Cortex-M0plus'] + +[host_machine] +system = 'bare metal' # Update with your system name - bare metal/OS. +cpu_family = 'arm' +cpu = 'Cortex-M0+' +endian = 'little' diff --git a/meson/cross/armclang-linux.txt b/meson/cross/armclang-linux.txt new file mode 100644 index 000000000..10f6fa44b --- /dev/null +++ b/meson/cross/armclang-linux.txt @@ -0,0 +1,34 @@ +# Using ARM compilers from Linux command line is tricky and +# not really well documented because they want you to use +# their IDE instead. +# +# First you need to do the full install with the IDE and set +# up license files et al. This may be possible from the command +# line. +# +# Then you need to do the following: +# +# Select toolchain by running /opt/arm/developmentstudio-2019.0/bin/select_default_toolchain +# Armcc is only available in toolchain version 5. +# Armclang is only available in toolchain version 6. +# Start shell with /opt/arm/developmentstudio-2019.0/bin/suite_exec zsh +# Now the compilers will work. + +[binaries] +# we could set exe_wrapper = qemu-arm-static but to test the case +# when cross compiled binaries can't be run we don't do that +c = '/opt/arm/developmentstudio-2019.0/sw/ARMCompiler6.12/bin/armclang' +#c = '/opt/arm/developmentstudio-2019.0/sw/ARMCompiler5.06u6/bin/armcc' +#cpp = '/usr/bin/arm-linux-gnueabihf-g++' +ar = '/opt/arm/developmentstudio-2019.0/sw/ARMCompiler6.12/bin/armar' +#strip = '/usr/arm-linux-gnueabihf/bin/strip' +#pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' + +[built-in options] +c_args = ['--target=aarch64-arm-none-eabi'] + +[host_machine] +system = 'baremetal' +cpu_family = 'arm' +cpu = 'armv7' # Not sure if correct. 
+endian = 'little' diff --git a/meson/cross/armclang.txt b/meson/cross/armclang.txt new file mode 100644 index 000000000..6146e0d74 --- /dev/null +++ b/meson/cross/armclang.txt @@ -0,0 +1,20 @@ +# This file assumes that path to the arm compiler toolchain is added +# to the environment(PATH) variable, so that Meson can find +# the armclang, armlink and armar while building. +[binaries] +c = 'armclang' +cpp = 'armclang' +ar = 'armar' +strip = 'armar' + +[built-in options] +# The '--target', '-mcpu' options with the appropriate values should be mentioned +# to cross compile c/c++ code with armclang. +c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] +cpp_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] + +[host_machine] +system = 'bare metal' # Update with your system name - bare metal/OS. +cpu_family = 'arm' +cpu = 'Cortex-M0+' +endian = 'little' diff --git a/meson/cross/c2000.txt b/meson/cross/c2000.txt new file mode 100644 index 000000000..61c03109d --- /dev/null +++ b/meson/cross/c2000.txt @@ -0,0 +1,28 @@ +# This file assumes that path to the Texas Instruments C20000 toolchain is added +# to the environment(PATH) variable, so that Meson can find +# cl2000 and ar2000 while building. +[binaries] +c = 'cl2000' +ar = 'ar2000' +strip = 'cl2000' + +[host_machine] +system = 'bare metal' +cpu_family = 'c2000' +cpu = 'c28x' +endian = 'little' + +[built-in options] +c_args = [ + '-v28', + '-ml', + '-mt'] +c_link_args = [ + '-z', + '--rom_model', + '\f28004x_flash.cmd'] +cpp_args = [] +cpp_link_args = [] + +[properties] +needs_exe_wrapper = true diff --git a/meson/cross/ccomp-armv7a.txt b/meson/cross/ccomp-armv7a.txt new file mode 100644 index 000000000..af66ed269 --- /dev/null +++ b/meson/cross/ccomp-armv7a.txt @@ -0,0 +1,13 @@ +[binaries] +c = 'ccomp' +ar = 'ccomp' +strip = 'strip' + +[built-in options] +c_args = ['-target', 'armv7a-eabi', '-fall'] + +[host_machine] +system = 'bare metal' # Update with your system name - bare metal/OS. +cpu_family = 'arm' +cpu = 'Cortex-A9' +endian = 'little' diff --git a/meson/cross/ccrx.txt b/meson/cross/ccrx.txt new file mode 100644 index 000000000..f1b536c3a --- /dev/null +++ b/meson/cross/ccrx.txt @@ -0,0 +1,22 @@ +# This file assumes that path to the Renesas CC-RX toolchain is added +# to the environment(PATH) variable, so that Meson can find +# ccrx and rlink while building. +[binaries] +c = 'ccrx' +cpp = 'ccrx' +ar = 'rlink' +strip = 'rlink' + +[built-in options] +# The '--cpu' option with the appropriate target type should be mentioned +# to cross compile c/c++ code with ccrx,. +c_args = ['-cpu=rx600'] +cpp_args = ['-cpu=rx600'] +c_link_args = [] +cpp_link_args = [] + +[host_machine] +system = 'bare metal' +cpu_family = 'rx' +cpu = 'rx600' +endian = 'little' diff --git a/meson/cross/iphone.txt b/meson/cross/iphone.txt new file mode 100644 index 000000000..965940722 --- /dev/null +++ b/meson/cross/iphone.txt @@ -0,0 +1,27 @@ +# This is a cross compilation file from OSX Yosemite to iPhone +# Apple keeps changing the location and names of files so +# these might not work for you. Use the googels and xcrun. 
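[Editor's note] All of the bare-metal cross files above (armcc.txt, armclang.txt, c2000.txt, ccomp-armv7a.txt, ccrx.txt) are consumed the same way once the corresponding toolchain is on PATH, as their header comments require. A sketch; the build directory name is illustrative:

    # Configure a build directory against one of the shipped cross files
    meson setup build-armclang --cross-file cross/armclang.txt

    # Compile; the results target the machine described in [host_machine],
    # so they are generally not runnable on the build machine
    meson compile -C build-armclang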
+ +[binaries] +c = 'clang' +cpp = 'clang++' +ar = 'ar' +strip = 'strip' + +[built-in options] +c_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] +cpp_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] +c_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] +cpp_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] + +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer' +has_function_printf = true +has_function_hfkerhisadf = false + +[host_machine] +system = 'darwin' +cpu_family = 'arm' +cpu = 'armv7' +endian = 'little' + diff --git a/meson/cross/linux-mingw-w64-32bit.json b/meson/cross/linux-mingw-w64-32bit.json new file mode 100644 index 000000000..476111183 --- /dev/null +++ b/meson/cross/linux-mingw-w64-32bit.json @@ -0,0 +1,7 @@ +{ + "file": "linux-mingw-w64-32bit.txt", + "tests": ["common", "cmake"], + "env": { + "WINEPATH": "/usr/lib/gcc/i686-w64-mingw32/9.2-posix;/usr/i686-w64-mingw32/bin;/usr/i686-w64-mingw32/lib" + } +} diff --git a/meson/cross/linux-mingw-w64-32bit.txt b/meson/cross/linux-mingw-w64-32bit.txt new file mode 100644 index 000000000..caf1da1a0 --- /dev/null +++ b/meson/cross/linux-mingw-w64-32bit.txt @@ -0,0 +1,31 @@ +[binaries] +c = '/usr/bin/i686-w64-mingw32-gcc' +cpp = '/usr/bin/i686-w64-mingw32-g++' +objc = '/usr/bin/i686-w64-mingw32-gcc' +ar = '/usr/bin/i686-w64-mingw32-ar' +strip = '/usr/bin/i686-w64-mingw32-strip' +pkgconfig = '/usr/bin/i686-w64-mingw32-pkg-config' +windres = '/usr/bin/i686-w64-mingw32-windres' +exe_wrapper = 'wine' +ld = '/usr/bin/i686-w64-mingw32-ld' +cmake = '/usr/bin/cmake' + +[properties] +# Directory that contains 'bin', 'lib', etc +root = '/usr/i686-w64-mingw32' +# Directory that contains 'bin', 'lib', etc for the toolchain and system libraries +sys_root = '/usr/i686-w64-mingw32/sys-root/mingw' + +[host_machine] +system = 'windows' +cpu_family = 'x86' +cpu = 'i686' +endian = 'little' + +[cmake] + +CMAKE_BUILD_WITH_INSTALL_RPATH = 'ON' +CMAKE_FIND_ROOT_PATH_MODE_PROGRAM = 'NEVER' +CMAKE_FIND_ROOT_PATH_MODE_LIBRARY = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_INCLUDE = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_PACKAGE = 'ONLY' diff --git a/meson/cross/linux-mingw-w64-64bit.json b/meson/cross/linux-mingw-w64-64bit.json new file mode 100644 index 000000000..df344da9d --- /dev/null +++ b/meson/cross/linux-mingw-w64-64bit.json @@ -0,0 +1,7 @@ +{ + "file": "linux-mingw-w64-64bit.txt", + "tests": ["common", "cmake"], + "env": { + "WINEPATH": "/usr/lib/gcc/x86_64-w64-mingw32/9.2-posix;/usr/x86_64-w64-mingw32/bin;/usr/x86_64-w64-mingw32/lib" + } +} diff --git a/meson/cross/linux-mingw-w64-64bit.txt b/meson/cross/linux-mingw-w64-64bit.txt new file mode 100644 index 000000000..f49fb35bf --- /dev/null +++ b/meson/cross/linux-mingw-w64-64bit.txt @@ -0,0 +1,30 @@ +[binaries] +c = '/usr/bin/x86_64-w64-mingw32-gcc' +cpp = '/usr/bin/x86_64-w64-mingw32-g++' +objc = '/usr/bin/x86_64-w64-mingw32-gcc' +ar = '/usr/bin/x86_64-w64-mingw32-ar' +strip = '/usr/bin/x86_64-w64-mingw32-strip' +pkgconfig = 
'/usr/bin/x86_64-w64-mingw32-pkg-config' +windres = '/usr/bin/x86_64-w64-mingw32-windres' +exe_wrapper = 'wine64' +cmake = '/usr/bin/cmake' + +[properties] +# Directory that contains 'bin', 'lib', etc +root = '/usr/x86_64-w64-mingw32' +# Directory that contains 'bin', 'lib', etc for the toolchain and system libraries +sys_root = '/usr/x86_64-w64-mingw32/sys-root/mingw' + +[host_machine] +system = 'windows' +cpu_family = 'x86_64' +cpu = 'x86_64' +endian = 'little' + +[cmake] + +CMAKE_BUILD_WITH_INSTALL_RPATH = 'ON' +CMAKE_FIND_ROOT_PATH_MODE_PROGRAM = 'NEVER' +CMAKE_FIND_ROOT_PATH_MODE_LIBRARY = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_INCLUDE = 'ONLY' +CMAKE_FIND_ROOT_PATH_MODE_PACKAGE = 'ONLY' diff --git a/meson/cross/none.txt b/meson/cross/none.txt new file mode 100644 index 000000000..8727e27f4 --- /dev/null +++ b/meson/cross/none.txt @@ -0,0 +1,18 @@ +# native file used to make the build machine compiler unusable + +[host_machine] +system = 'none' +cpu_family = 'none' +cpu = 'none' +endian = 'little' + +[properties] + +[binaries] +c = ['false'] +cpp = ['false'] +objc = ['false'] +objcpp = ['false'] +ar = ['false'] +pkgconfig = ['false'] +cmake = ['false'] diff --git a/meson/cross/ownstdlib.txt b/meson/cross/ownstdlib.txt new file mode 100644 index 000000000..bdff6f44d --- /dev/null +++ b/meson/cross/ownstdlib.txt @@ -0,0 +1,13 @@ +# This is a setup for compiling a program that runs natively +# but uses a custom std lib. This test will only work on +# x86_64. + +[target_machine] +system = 'linux' +cpu_family = 'x86_64' +cpu = 'x86_64' +endian = 'little' + +[properties] + +c_stdlib = 'mylibc' # Subproject name diff --git a/meson/cross/tvos.txt b/meson/cross/tvos.txt new file mode 100644 index 000000000..833f04bfd --- /dev/null +++ b/meson/cross/tvos.txt @@ -0,0 +1,28 @@ +# This is a cross compilation file from OSX Yosemite to Apple tvOS +# Apple keeps changing the location and names of files so +# these might not work for you. Use the googels and xcrun. 
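[Editor's note] The WINEPATH entries in the linux-mingw-w64-*.json files above exist so that the wine/wine64 exe_wrapper can find the MinGW runtime DLLs when cross-built test binaries are executed. A sketch of reproducing the 64-bit setup by hand; the WINEPATH value is copied from the json above and the build directory name is illustrative:

    export WINEPATH="/usr/lib/gcc/x86_64-w64-mingw32/9.2-posix;/usr/x86_64-w64-mingw32/bin;/usr/x86_64-w64-mingw32/lib"

    meson setup build-mingw64 --cross-file cross/linux-mingw-w64-64bit.txt
    meson compile -C build-mingw64
    meson test -C build-mingw64   # test executables run through the 'wine64' exe_wrapper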
+ +[binaries] +c = 'clang' +cpp = 'clang++' +ar = 'ar' +strip = 'strip' + +[built-in options] +c_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] +cpp_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] +c_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] +cpp_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] + +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer' + +has_function_printf = true +has_function_hfkerhisadf = false + +[host_machine] +system = 'darwin' +cpu_family = 'arm' +cpu = 'arm64' +endian = 'little' + diff --git a/meson/cross/ubuntu-armhf.json b/meson/cross/ubuntu-armhf.json new file mode 100644 index 000000000..40f5619c2 --- /dev/null +++ b/meson/cross/ubuntu-armhf.json @@ -0,0 +1,5 @@ +{ + "file": "ubuntu-armhf.txt", + "tests": ["common"], + "env": {} +} diff --git a/meson/cross/ubuntu-armhf.txt b/meson/cross/ubuntu-armhf.txt new file mode 100644 index 000000000..69e0c8611 --- /dev/null +++ b/meson/cross/ubuntu-armhf.txt @@ -0,0 +1,29 @@ +[binaries] +# we could set exe_wrapper = qemu-arm-static but to test the case +# when cross compiled binaries can't be run we don't do that +c = '/usr/bin/arm-linux-gnueabihf-gcc' +cpp = '/usr/bin/arm-linux-gnueabihf-g++' +rust = ['rustc', '--target', 'arm-unknown-linux-gnueabihf', '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7'] +ar = '/usr/arm-linux-gnueabihf/bin/ar' +strip = '/usr/arm-linux-gnueabihf/bin/strip' +pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' +ld = '/usr/bin/arm-linux/gnueabihf-ld' + +[built-in options] +# Used in unit test '140 get define' +c_args = ['-DMESON_TEST_ISSUE_1665=1'] +cpp_args = '-DMESON_TEST_ISSUE_1665=1' + +[properties] +root = '/usr/arm-linux-gnueabihf' + +has_function_printf = true +has_function_hfkerhisadf = false + +skip_sanity_check = true + +[host_machine] +system = 'linux' +cpu_family = 'arm' +cpu = 'armv7' # Not sure if correct. +endian = 'little' diff --git a/meson/cross/ubuntu-faketarget.txt b/meson/cross/ubuntu-faketarget.txt new file mode 100644 index 000000000..cc43998cc --- /dev/null +++ b/meson/cross/ubuntu-faketarget.txt @@ -0,0 +1,13 @@ +# This is a setup for compiling a program that runs natively +# but produces output that runs on a different platform. +# That is either a cross compiler or something like binutils. + +# We don't need to specify any properties or compilers, +# for we use the native ones and can run the resulting +# binaries directly. 
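[Editor's note] ubuntu-faketarget.txt above is the odd one out: it only describes a [target_machine], so the native compilers are used and the produced binaries run directly on the build machine (the intended use case being cross compilers and binutils-style tools). A sketch, assuming it is passed like the other files in cross/; the invocation and build directory name are assumptions, not part of this patch:

    meson setup build-faketarget --cross-file cross/ubuntu-faketarget.txt
    meson compile -C build-faketarget
    # The binaries run natively; only the output they *emit* is meant for the
    # MIPS machine described in [target_machine].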
+ +[target_machine] +system = 'linux' +cpu_family = 'mips' +cpu = 'mips' +endian = 'little' diff --git a/meson/cross/wasm.txt b/meson/cross/wasm.txt new file mode 100644 index 000000000..f2d0cd7f5 --- /dev/null +++ b/meson/cross/wasm.txt @@ -0,0 +1,18 @@ +[binaries] +c = '/home/jpakkane/emsdk/fastcomp/emscripten/emcc' +cpp = '/home/jpakkane/emsdk/fastcomp/emscripten/em++' +ar = '/home/jpakkane/emsdk/fastcomp/emscripten/emar' + +[built-in options] +c_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] +c_link_args = ['-s','EXPORT_ALL=1'] +cpp_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] +cpp_link_args = ['-s', 'EXPORT_ALL=1'] + +[host_machine] + +system = 'emscripten' +cpu_family = 'wasm32' +cpu = 'wasm32' +endian = 'little' + diff --git a/meson/cross/xc16.txt b/meson/cross/xc16.txt new file mode 100644 index 000000000..c66889deb --- /dev/null +++ b/meson/cross/xc16.txt @@ -0,0 +1,26 @@ +# This file assumes that path to the Microchip xc16 toolchain is added +# to the environment(PATH) variable, so that Meson can find +# xc16-gcc and xc16-ar while building. +[binaries] +c = 'xc16-gcc' +ar = 'xc16-ar' +strip = 'xc16-gcc' + +[host_machine] +system = 'bare metal' +cpu_family = 'dspic' +cpu = '33ep64mc203' +endian = 'little' + +[properties] +needs_exe_wrapper = true + +[built-in options] +c_args = [ + '-c', + '-mcpu=33EP64MC203', + '-omf=elf'] +c_link_args = [ + '-mcpu=33EP64MC203', + '-omf=elf', + '-Wl,--script=p33EP64MC203.gld,'] diff --git a/meson/data/.coveragerc.in b/meson/data/.coveragerc.in new file mode 100644 index 000000000..328e13c30 --- /dev/null +++ b/meson/data/.coveragerc.in @@ -0,0 +1,25 @@ +[run] +branch = True +parallel = True +concurrency = multiprocessing +data_file = @ROOT@/.coverage/coverage +source = @ROOT@/mesonbuild/ + +[report] +exclude_lines = + if T.TYPE_CHECKING: + +[paths] +mesonbuild = + mesonbuild/ + __w/meson/meson/mesonbuild/ + @ROOT@/mesonbuild/ + +[html] +directory = @ROOT@/.coverage/html + +[xml] +output = @ROOT@/.coverage/coverage.xml + +[json] +output = @ROOT@/.coverage/coverage.json diff --git a/meson/data/com.mesonbuild.install.policy b/meson/data/com.mesonbuild.install.policy new file mode 100644 index 000000000..65bf3efca --- /dev/null +++ b/meson/data/com.mesonbuild.install.policy @@ -0,0 +1,22 @@ + + + + + The Meson Build System + https://github.com/mesonbuild/meson + + + Install the given project via Meson + Authentication is required to install this project + preferences-system + + no + no + auth_admin_keep + + /usr/bin/python3 + /usr/bin/meson + + + diff --git a/meson/data/macros.meson b/meson/data/macros.meson new file mode 100644 index 000000000..cc4953c5f --- /dev/null +++ b/meson/data/macros.meson @@ -0,0 +1,45 @@ +%__meson %{_bindir}/meson +%__meson_wrap_mode nodownload +%__meson_auto_features enabled + +%meson \ + %set_build_flags \ + %{shrink:%{__meson} \ + --buildtype=plain \ + --prefix=%{_prefix} \ + --libdir=%{_libdir} \ + --libexecdir=%{_libexecdir} \ + --bindir=%{_bindir} \ + --sbindir=%{_sbindir} \ + --includedir=%{_includedir} \ + --datadir=%{_datadir} \ + --mandir=%{_mandir} \ + --infodir=%{_infodir} \ + --localedir=%{_datadir}/locale \ + --sysconfdir=%{_sysconfdir} \ + --localstatedir=%{_localstatedir} \ + --sharedstatedir=%{_sharedstatedir} \ + --wrap-mode=%{__meson_wrap_mode} \ + --auto-features=%{__meson_auto_features} \ + %{_vpath_srcdir} %{_vpath_builddir} \ + %{nil}} + +%meson_build \ + %{shrink:%{__meson} compile \ + -C %{_vpath_builddir} \ + -j %{_smp_build_ncpus} \ + --verbose \ + %{nil}} + +%meson_install \ + 
%{shrink:DESTDIR=%{buildroot} %{__meson} install \ + -C %{_vpath_builddir} \ + --no-rebuild \ + %{nil}} + +%meson_test \ + %{shrink:%{__meson} test \ + -C %{_vpath_builddir} \ + --num-processes %{_smp_build_ncpus} \ + --print-errorlogs \ + %{nil}} diff --git a/meson/data/schema.xsd b/meson/data/schema.xsd new file mode 100644 index 000000000..58c6bfde8 --- /dev/null +++ b/meson/data/schema.xsd @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/meson/data/shell-completions/bash/meson b/meson/data/shell-completions/bash/meson new file mode 100644 index 000000000..993885bbd --- /dev/null +++ b/meson/data/shell-completions/bash/meson @@ -0,0 +1,416 @@ +_meson() { + command="${COMP_WORDS[1]}" + case "$command" in + setup |\ + configure |\ + install |\ + introspect |\ + init |\ + test |\ + wrap |\ + subprojects |\ + help) + _meson-$command "${COMP_WORDS[@]:1}" + ;; + *) + _meson-setup "${COMP_WORDS[@]}" + ;; + esac +} && +complete -F _meson meson + +_meson_complete_option() { + option_string=$1 + + if [[ $# -eq 2 ]] && ! [[ "$option_string" == *=* ]]; then + option_string="$option_string=$2" + fi + + if [[ "$option_string" == *=* ]]; then + _meson_complete_option_value "$option_string" + else + _meson_complete_option_name "$option_string" + fi +} + +_meson_complete_option_name() { + option=$1 + options=($(python3 -c 'import sys, json +for option in json.load(sys.stdin): + print(option["name"]) +' <<< "$(_meson_get_options)")) + compopt -o nospace + COMPREPLY=($(compgen -W '${options[@]}' -S= -- "$option")) +} + +_meson_complete_option_value() { + cur=$1 + option_name=${cur%%=*} + option_value=${cur#*=} + + if _meson_complete_filedir "$option_name" "$option_value"; then + return + fi + +# TODO: support all the option types + options=($(python3 -c 'import sys, json +for option in json.load(sys.stdin): + if option["name"] != "'$option_name'": + continue + choices = [] + if option["type"] == "boolean": + choices.append("true") + choices.append("false") + elif option["type"] == "combo": + for choice in option["choices"]: + choices.append(choice) + for choice in choices: + if choice.startswith("'$cur'"): + print(choice) +' <<< "$(_meson_get_options)")) + COMPREPLY=("${options[@]}") +} + +_meson_get_options() { + local options + for builddir in "${COMP_WORDS[@]}"; do + if [ -d "$builddir" ]; then + break + fi + builddir=. 
+ done + options=$(meson introspect "$builddir" --buildoptions 2>/dev/null) && + echo "$options" || + echo '[]' +} + +_meson_complete_filedir() { + _filedir_in() { + pushd "$1" &>/dev/null + local COMPREPLY=() + _filedir + echo "${COMPREPLY[@]}" + popd &>/dev/null + } + + option=$1 + cur=$2 + case $option in + prefix |\ + libdir |\ + libexecdir |\ + bindir |\ + sbindir |\ + includedir |\ + datadir |\ + mandir |\ + infodir |\ + localedir |\ + sysconfdir |\ + localstatedir |\ + sharedstatedir) + _filedir -d + ;; + cross-file) + _filedir + COMPREPLY+=($(_filedir_in "$XDG_DATA_DIRS"/meson/cross)) + COMPREPLY+=($(_filedir_in /usr/local/share/meson/cross)) + COMPREPLY+=($(_filedir_in /usr/share/meson/cross)) + COMPREPLY+=($(_filedir_in "$XDG_DATA_HOME"/meson/cross)) + COMPREPLY+=($(_filedir_in ~/.local/share/meson/cross)) + ;; + *) + return 1;; + esac + return 0 +} + +_meson-setup() { + + shortopts=( + h + D + v + ) + + longopts=( + help + prefix + libdir + libexecdir + bindir + sbindir + includedir + datadir + mandir + infodir + localedir + sysconfdir + localstatedir + sharedstatedir + backend + buildtype + strip + unity + werror + layout + default-library + warnlevel + stdsplit + errorlogs + cross-file + version + wrap-mode + ) + + local cur prev + if _get_comp_words_by_ref cur prev &>/dev/null && + [ "${prev:0:2}" = '--' ] && _meson_complete_option "${prev:2}" "$cur"; then + return + elif _get_comp_words_by_ref cur prev &>/dev/null && + [ "${prev:0:1}" = '-' ] && [ "${prev:1:2}" != '-' ] && _meson_complete_option "${prev:1}"; then + return + elif _get_comp_words_by_ref -n '=' cur prev &>/dev/null; then + if [ $prev == -D ]; then + _meson_complete_option "$cur" + return + fi + else + cur="${COMP_WORDS[COMP_CWORD]}" + fi + + if [[ "$cur" == "--"* ]]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + elif [[ "$cur" == "-"* ]]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}")) + else + _filedir -d + if [ -z "$cur" ]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}')) + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}')) + fi + + if [ $COMP_CWORD -eq 1 ]; then + COMPREPLY+=($(compgen -W 'setup configure test introspect' -- "$cur")) + fi + fi +} + +_meson-configure() { + + shortopts=( + h + D + ) + + longopts=( + help + clearcache + ) + + local cur prev + if _get_comp_words_by_ref -n '=' cur prev &>/dev/null; then + if [ $prev == -D ]; then + _meson_complete_option "$cur" + return + fi + else + cur="${COMP_WORDS[COMP_CWORD]}" + fi + + if [[ "$cur" == "--"* ]]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + elif [[ "$cur" == "-"* ]]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}")) + else + for dir in "${COMP_WORDS[@]}"; do + if [ -d "$dir" ]; then + break + fi + dir=. + done + if [ ! 
-d "$dir/meson-private" ]; then + _filedir -d + fi + + if [ -z "$cur" ]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}')) + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}')) + fi + fi +} + +_meson-test() { + shortopts=( + q + v + t + C + ) + + longopts=( + quiet + verbose + timeout-multiplier + repeat + no-rebuild + gdb + list + wrapper --wrap + no-suite + suite + no-stdsplit + print-errorlogs + benchmark + logbase + num-processes + setup + test-args + ) + + local cur prev + if _get_comp_words_by_ref -n ':' cur prev &>/dev/null; then + case $prev in + --repeat) + # number, can't be completed + return + ;; + --wrapper) + _command_offset $COMP_CWORD + return + ;; + -C) + _filedir -d + return + ;; + --suite | --no-suite) + for i in "${!COMP_WORDS[@]}"; do + opt="${COMP_WORDS[i]}" + dir="${COMP_WORDS[i+1]}" + case "$opt" in + -C) + break + ;; + esac + dir=. + done + suites=($(python3 -c 'import sys, json; +for test in json.load(sys.stdin): + for suite in test["suite"]: + print(suite) + ' <<< "$(meson introspect "$dir" --tests)")) +# TODO + COMPREPLY+=($(compgen -W "${suites[*]}" -- "$cur")) + return + ;; + --logbase) + # free string, can't be completed + return + ;; + --num-processes) + # number, can't be completed + return + ;; + -t | --timeout-multiplier) + # number, can't be completed + return + ;; + --setup) + # TODO + return + ;; + --test-args) + return + ;; + esac + else + cur="${COMP_WORDS[COMP_CWORD]}" + fi + + if [[ "$cur" == "--"* ]]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + elif [[ "$cur" == "-"* && ${#cur} -gt 1 ]]; then + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}")) + else + for dir in "${COMP_WORDS[@]}"; do + if [ -d "$dir" ]; then + break + fi + dir=. + done + if [ ! -d "$dir/meson-private" ]; then + _filedir -d + fi + + for i in "${!COMP_WORDS[@]}"; do + opt="${COMP_WORDS[i]}" + dir="${COMP_WORDS[i+1]}" + case "$opt" in + -C) + break + ;; + esac + dir=. + done + tests=($(python3 -c 'import sys, json; +for test in json.load(sys.stdin): + print(test["name"]) +' <<< "$(meson introspect "$dir" --tests)")) + COMPREPLY+=($(compgen -W "${tests[*]}" -- "$cur")) + + if [ -z "$cur" ]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}")) + fi + fi +} + +_meson-introspect() { + shortopts=( + h + ) + + longopts=( + targets + installed + buildsystem-files + buildoptions + tests + benchmarks + dependencies + projectinfo + ) + + local cur prev + if ! _get_comp_words_by_ref cur prev &>/dev/null; then + cur="${COMP_WORDS[COMP_CWORD]}" + fi + + if [[ "$cur" == "--"* ]]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + elif [[ "$cur" == "-"* ]]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}")) + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}")) + else + for dir in "${COMP_WORDS[@]}"; do + if [ -d "$dir" ]; then + break + fi + dir=. + done + if [ ! 
-d "$dir/meson-private" ]; then + _filedir -d + fi + + if [ -z "$cur" ]; then + COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}')) + COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}')) + fi + fi +} + +_meson-wrap() { + : TODO +} diff --git a/meson/data/shell-completions/zsh/_meson b/meson/data/shell-completions/zsh/_meson new file mode 100644 index 000000000..cd9357b04 --- /dev/null +++ b/meson/data/shell-completions/zsh/_meson @@ -0,0 +1,425 @@ +#compdef meson + +# vim:ts=2 sw=2 + +# Copyright (c) 2017 Arseny Maslennikov +# All rights reserved. Individual authors, whether or not +# specifically named, retain copyright in all changes; in what follows, they +# are referred to as `the Meson development team'. This is for convenience +# only and this body has no legal status. This file is distributed under +# the following licence. +# +# Permission is hereby granted, without written agreement and without +# licence or royalty fees, to use, copy, modify, and distribute this +# software and to distribute modified versions of this software for any +# purpose, provided that the above copyright notice and the following +# two paragraphs appear in all copies of this software. +# +# In no event shall the Meson development team be liable to any party for +# direct, indirect, special, incidental, or consequential damages arising out +# of the use of this software and its documentation, even if the Meson +# development team have been advised of the possibility of such damage. +# +# The Meson development team specifically disclaim any warranties, including, +# but not limited to, the implied warranties of merchantability and fitness +# for a particular purpose. The software provided hereunder is on an "as is" +# basis, and the Meson development team have no obligation to provide +# maintenance, support, updates, enhancements, or modifications. + +local curcontext="$curcontext" state line +local -i ret + +local __meson_backends="(ninja xcode ${(j. 
.)${:-vs{,2010,2015,2017}}})" +local __meson_build_types="(plain debug debugoptimized minsize release)" +local __meson_wrap_modes="(WrapMode.{default,nofallback,nodownload,forcefallback})" +local __meson_dist_formats=("xztar" "gztar" "zip") +local __meson_cd='-C[change into this directory before running]:target dir:_directories' +local -a __meson_common=( + '--prefix=[installation prefix]: :_directories' + '--bindir=[executable directory]: :_directories' + '--datadir=[data file directory]: :_directories' + '--includedir=[header file directory]: :_directories' + '--infodir=[info page directory]: :_directories' + '--libdir=[library directory]: :_directories' + '--libexecdir=[library executable directory]: :_directories' + '--localedir=[locale data directory]: :_directories' + '--localstatedir=[local state data directory]: :_directories' + '--mandir=[manual page directory]: :_directories' + '--sbindir=[system executable directory]: :_directories' + '--sharedstatedir=[arch-independent data directory]: :_directories' + '--sysconfdir=[system configuration directory]: :_directories' + '--auto-features=[default value for auto features]:auto features types:(auto disabled enabled)' + '--backend=[backend to use]:Meson backend:'"$__meson_backends" + '--buildtype=[build type to use]:Meson build type:'"$__meson_build_types" + '--debug[turn on building with debug]' + '--default-library=[default library type]:default library type:(shared static both)' + '--errorlogs[prints the logs from failing tests]' + '--install-umask=[default umask for permissions of all installed files]' + '--layout=[build directory layout]:build directory layout:(flat mirror)' + '--optimization=[optimization level for compiled targets]:optimization:(0 g 1 2 3 s)' + '--stdsplit=[split stdout and stderr in test logs]' + '--strip[strip targets on install]' + '--unity=[unity builds on/off]:whether to do unity builds:(on off subprojects)' + '--warnlevel=[compiler warning level]:compiler warning level:warning level:(1 2 3)' + '--werror[treat warnings as errors]' + '--wrap-mode=[special wrap mode]:wrap mode:'"$__meson_wrap_modes" + '--force-fallback-for=[force fallback for listed subprojects]' + '--pkg-config-path=[extra paths for HOST pkg-config to search]:paths:_dir_list -s ,' + '--build.pkg-config-path=[extra paths for BUILD pkg-config to search]:paths:_dir_list -s ,' + '--cmake-prefix-path=[extra prefixes for HOST cmake to search]:paths:_dir_list -s ,' + '--build.cmake-prefix-path=[extra prefix for BUILD cmake to search]:paths:_dir_list -s ,' +) + +local -a meson_commands=( +'configure:configure a project' +'dist:generate release archive' +'init:create a new project' +'install:install one more more targets' +'introspect:query project properties' +'setup:set up a build directory' +'test:run tests' +'wrap:manage source dependencies' +'subprojects:manage subprojects' +'compile:Build the project' +) + +(( $+functions[__meson_is_build_dir] )) || __meson_is_build_dir() { + local mpd="${1:-$PWD}/meson-private" + [[ -f "$mpd/build.dat" && -f "$mpd/coredata.dat" ]] + return $? +} + +# TODO: implement build option completion +(( $+functions[__meson_build_options] )) || __meson_build_options() {} + +# `meson introspect` currently can provide that information in JSON. 
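[Editor's note] As the comment above says, the option data needed for completion is available from meson introspect in JSON form. A sketch of inspecting it by hand; the build directory name is illustrative:

    # Pretty-print every configured build option of an existing build directory
    meson introspect builddir --buildoptions | python3 -m json.tool

    # Or list just the option names, which is how the bash completion's
    # _meson_get_options helper consumes the same data
    meson introspect builddir --buildoptions | python3 -c \
      'import sys, json; print("\n".join(o["name"] for o in json.load(sys.stdin)))'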
+# We can: +# 1) pipe its output to python3 -m json.tool | grep "$alovelyregex" | cut <...> +# 2) teach mintro.py to use a different output format +# (or perhaps just to select the fields printed) + +(( $+functions[__meson_test_names] )) || __meson_test_names() { + local rtests + if rtests="$(_call_program meson meson test ${opt_args[-C]:+-C "$opt_args[-C]"} --list)"; + then + local -a tests=(${(@f)rtests}) + _describe -t "tests" "Meson tests" tests + else + _message -r "current working directory is not a build directory" + _message -r 'use -C $build_dir or cd $build_dir' + fi +} + +(( $+functions[__meson_wrap_names] )) || __meson_wrap_names() { + local rwraps + rwraps="$(_call_program meson meson wrap list)" + local -a wraps=(${(@f)rwraps}) + _describe -t wraps "Meson wraps" wraps +} + +(( $+functions[__meson_installed_wraps] )) || __meson_installed_wraps() { + local rwraps + if rwraps="$(ls subprojects/ | grep '\.wrap$' | cut -d . -f 1)"; then + local -a wraps=(${(@f)rwraps}) + _describe -t wraps "Meson wraps" wraps + fi +} + +(( $+functions[_meson_commands] )) || _meson_commands() { + _describe -t commands "Meson subcommands" meson_commands +} + +(( $+functions[_meson-setup] )) || _meson-setup() { + local firstd secondd + if [[ -f "meson.build" ]]; then + # if there's no second argument on the command line + # cwd will implicitly be substituted: + # - as the source directory if it has a file with the name "meson.build"; + # - as the build directory otherwise + # more info in mesonbuild/mesonmain.py + firstd="build" + secondd="source" + else + firstd="source" + secondd="build" + fi + + _arguments \ + '*-D-[set the value of a build option]:build option:__meson_build_options' \ + '--cross-file=[cross-compilation environment description]:cross file:_files' \ + '--native-file=[build machine compilation environment description]:native file:_files' \ + '--clearcache[clear cached state]' \ + '--fatal-meson-warnings=[exit when any meson warnings are encountered]' \ + '(-v --version)'{'-v','--version'}'[print the meson version and exit]' \ + '--reconfigure=[re-run build configuration]' \ + '--wipe=[delete saved state and restart using saved command line options]' \ + ":$firstd directory:_directories" \ + "::$secondd directory:_directories" \ + "${(@)__meson_common}" +} + +(( $+functions[_meson-configure] )) || _meson-configure() { + local curcontext="$curcontext" + # TODO: implement 'mesonconf @file' + local -a specs=( + '*-D-[set the value of a build option]:build option:__meson_build_options' + '::build directory:_directories' + ) + + _arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" \ + "${(@)__meson_common}" +} + +(( $+functions[_meson-test] )) || _meson-test() { + local curcontext="$curcontext" + + # TODO: complete test suites + local -a specs=( + '--repeat[number of times to run the tests]:number of times to repeat: ' + '--no-rebuild[do not rebuild before running tests]' + '--gdb[run tests under gdb]' + '--gdb-path=[program to run for gdb (can be wrapper or compaitble program)]:program:_path_commands' + '--list[list available tests]' + '(--wrapper --wrap)'{'--wrapper=','--wrap='}'[wrapper to run tests with]:wrapper program:_path_commands' + "$__meson_cd" + '(--suite)--no-suite[do not run tests from this suite]:test suite: ' + '(--no-suite)--suite[only run tests from this suite]:test suite: ' + '--no-stdsplit[do not split stderr and stdout in logs]' + '--print-errorlogs[print logs for failing tests]' + '--benchmark[run benchmarks instead of tests]' + 
'--logbase[base name for log file]:filename: ' + '--num-processes[how many threads to use]:number of processes: ' + '(--verbose -v)'{'--verbose','-v'}'[do not redirect stdout and stderr]' + '(--quiet -q)'{'--quiet','-q'}'[produce less output to the terminal]' + '(--timeout-multiplier -t)'{'--timeout-multiplier','-t'}'[a multiplier for test timeouts]:Python floating-point number: ' + '--setup[which test setup to use]:test setup: ' + '--test-args[arguments to pass to the tests]: : ' + '*:Meson tests:__meson_test_names' + ) + + _arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-install] )) || _meson-install() { + local curcontext="$curcontext" + local -a specs=( + "$__meson_cd" + '--no-rebuild[Do not rebuild before installing]' + '--only-changed[Do not overwrite files that are older than the copied file]' + '--quiet[Do not print every fiel that was installed]' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-introspect] )) || _meson-introspect() { + local curcontext="$curcontext" + local -a specs=( + '--ast[dump the ASK of the meson file]' + '--benchmarks[list all benchmarks]' + '--buildoptions[list all build options]' + '--buildsystem-files[list files that belong to the build system]' + '--dependencies[list external dependencies]' + '--installed[list all installed files and directories]' + '--projectinfo[show project information]' + '--targets[list top level targets]' + '--tests[list all unit tests]' + '--backend=[backend to use]:Meson backend:'"$__meson_backends" + '::build directory:_directories' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-init] )) || _meson-init() { + local curcontext="$curcontext" + local -a specs=( + "$__meson_cd" + '(-n --name)'{'-n','--name'}'=[the name of the project (defaults to directory name)]' + '(-e --executable)'{'-e','--executable'}'=[the name of the executable target to create (defaults to project name)]' + '(-d --deps)'{'-d','--deps'}'=[comma separated list of dependencies]' + '(-l --language)'{'-l','--language'}'=[comma separated list of languages (autodetected based on sources if unset)]:languages:_values , (c cpp cs cuda d fortran java objc objcpp rust)' + '(-b --build)'{'-b','--build'}'[build the project immediately after generation]' + '--builddir=[directory for building]:directory:_directories' + '(-f --force)'{'-f','--force'}'[overwrite any existing files and directories]' + '(-t --type)'{'-t','--type'}'=[project type, defaults to executable]:type:(executable library)' + '(-v --version)'{'-v','--version'}'[print the meson version and exit]' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-wrap] )) || _meson-wrap() { + local -a commands=( + 'list:list all available wraps' + 'search:search the db by name' + 'install:install the specified project' + 'update:Update a project to its newest available version' + 'info:Show info about a wrap' + 'status:Show the status of your subprojects' + ) + + if (( CURRENT == 2 )); then + _describe -t commands "Meson wrap subcommands" commands + else + local curcontext="$curcontext" + cmd="${${commands[(r)$words[2]:*]%%:*}}" + if (( $#cmd )); then + if [[ $cmd == status ]]; then + _message "no options" + elif [[ $cmd == "list" ]]; then + _arguments '*:meson wraps' + elif [[ $cmd == "search" ]]; then + _arguments '*:meson wraps' + elif [[ 
$cmd == "install" ]]; then + _arguments '*:meson wraps:__meson_wrap_names' + elif [[ $cmd == "update" ]]; then + _arguments '*:meson wraps:__meson_installed_wraps' + elif [[ $cmd == "info" ]]; then + _arguments '*:meson wraps:__meson_wrap_name' + elif [[ $cmd == "status" ]]; then + _arguments '*:' + elif [[ $cmd == "promote" ]]; then + # TODO: how do you figure out what wraps are provided by subprojects if + # they haven't been fetched yet? + _arguments '*:' + fi + else + _message "unknown meson wrap command: $words[2]" + fi + fi + +} + +(( $+functions[_meson-dist] )) || _meson-dist() { + local curcontext="$curcontext" + local -a specs=( + '--formats=[comma separated list of archive types to create]:archive formats:_values -s , format '"$__meson_dist_formats" + '--include-subprojects[Include source code of subprojects that have been used for the build]' + '--no-tests[Do not build and test generated packages]' + "$__meson_cd" + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-update] )) || _meson-subprojects-update() { + local curcontext="$curcontext" + local -a specs=( + "--rebase[rebase your branch on top of wrap's revision (git only)]" + '--sourcedir=[path to source directory]:_directories' + '*:subprojects:__meson_installed_wraps' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-checkout] )) || _meson-subprojects-checkout() { + local curcontext="$curcontext" + local -a specs=( + '-b[create a new branch]' + '--sourcedir=[path to source directory]:_directories' + # FIXME: this doesn't work exactly right, but I can't figure it out + ':branch name' + '*:subprojects:__meson_installed_wraps' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-download] )) || _meson-subprojects-download() { + local curcontext="$curcontext" + local -a specs=( + '--sourcedir=[path to source directory]:_directories' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects-foreach] )) || _meson-subprojects-foreach() { + local curcontext="$curcontext" + local -a specs=( + '--sourcedir=[path to source directory]:_directories' + '*:command:_command_names -e' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +(( $+functions[_meson-subprojects] )) || _meson-subprojects() { + local -a commands=( + 'update:update all subprojects from wrap files' + 'checkout:checkout a branch (git only)' + 'download:ensure subprojects are fetched, even if not in use. Already downloaded subprojects are not modified.' 
+ 'foreach:execute a command in each subproject directory' + ) + + if (( CURRENT == 2 )); then + _describe -t commands "Meson subproject subcommands" commands + else + local curcontext="$curcontext" + cmd="${${commands[(r)$words[2]:*]%%:*}}" + if (( $#cmd )); then + if [[ $cmd == status ]]; then + _message "no options" + else + _meson-subprojects-$cmd + fi + else + _message "unknown meson subproject command: $words[2]" + fi + fi + +} + +(( $+functions[_meson-compile] )) || _meson-compile() { + local curcontext="$curcontext" + local -a specs=( + "$__meson_cd" + '--clean[Clean the build directory]' + '(-j --jobs)'{'-j','--jobs'}'=[the number fo work jobs to run (if supported)]:_guard "[0-9]#" "number of jobs"' + '(-l --load-averate)'{'-l','--load-average'}'=[the system load average to try to maintain (if supported)]:_guard "[0-9]#" "load average"' + '(-v --verbose)'{'-v','--verbose'}'[Show more output]' + '--ninja-args=[Arguments to pass to ninja (only when using ninja)]' + '--vs-args=[Arguments to pass to vs (only when using msbuild)]' + ) +_arguments \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + "${(@)specs}" +} + +if [[ $service != meson ]]; then + _call_function ret _$service + return ret +fi + +_arguments -C -R \ + '(: -)'{'--help','-h'}'[show a help message and quit]' \ + '(: -)'{'--version','-v'}'[show version information and quit]' \ + '(-): :_meson_commands' \ + '*:: :->post-command' \ +# +ret=$? + +[[ $ret = 300 ]] && case "$state" in + post-command) + service="meson-$words[1]" + curcontext=${curcontext%:*:*}:$service: + _call_function ret _$service + ;; +esac + +return ret + diff --git a/meson/data/syntax-highlighting/emacs/meson.el b/meson/data/syntax-highlighting/emacs/meson.el new file mode 100644 index 000000000..a640bbe71 --- /dev/null +++ b/meson/data/syntax-highlighting/emacs/meson.el @@ -0,0 +1,31 @@ +;; keywords for syntax coloring +(setq meson-keywords + `( + ( ,(regexp-opt '("elif" "if" "else" "endif" "foreach" "endforeach") 'word) . font-lock-keyword-face) + ) + ) + +;; syntax table +(defvar meson-syntax-table nil "Syntax table for `meson-mode'.") +(setq meson-syntax-table + (let ((synTable (make-syntax-table))) + + ;; bash style comment: “# …†+ (modify-syntax-entry ?# "< b" synTable) + (modify-syntax-entry ?\n "> b" synTable) + + synTable)) + +;; define the major mode. +(define-derived-mode meson-mode prog-mode + "meson-mode is a major mode for editing Meson build definition files." + :syntax-table meson-syntax-table + + (setq font-lock-defaults '(meson-keywords)) + (setq mode-name "meson") + (setq-local comment-start "# ") + (setq-local comment-end "")) + +(add-to-list 'auto-mode-alist '("meson.build" . 
meson-mode)) +(provide 'meson) +;;; meson.el ends here diff --git a/meson/data/syntax-highlighting/vim/README b/meson/data/syntax-highlighting/vim/README new file mode 100644 index 000000000..95188fcc7 --- /dev/null +++ b/meson/data/syntax-highlighting/vim/README @@ -0,0 +1,4 @@ +ftdetect sets the filetype +ftplugin sets Meson indentation rules +indent does Meson indentation +syntax does Meson syntax highlighting diff --git a/meson/data/syntax-highlighting/vim/ftdetect/meson.vim b/meson/data/syntax-highlighting/vim/ftdetect/meson.vim new file mode 100644 index 000000000..3233c5875 --- /dev/null +++ b/meson/data/syntax-highlighting/vim/ftdetect/meson.vim @@ -0,0 +1,3 @@ +au BufNewFile,BufRead meson.build set filetype=meson +au BufNewFile,BufRead meson_options.txt set filetype=meson +au BufNewFile,BufRead *.wrap set filetype=dosini diff --git a/meson/data/syntax-highlighting/vim/ftplugin/meson.vim b/meson/data/syntax-highlighting/vim/ftplugin/meson.vim new file mode 100644 index 000000000..d48fa1dfd --- /dev/null +++ b/meson/data/syntax-highlighting/vim/ftplugin/meson.vim @@ -0,0 +1,20 @@ +" Vim filetype plugin file +" Language: meson +" License: VIM License +" Maintainer: Liam Beguin +" Original Author: Laurent Pinchart +" Last Change: 2018 Nov 27 + +if exists("b:did_ftplugin") | finish | endif +let b:did_ftplugin = 1 +let s:keepcpo= &cpo +set cpo&vim + +setlocal commentstring=#\ %s +setlocal comments=:# + +setlocal shiftwidth=2 +setlocal softtabstop=2 + +let &cpo = s:keepcpo +unlet s:keepcpo diff --git a/meson/data/syntax-highlighting/vim/indent/meson.vim b/meson/data/syntax-highlighting/vim/indent/meson.vim new file mode 100644 index 000000000..ec0cbb4ae --- /dev/null +++ b/meson/data/syntax-highlighting/vim/indent/meson.vim @@ -0,0 +1,181 @@ +" Vim indent file +" Language: Meson +" License: VIM License +" Maintainer: Nirbheek Chauhan +" Liam Beguin +" Original Authors: David Bustos +" Bram Moolenaar +" Last Change: 2015 Feb 23 + +" Only load this indent file when no other was loaded. +if exists("b:did_indent") + finish +endif +let b:did_indent = 1 + +" Some preliminary settings +setlocal nolisp " Make sure lisp indenting doesn't supersede us +setlocal autoindent " indentexpr isn't much help otherwise + +setlocal indentexpr=GetMesonIndent(v:lnum) +setlocal indentkeys+==elif,=else,=endforeach,=endif,0) + +" Only define the function once. +if exists("*GetMesonIndent") + finish +endif +let s:keepcpo= &cpo +set cpo&vim + +" Come here when loading the script the first time. + +let s:maxoff = 50 " maximum number of lines to look backwards for () + +function GetMesonIndent(lnum) + echom getline(line(".")) + + " If this line is explicitly joined: If the previous line was also joined, + " line it up with that one, otherwise add two 'shiftwidth' + if getline(a:lnum - 1) =~ '\\$' + if a:lnum > 1 && getline(a:lnum - 2) =~ '\\$' + return indent(a:lnum - 1) + endif + return indent(a:lnum - 1) + (exists("g:mesonindent_continue") ? eval(g:mesonindent_continue) : (shiftwidth() * 2)) + endif + + " If the start of the line is in a string don't change the indent. + if has('syntax_items') + \ && synIDattr(synID(a:lnum, 1, 1), "name") =~ "String$" + return -1 + endif + + " Search backwards for the previous non-empty line. + let plnum = prevnonblank(v:lnum - 1) + + if plnum == 0 + " This is the first non-empty line, use zero indent. + return 0 + endif + + " If the previous line is inside parenthesis, use the indent of the starting + " line. 
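[Editor's note] The Vim support files in this patch (ftdetect, ftplugin, indent and syntax, as described in the short README above) can be used without a plugin manager by copying them into a user runtime directory. A sketch, assuming the conventional ~/.vim layout and that it is run from the tree containing this meson/ checkout:

    # Copy the bundled Vim support files into the user runtime path
    for d in ftdetect ftplugin indent syntax; do
      mkdir -p ~/.vim/$d
      cp meson/data/syntax-highlighting/vim/$d/meson.vim ~/.vim/$d/
    done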
+ " Trick: use the non-existing "dummy" variable to break out of the loop when + " going too far back. + call cursor(plnum, 1) + let parlnum = searchpair('(\|{\|\[', '', ')\|}\|\]', 'nbW', + \ "line('.') < " . (plnum - s:maxoff) . " ? dummy :" + \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')" + \ . " =~ '\\(Comment\\|Todo\\|String\\)$'") + if parlnum > 0 + let plindent = indent(parlnum) + let plnumstart = parlnum + else + let plindent = indent(plnum) + let plnumstart = plnum + endif + + + " When inside parenthesis: If at the first line below the parenthesis add + " a 'shiftwidth', otherwise same as previous line. + " i = (a + " + b + " + c) + call cursor(a:lnum, 1) + let p = searchpair('(\|{\|\[', '', ')\|}\|\]', 'bW', + \ "line('.') < " . (a:lnum - s:maxoff) . " ? dummy :" + \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')" + \ . " =~ '\\(Comment\\|Todo\\|String\\)$'") + if p > 0 + if p == plnum + " When the start is inside parenthesis, only indent one 'shiftwidth'. + let pp = searchpair('(\|{\|\[', '', ')\|}\|\]', 'bW', + \ "line('.') < " . (a:lnum - s:maxoff) . " ? dummy :" + \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')" + \ . " =~ '\\(Comment\\|Todo\\|String\\)$'") + if pp > 0 + return indent(plnum) + (exists("g:pyindent_nested_paren") ? eval(g:pyindent_nested_paren) : shiftwidth()) + endif + return indent(plnum) + (exists("g:pyindent_open_paren") ? eval(g:pyindent_open_paren) : shiftwidth()) + endif + if plnumstart == p + return indent(plnum) + endif + return plindent + endif + + + " Get the line and remove a trailing comment. + " Use syntax highlighting attributes when possible. + let pline = getline(plnum) + let pline_len = strlen(pline) + if has('syntax_items') + " If the last character in the line is a comment, do a binary search for + " the start of the comment. synID() is slow, a linear search would take + " too long on a long line. + if synIDattr(synID(plnum, pline_len, 1), "name") =~ "\\(Comment\\|Todo\\)$" + let min = 1 + let max = pline_len + while min < max + let col = (min + max) / 2 + if synIDattr(synID(plnum, col, 1), "name") =~ "\\(Comment\\|Todo\\)$" + let max = col + else + let min = col + 1 + endif + endwhile + let pline = strpart(pline, 0, min - 1) + endif + else + let col = 0 + while col < pline_len + if pline[col] == '#' + let pline = strpart(pline, 0, col) + break + endif + let col = col + 1 + endwhile + endif + + " If the previous line ended the conditional/loop + if getline(plnum) =~ '^\s*\(endif\|endforeach\)\>\s*' + " Maintain indent + return -1 + endif + + " If the previous line ended with a builtin, indent this line + if pline =~ '^\s*\(foreach\|if\|else\|elif\)\>\s*' + return plindent + shiftwidth() + endif + + " If the current line begins with a header keyword, deindent + if getline(a:lnum) =~ '^\s*\(else\|elif\|endif\|endforeach\)' + + " Unless the previous line was a one-liner + if getline(plnumstart) =~ '^\s*\(foreach\|if\)\>\s*' + return plindent + endif + + " Or the user has already dedented + if indent(a:lnum) <= plindent - shiftwidth() + return -1 + endif + + return plindent - shiftwidth() + endif + + " When after a () construct we probably want to go back to the start line. 
+ " a = (b + " + c) + " here + if parlnum > 0 + return plindent + endif + + return -1 + +endfunction + +let &cpo = s:keepcpo +unlet s:keepcpo + +" vim:sw=2 diff --git a/meson/data/syntax-highlighting/vim/syntax/meson.vim b/meson/data/syntax-highlighting/vim/syntax/meson.vim new file mode 100644 index 000000000..215a3b77f --- /dev/null +++ b/meson/data/syntax-highlighting/vim/syntax/meson.vim @@ -0,0 +1,154 @@ +" Vim syntax file +" Language: Meson +" License: VIM License +" Maintainer: Nirbheek Chauhan +" Liam Beguin +" Last Change: 2016 Dec 7 +" Credits: Zvezdan Petkovic +" Neil Schemenauer +" Dmitry Vasiliev +" +" This version is copied and edited from python.vim +" It's very basic, and doesn't do many things I'd like it to +" For instance, it should show errors for syntax that is valid in +" Python but not in Meson. +" +" Optional highlighting can be controlled using these variables. +" +" let meson_space_error_highlight = 1 +" + +if exists("b:current_syntax") + finish +endif + +" We need nocompatible mode in order to continue lines with backslashes. +" Original setting will be restored. +let s:cpo_save = &cpo +set cpo&vim + +" http://mesonbuild.com/Syntax.html +syn keyword mesonConditional elif else if endif +syn keyword mesonRepeat foreach endforeach +syn keyword mesonOperator and not or in +syn keyword mesonStatement continue break + +syn match mesonComment "#.*$" contains=mesonTodo,@Spell +syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained + +" Strings can either be single quoted or triple counted across multiple lines, +" but always with a ' +syn region mesonString + \ start="\z('\)" end="\z1" skip="\\\\\|\\\z1" + \ contains=mesonEscape,@Spell +syn region mesonString + \ start="\z('''\)" end="\z1" keepend + \ contains=mesonEscape,mesonSpaceError,@Spell + +syn match mesonEscape "\\[abfnrtv'\\]" contained +syn match mesonEscape "\\\o\{1,3}" contained +syn match mesonEscape "\\x\x\{2}" contained +syn match mesonEscape "\%(\\u\x\{4}\|\\U\x\{8}\)" contained +" Meson allows case-insensitive Unicode IDs: http://www.unicode.org/charts/ +syn match mesonEscape "\\N{\a\+\%(\s\a\+\)*}" contained +syn match mesonEscape "\\$" + +" Meson only supports integer numbers +" http://mesonbuild.com/Syntax.html#numbers +syn match mesonNumber "\<\d\+\>" + +" booleans +syn keyword mesonConstant false true + +" Built-in functions +syn keyword mesonBuiltin + \ add_global_arguments + \ add_global_link_arguments + \ add_languages + \ add_project_arguments + \ add_project_link_arguments + \ add_test_setup + \ alias_target + \ assert + \ benchmark + \ both_libraries + \ build_machine + \ build_target + \ configuration_data + \ configure_file + \ custom_target + \ declare_dependency + \ dependency + \ disabler + \ environment + \ error + \ executable + \ files + \ find_library + \ find_program + \ generator + \ get_option + \ get_variable + \ gettext + \ host_machine + \ import + \ include_directories + \ install_data + \ install_headers + \ install_man + \ install_subdir + \ is_disabler + \ is_variable + \ jar + \ join_paths + \ library + \ meson + \ message + \ option + \ project + \ run_command + \ run_target + \ set_variable + \ shared_library + \ shared_module + \ static_library + \ subdir + \ subdir_done + \ subproject + \ summary + \ target_machine + \ test + \ vcs_tag + \ warning + \ range + +if exists("meson_space_error_highlight") + " trailing whitespace + syn match mesonSpaceError display excludenl "\s\+$" + " mixed tabs and spaces + syn match mesonSpaceError display " \+\t" + syn match 
mesonSpaceError display "\t\+ " +endif + +" The default highlight links. Can be overridden later. +hi def link mesonStatement Statement +hi def link mesonConditional Conditional +hi def link mesonRepeat Repeat +hi def link mesonOperator Operator +hi def link mesonComment Comment +hi def link mesonTodo Todo +hi def link mesonString String +hi def link mesonEscape Special +hi def link mesonNumber Number +hi def link mesonBuiltin Function +hi def link mesonConstant Number +if exists("meson_space_error_higlight") + hi def link mesonSpaceError Error +endif + +let b:current_syntax = "meson" + +let &cpo = s:cpo_save +unlet s:cpo_save + +" vim:set sw=2 sts=2 ts=8 noet: diff --git a/meson/data/test.schema.json b/meson/data/test.schema.json new file mode 100644 index 000000000..a809388b4 --- /dev/null +++ b/meson/data/test.schema.json @@ -0,0 +1,179 @@ +{ + "type": "object", + "additionalProperties": false, + "properties": { + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "installed": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "file": { + "type": "string" + }, + "type": { + "type": "string", + "enum": [ + "file", + "python_file", + "dir", + "exe", + "shared_lib", + "python_lib", + "pdb", + "implib", + "py_implib", + "implibempty", + "expr" + ] + }, + "platform": { + "type": "string", + "enum": [ + "msvc", + "gcc", + "cygwin", + "!cygwin" + ] + }, + "version": { + "type": "string" + }, + "language": { + "type": "string" + } + }, + "required": [ + "file", + "type" + ] + } + }, + "matrix": { + "type": "object", + "properties": { + "options": { + "additionalProperties": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "val": { + "type": ["string", "boolean", "null", "array"], + "items": { + "type": "string" + } + }, + "compilers": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "skip_on_env": { + "type": "array", + "items": { + "type": "string" + } + }, + "skip_on_jobname": { + "type": "array", + "items": { + "type": "string" + } + }, + "skip_on_os": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "val" + ] + } + }, + "exclude": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": { + "type": ["string", "boolean", "array"], + "items": { + "type": "string" + } + } + } + } + } + } + }, + "do_not_set_opts": { + "type": "array", + "items": { + "type": "string", + "enum": [ + "libdir", + "prefix" + ] + } + }, + "tools": { + "type": "object" + }, + "stdout": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "line": { + "type": "string" + }, + "match": { + "type": "string", + "enum": [ + "literal", + "re" + ] + }, + "count": { + "type": "integer" + }, + "comment": { + "type": "string" + } + }, + "required": [ + "line" + ] + } + }, + "skip_on_env": { + "type": "array", + "items": { + "type": "string" + } + }, + "skip_on_jobname": { + "type": "array", + "items": { + "type": "string" + } + }, + "skip_on_os": { + "type": "array", + "items": { + "type": "string" + } + } + } +} diff --git a/meson/docs/.editorconfig b/meson/docs/.editorconfig new file mode 100644 index 000000000..b5276f173 --- /dev/null +++ b/meson/docs/.editorconfig @@ -0,0 +1,2 @@ +[sitemap.txt] +indent_style = tab diff --git a/meson/docs/README.md b/meson/docs/README.md new file mode 100644 index 000000000..55fc3ec81 --- /dev/null +++ 
b/meson/docs/README.md @@ -0,0 +1,40 @@ +# Meson Documentation + +## Build dependencies + +Meson uses itself and [hotdoc](https://github.com/hotdoc/hotdoc) for generating documentation. + +Minimum required version of hotdoc is *0.8.9*. + +Instructions on how to install hotdoc are [here](https://hotdoc.github.io/installing.html). + +## Building the documentation + +From the Meson repository root dir: +``` +$ cd docs/ +$ meson built_docs +$ ninja -C built_docs/ upload +``` +Now you should be able to open the documentation locally +``` +built_docs/Meson documentation-doc/html/index.html +``` + +## Upload + +Meson uses the git-upload hotdoc plugin which basically +removes the html pages and replaces with the new content. + +You can simply run: +``` +$ ninja -C built_docs/ upload +``` + +## Contributing to the documentation + +Commits that only change documentation should have `[skip ci]` in their commit message, so CI is not run (it is quite slow). +For example: +``` +A commit message [skip ci] +``` diff --git a/meson/docs/genrelnotes.py b/meson/docs/genrelnotes.py new file mode 100755 index 000000000..082ab45f0 --- /dev/null +++ b/meson/docs/genrelnotes.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 + +# Copyright 2019 The Meson development team +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +''' + Generates release notes for new releases of Meson build system +''' +import subprocess +import sys +import os +from glob import glob + +RELNOTE_TEMPLATE = '''--- +title: Release {} +short-description: Release notes for {} +... + +# New features + +''' + + +def add_to_sitemap(from_version, to_version): + ''' + Adds release note entry to sitemap.txt. + ''' + sitemapfile = '../sitemap.txt' + s_f = open(sitemapfile, encoding='utf-8') + lines = s_f.readlines() + s_f.close() + with open(sitemapfile, 'w', encoding='utf-8') as s_f: + for line in lines: + if 'Release-notes' in line and from_version in line: + new_line = line.replace(from_version, to_version) + s_f.write(new_line) + s_f.write(line) + +def generate(from_version, to_version): + ''' + Generate notes for Meson build next release. 
+ ''' + ofilename = f'Release-notes-for-{to_version}.md' + with open(ofilename, 'w', encoding='utf-8') as ofile: + ofile.write(RELNOTE_TEMPLATE.format(to_version, to_version)) + for snippetfile in glob('snippets/*.md'): + snippet = open(snippetfile, encoding='utf-8').read() + ofile.write(snippet) + if not snippet.endswith('\n'): + ofile.write('\n') + ofile.write('\n') + subprocess.check_call(['git', 'rm', snippetfile]) + subprocess.check_call(['git', 'add', ofilename]) + add_to_sitemap(from_version, to_version) + +if __name__ == '__main__': + if len(sys.argv) != 3: + print(sys.argv[0], 'from_version to_version') + sys.exit(1) + FROM_VERSION = sys.argv[1] + TO_VERSION = sys.argv[2] + os.chdir('markdown') + generate(FROM_VERSION, TO_VERSION) diff --git a/meson/docs/markdown/ARM-performance-test.md b/meson/docs/markdown/ARM-performance-test.md new file mode 100644 index 000000000..4be6e4aab --- /dev/null +++ b/meson/docs/markdown/ARM-performance-test.md @@ -0,0 +1,81 @@ +# Arm performance test + +Performance differences in build systems become more apparent on +slower platforms. To examine this difference we compared the +performance of Meson with GNU Autotools. We took the GLib software +project and rewrote its build setup with Meson. GLib was chosen +because it is a relatively large C code base which requires lots of +low level configuration. + +The Meson version of the build system is not fully equivalent to the +original Autotools one. It does not do all the same configuration +steps and does not build all the same targets. The biggest missing +piece being internationalisation support with Gettext. However it does +configure the system enough to build all C source and run all unit +tests. + +All measurements were done on a Nexus 4 smart phone running the latest +Ubuntu touch image (updated on September 9th 2013). + +Measurements +------ + +The first thing we measured was the time it took to run the configure step. + +![GLib config time](images/glib_conf.png) + +Meson takes roughly 20 seconds whereas Autotools takes 220. This is a +difference of one order of magnitude. Autotools' time contains both +autogen and configure. Again it should be remembered that Meson does +not do all the configure steps that Autotools does. It does do about +90% of them and it takes just 10% of the time to do it. + +Then we measured the build times. Two parallel compilation processes +were used for both systems. + +![GLib build time](images/glib_build.png) + +On desktop machines Ninja based build systems are 10-20% faster than +Make based ones. On this platform the difference grows to 50%. The +difference is probably caused by Make's inefficient disk access +patterns. Ninja is better at keeping both cores running all the time +which yields impressive performance improvements. + +![GLib no-op time](images/glib_empty.png) + +Next we measured the "empty build" case. That is, how long does it +take for the build system to detect that no changes need to be +made. This is one of the most important metrics of build systems +because it places a hard limit on how fast you can iterate on your +code. Autotools takes 14 seconds to determine that no work needs to be +done. Meson (or, rather, Ninja) takes just one quarter of a second. + +![GLib link time](images/glib_link.png) + +One step which takes quite a lot of time is linking. A common case is +that you are working on a library and there are tens of small test +executables that link to it. 
Even if the compilation step would be +fast, relinking all of the test executables takes time. It is common +for people to manually compile only one test application with a +command such as `make sometest` rather than rebuild everything. + +Meson has an optimization for this case. Whenever a library is +rebuilt, Meson inspects the ABI it exports. If it has not changed, +Meson will skip all relinking steps as unnecessary. The difference +this makes can be clearly seen in the chart above. In that test the +source was fully built, then the file `glib/gbytes.c` was touched to +force the rebuild of the base glib shared library. As can be seen, +Autotools then relinks all test executables that link with glib. Since +Meson can detect that the ABI is the same it can skip those steps. The +end result being that Meson is almost one hundred times faster on this +very common use case. + +Conclusions +----- + +One of the main drawbacks of C and C++ compared to languages such as +Java are long compilation times. However at least some of the blame +can be found in the build tools used rather than the languages +themselves or their compilers. Choosing proper tools can bring C and +C++ compilation very close to instantaneous rebuilds. This has a +direct impact on programmer productivity. diff --git a/meson/docs/markdown/Adding-arguments.md b/meson/docs/markdown/Adding-arguments.md new file mode 100644 index 000000000..adbc23e13 --- /dev/null +++ b/meson/docs/markdown/Adding-arguments.md @@ -0,0 +1,72 @@ +--- +short-description: Adding compiler arguments +... + +# Adding arguments + +Often you need to specify extra compiler arguments. Meson provides two +different ways to achieve this: global arguments and per-target +arguments. + +Global arguments +-- + +Global compiler arguments are set with the following command. As an +example you could do this. + +```meson +add_global_arguments('-DFOO=bar', language : 'c') +``` + +This makes Meson add the define to all C compilations. Usually you +would use this setting for flags for global settings. Note that for +setting the C/C++ language standard (the `-std=c99` argument in GCC), +you would probably want to use a default option of the `project()` +function. For details see the [reference manual](Reference-manual.md). + +Global arguments have certain limitations. They all have to be defined +before any build targets are specified. This ensures that the global +flags are the same for every single source file built in the entire +project with one exception. Compilation tests that are run as part of +your project configuration do not use these flags. The reason for that +is that you may need to run a test compile with and without a given +flag to determine your build setup. For this reason tests do not use +these global arguments. + +You should set only the most essential flags with this setting, you +should *not* set debug or optimization flags. Instead they should be +specified by selecting an appropriate build type. + +Project arguments +-- + +Project arguments work similar to global arguments except that they +are valid only within the current subproject. The usage is simple: + +```meson +add_project_arguments('-DMYPROJ=projname', language : 'c') +``` + +This would add the compiler flags to all C sources in the current +project. + +Per target arguments +-- + +Per target arguments are just as simple to define. 
+
+```meson
+executable('prog', 'prog.cc', cpp_args : '-DCPPTHING')
+```
+
+Here we create a C++ executable with an extra argument that is used
+during compilation but not for linking.
+
+You can find the parameter name for other languages in the [reference
+tables](Reference-tables.md).
+
+Specifying extra linker arguments is done in the same way:
+
+```meson
+executable('prog', 'prog.cc', link_args : '-Wl,--linker-option')
+```
diff --git a/meson/docs/markdown/Adding-new-projects-to-wrapdb.md b/meson/docs/markdown/Adding-new-projects-to-wrapdb.md
new file mode 100644
index 000000000..51a0d7171
--- /dev/null
+++ b/meson/docs/markdown/Adding-new-projects-to-wrapdb.md
@@ -0,0 +1,267 @@
+# Adding new projects to WrapDB
+
+
+## How it works
+
+Each wrap repository has a master branch with only one initial commit
+and *no* wrap files. And that is the only commit ever made on that
+branch.
+
+For every release of a project a new branch is created. The new branch
+is named after the upstream release number (e.g. `1.0.0`). This
+branch holds a wrap file for this particular release.
+
+There are two types of wraps on WrapDB - regular wraps and wraps with
+Meson build definition patches. A wrap file in a repository on WrapDB
+must be named `upstream.wrap`.
+
+Wraps with Meson build definition patches work in much the same way as
+Debian: we take the unaltered upstream source package and add a new
+build system to it as a patch. These build systems are stored as Git
+repositories on GitHub. They only contain build definition files. You
+may also think of them as an overlay to the upstream source.
+
+Whenever a new commit is pushed into GitHub's project branch, a new
+wrap is generated with an incremented version number. All the old
+releases remain unaltered. New commits are always done via GitHub
+merge requests and must be reviewed by someone other than the
+submitter.
+
+Note that your Git repo with the wrap must not contain the subdirectory
+of the source release. That gets added automatically by the service. You
+also must not commit any source code from the original tarball into
+the wrap repository.
+
+## Choosing the repository name
+
+Wrapped subprojects are used much like external dependencies. Thus
+they should have the same name as the upstream projects.
+
+NOTE: Repo names must fully match this regexp: `[a-z0-9._]+`.
+
+If the project provides a pkg-config file, then the repository name
+should be the same as the pkg-config name. Usually this is the name of
+the project, such as `libpng`. Sometimes it is slightly different,
+however. As an example the libogg project's chosen pkg-config name is
+`ogg` instead of `libogg`, which is the reason why the repository is
+named plain `ogg`.
+
+If there is no pkg-config file, the name the project uses/promotes
+should be used, lowercase only (Catch2 -> catch2).
+
+If the project name is too generic or ambiguous (e.g. `benchmark`),
+consider using the `organization-project` naming format (e.g.
+`google-benchmark`).
+
+## How to contribute a new wrap
+
+If the project already uses the Meson build system, then only a wrap
+file - `upstream.wrap` - needs to be provided. Otherwise a Meson build
+definition patch - a set of `meson.build` files - should also be
+provided.
+
+### Request a new repository
+
+*Note:* you should only do this if you have written the build files
+and want to contribute them for inclusion to WrapDB. The maintainers
+have only limited resources and unfortunately cannot take requests
+to write Meson build definitions for arbitrary projects.
+
+The submission starts by creating an issue on the [wrapdb bug
+tracker](https://github.com/mesonbuild/wrapdb/issues) using *Title*
+and *Description* below as a template.
+
+*Title:* `new wrap: `
+
+*Description:*
+```
+upstream url:
+version:
+```
+
+Wait until the new repository or branch is created. A link to the new
+repository or branch will be posted in a comment to this issue. After
+this you can create a merge request in that repository for your build
+files.
+
+NOTE: Requesting a branch is not necessary. A WrapDB maintainer can
+create the branch and modify the PR accordingly if the project
+repository exists.
+
+### Creating the wrap contents
+
+Setting up the contents might seem a bit counterintuitive at first.
+Just remember that the outcome needs to have one (and only one) commit
+that has all the build definition files (i.e. `meson.build` and
+`meson_options.txt` files) and _nothing else_. It is good practice to
+have this commit in a branch whose name matches the release as
+described above.
+
+First you need to fork the repository to your own page using GitHub's
+fork button. Then you can clone the repo to your local machine.
+
+
+```
+git clone git@github.com:yourusername/libfoo.git foo-wrap
+```
+
+Create a new branch for your work:
+
+```
+git checkout -b 1.0.0
+```
+
+If you are adding new changes to an existing branch, leave out the
+`-b` argument.
+
+Now you need to copy the source code for the original project to this
+directory. If you already have it extracted somewhere, you'd do
+something like this:
+
+```
+cd /path/to/source/extract/dir
+cp -r * /path/to/foo-wrap
+```
+
+Now all the files should be in the wrap directory. Do _not_ add them
+to Git, though. Neither now nor at any time during this process. The
+repo must contain only the newly created build files.
+
+New release branches require an `upstream.wrap` file, so create one if
+needed.
+
+```
+${EDITOR} upstream.wrap
+```
+
+The file format is simple; see any existing wrapdb repo for the
+content. The checksum is SHA-256 and can be calculated with the
+following command on most Unix-like operating systems:
+
+```
+sha256sum path/to/libfoo-1.0.0.tar.gz
+```
+
+Under macOS the command is the following:
+
+```
+shasum -a 256 path/to/libfoo-1.0.0.tar.gz
+```
+
+Next you need to add the entries that define what dependencies the
+current project provides. This is important, as it is what makes
+Meson's automatic dependency resolver work. It is done by adding a
+`provide` entry at the end of the `upstream.wrap` file. Using the Ogg
+library as an example, this is what it would look like:
+
+```ini
+[provide]
+ogg = ogg_dep
+```
+
+The `ogg` part on the left refers to the dependency name, which should
+be the same as its pkg-config name. `ogg_dep` on the right refers to
+the variable in the build definitions that provides the dependency.
+Most commonly it holds the result of a `declare_dependency` call. If a
+variable of that name is not defined, Meson will exit with a hard
+error. For further details see [the main Wrap
+manual](Wrap-dependency-system-manual.md).
+
+Now you can create the build files and work on them until the project
+builds correctly.
+
+```
+${EDITOR} meson.build meson_options.txt
+```
+
+When you are satisfied with the results, add the build files to Git
+and push the result to GitHub.
+ +``` + +git add upstream.wrap meson.build +git commit -a -m 'Add wrap files for libfoo-1.0.0' +git push -u origin 1.0.0 +``` + +Now you should create a pull request on GitHub. Try to create it +against the correct branch rather than master (`1.0.0` branch in this +example). GitHub should do this automatically. + +If the branch doesn't exist file a pull request against master. +WrapDB maintainers can fix it before merging. + +If packaging review requires you to do changes, use the `--amend` +argument to `commit` so that your branch will continue to have only +one commit. + +``` +${EDITOR} meson.build +git commit -a --amend +git push --force +``` + +### Request a new release version to an existing repository + +Adding new releases to an existing repo is straightforward. All you +need to do is to follow the rules discussed above but when you create +the merge request, file it against the master branch. The repository +reviewer will create the necessary branch and retarget your merge +request accordingly. + +## What is done by WrapDB maintainers + +[mesonwrap tools](Wrap-maintainer-tools.md) must be used for the tasks +below. + +### Adding new project to the Wrap provider service + +Each project gets its own repo. It is initialized like this: + +``` +mesonwrap new_repo --homepage=$HOMEPAGE --directory=$NEW_LOCAL_PROJECT_DIR $PROJECT_NAME +``` + +The command creates a new repository and uploads it to GitHub. + +`--version` flag may be used to create a branch immediately. + +### Adding a new branch to an existing project + +Create a new branch whose name matches the upstream release number. + +``` +git checkout master +git checkout -b 1.0.0 +git push origin 1.0.0 +(or from GitHub web page, remember to branch from master) +``` + +Branch names must fully match this regexp: `[a-z0-9._]+`. + +## Changes to original source + +The point of a wrap is to provide the upstream project with as few +changes as possible. Most projects should not contain anything more +than a few Meson definition files. Sometimes it may be necessary to +add a template header file or something similar. These should be held +at a minimum. + +It should especially be noted that there must **not** be any patches +to functionality. All such changes must be submitted to upstream. You +may also host your own Git repo with the changes if you wish. The Wrap +system has native support for Git subprojects. + +## Passing automatic validation + +Every submitted wrap goes through an automated correctness review and +passing it is a requirement for merging. Therefore it is highly +recommended that you run the validation checks yourself so you can fix +any issues faster. + +Instructions on how to install and run the review tool can be found on +the [Wrap review guidelines page](Wrap-review-guidelines.md). If your +submission is merge request number 5 for a repository called `mylib`, +then you'd run the following command: + + mesonwrap review --pull-request 5 mylib diff --git a/meson/docs/markdown/Additional.md b/meson/docs/markdown/Additional.md new file mode 100644 index 000000000..2fff48842 --- /dev/null +++ b/meson/docs/markdown/Additional.md @@ -0,0 +1,8 @@ +--- +short-description: Misc documentation +... + +# Additional documentation + +This section references documents miscellaneous design, benchmarks, or +basically anything concerning Meson. 
diff --git a/meson/docs/markdown/Build-options.md b/meson/docs/markdown/Build-options.md
new file mode 100644
index 000000000..4a0b17800
--- /dev/null
+++ b/meson/docs/markdown/Build-options.md
@@ -0,0 +1,229 @@
+---
+short-description: Build options to configure project properties
+...
+
+# Build options
+
+Most non-trivial builds require user-settable options. As an example a
+program may have two different data backends that are selectable at
+build time. Meson provides for this by having an option definition
+file. Its name is `meson_options.txt` and it is placed at the root of
+your source tree.
+
+Here is a simple option file.
+
+```meson
+option('someoption', type : 'string', value : 'optval', description : 'An option')
+option('other_one', type : 'boolean', value : false)
+option('combo_opt', type : 'combo', choices : ['one', 'two', 'three'], value : 'three')
+option('integer_opt', type : 'integer', min : 0, max : 5, value : 3) # Since 0.45.0
+option('free_array_opt', type : 'array', value : ['one', 'two']) # Since 0.44.0
+option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two'])
+option('some_feature', type : 'feature', value : 'enabled') # Since 0.47.0
+option('long_desc', type : 'string', value : 'optval',
+       description : 'An option with a very long description' +
+                     'that does something in a specific context') # Since 0.55.0
+```
+
+For built-in options, see [Built-in options][builtin_opts].
+
+## Build option types
+
+All types allow a `description` value to be set describing the option;
+if no description is set then the name of the option will be used instead.
+
+### Strings
+
+The string type is a free form string. If the default value is not set
+then an empty string will be used as the default.
+
+### Booleans
+
+Booleans may have values of either `true` or `false`. If no default
+value is supplied then `true` will be used as the default.
+
+### Combos
+
+A combo allows any one of the values in the `choices` parameter to be
+selected. If no default value is set then the first value will be the
+default.
+
+### Integers
+
+An integer option contains a single integer with optional upper and
+lower values that are specified with the `min` and `max` keyword
+arguments.
+
+This type is available since Meson version 0.45.0.
+
+### Arrays
+
+Arrays represent an array of strings. By default the array can contain
+arbitrary strings. To limit the possible values that can be used, set the
+`choices` parameter. Meson will then only allow the value array to
+contain strings that are in the given list. The array may be
+empty. The `value` parameter specifies the default value of the option
+and if it is unset then the values of `choices` will be used as the
+default.
+
+As of 0.47.0 `-Dopt=` and `-Dopt=[]` both pass an empty list; before this
+`-Dopt=` would pass a list with an empty string.
+
+This type is available since version 0.44.0.
+
+### Features
+
+A `feature` option has three states: `enabled`, `disabled` or `auto`.
+It is intended to be passed as the value for the `required` keyword
+argument of most functions. Currently supported in
+[`dependency()`](Reference-manual.md#dependency),
+[`find_library()`](Reference-manual.md#compiler-object),
+[`find_program()`](Reference-manual.md#find_program) and
+[`add_languages()`](Reference-manual.md#add_languages) functions.
+
+- `enabled` is the same as passing `required : true`.
+- `auto` is the same as passing `required : false`.
+- `disabled` does not look for the dependency and always returns 'not-found'.
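+
+Like the other option types, a feature option is declared in
+`meson_options.txt`. As a minimal sketch (the option name `myfeature`
+is purely illustrative and matches the example below):
+
+```meson
+# meson_options.txt -- hypothetical feature option, defaulting to 'auto'
+option('myfeature', type : 'feature', value : 'auto')
+```
+
+Users can then switch it per build directory with `-Dmyfeature=enabled`
+or `-Dmyfeature=disabled`.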
+ +When getting the value of this type of option using `get_option()`, a +special [feature option +object](Reference-manual.md#feature-option-object) is returned instead +of the string representation of the option's value. This object can be +passed to `required`: + +```meson +d = dependency('foo', required : get_option('myfeature')) +if d.found() + app = executable('myapp', 'main.c', dependencies : [d]) +endif +``` + +To check the value of the feature, the object has three methods +returning a boolean and taking no argument: + +- `.enabled()` +- `.disabled()` +- `.auto()` + +This is useful for custom code depending on the feature: + +```meson +if get_option('myfeature').enabled() + # ... +endif +``` + +If the value of a `feature` option is set to `auto`, that value is +overridden by the global `auto_features` option (which defaults to +`auto`). This is intended to be used by packagers who want to have +full control on which dependencies are required and which are +disabled, and not rely on build-deps being installed (at the right +version) to get a feature enabled. They could set +`auto_features=enabled` to enable all features and disable explicitly +only the few they don't want, if any. + +This type is available since version 0.47.0 + +## Using build options + +```meson +optval = get_option('opt_name') +``` + +This function also allows you to query the value of Meson's built-in +project options. For example, to get the installation prefix you would +issue the following command: + +```meson +prefix = get_option('prefix') +``` + +It should be noted that you can not set option values in your Meson +scripts. They have to be set externally with the `meson configure` +command line tool. Running `meson configure` without arguments in a +build dir shows you all options you can set. + +To change their values use the `-D` +option: + +```console +$ meson configure -Doption=newvalue +``` + +Setting the value of arrays is a bit special. If you only pass a +single string, then it is considered to have all values separated by +commas. Thus invoking the following command: + +```console +$ meson configure -Darray_opt=foo,bar +``` + +would set the value to an array of two elements, `foo` and `bar`. + +If you need to have commas in your string values, then you need to +pass the value with proper shell quoting like this: + +```console +$ meson configure "-Doption=['a,b', 'c,d']" +``` + +The inner values must always be single quotes and the outer ones +double quotes. + +To change values in subprojects prepend the name of the subproject and +a colon: + +```console +$ meson configure -Dsubproject:option=newvalue +``` + +**NOTE:** If you cannot call `meson configure` you likely have a old + version of Meson. In that case you can call `mesonconf` instead, but + that is deprecated in newer versions + +## Yielding to superproject option + +Suppose you have a master project and a subproject. In some cases it +might be useful to have an option that has the same value in both of +them. This can be achieved with the `yield` keyword. Suppose you have +an option definition like this: + +```meson +option('some_option', type : 'string', value : 'value', yield : true) +``` + +If you build this project on its own, this option behaves like +usual. However if you build this project as a subproject of another +project which also has an option called `some_option`, then calling +`get_option` returns the value of the superproject. If the value of +`yield` is `false`, `get_option` returns the value of the subproject's +option. 
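+
+As an illustration, a minimal sketch of the two option files involved
+(the project layout and option values are hypothetical):
+
+```meson
+# meson_options.txt of the superproject
+option('some_option', type : 'string', value : 'from-super')
+
+# subprojects/foo/meson_options.txt
+option('some_option', type : 'string', value : 'from-sub', yield : true)
+```
+
+With this setup, `get_option('some_option')` inside the subproject
+returns `from-super`; removing `yield : true` would make it return
+`from-sub` instead.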
+ + +## Built-in build options + +There are a number of [built-in options][builtin_opts]. To get the +current list execute `meson configure` in the build directory. + +[builtin_opts]: https://mesonbuild.com/Builtin-options.html + +### Visual Studio + +#### Startup project + +The `backend_startup_project` option can be set to define the default +project that will be executed with the "Start debugging F5" action in +visual studio. It should be the same name as an executable target +name. + +```meson +project('my_project', 'c', default_options: ['backend_startup_project=my_exe']) +executable('my_exe', ...) +``` + +### Ninja + +#### Max links + +The `backend_max_links` can be set to limit the number of processes +that ninja will use to link. diff --git a/meson/docs/markdown/Build-system-converters.md b/meson/docs/markdown/Build-system-converters.md new file mode 100644 index 000000000..1c0b4fc50 --- /dev/null +++ b/meson/docs/markdown/Build-system-converters.md @@ -0,0 +1,27 @@ +--- +short-description: Converting other build systems to Meson +... + +# Build system converters + +Moving from one build system into another includes a fair bit of +work. To make things easier, Meson provides scripts to convert other +build systems into Meson. At the time of writing, scripts for CMake +and autotools exist. It can be found in the `tools` subdirectory in +Meson's source tree. + +The scripts do not try to do a perfect conversion. This would be +extremely difficult because the data models of other build systems are +very different. The goal of the converter script is to convert as much +of the low level drudgery as possible. Using the scripts is +straightforward. We'll use the CMake one as an example but the +Autotools one works exactly the same way. + + cmake2meson.py path/to/CMake/project/root + +This command generates a skeleton Meson project definition that tries +to mirror CMake's setup as close as possible. Once this is done, you +need to go through these files manually and finalize the +conversion. To make this task as simple as possible, the converter +script will transfer all comments from the CMake definition into Meson +definition. diff --git a/meson/docs/markdown/Build-targets.md b/meson/docs/markdown/Build-targets.md new file mode 100644 index 000000000..83f959f0b --- /dev/null +++ b/meson/docs/markdown/Build-targets.md @@ -0,0 +1,100 @@ +--- +short-description: Definition of build targets +... + +# Build targets + +Meson provides four kinds of build targets: executables, libraries +(which can be set to be built as static or shared or both of them at +the build configuration time), static libraries, and shared libraries. +They are created with the commands `executable`, `library`, +`static_library` and `shared_library`, respectively. All objects created +in this way are **immutable**. That is, you can not change any aspect of +them after they have been constructed. This ensures that all information +pertaining to a given build target is specified in one well defined +place. + +Libraries and executables +-- + +As an example, here is how you would build a library. + +```meson +project('shared lib', 'c') +library('mylib', 'source.c') +``` + +It is generally preferred to use the `library` command instead of +`shared_library` and `static_library` and then configure which +libraries (static or shared or both of them) will be built at the +build configuration time using the `default_library` +[built-in option](Builtin-options.md). 
+ +In Unix-like operating systems, shared libraries can be +versioned. Meson supports this with keyword arguments, which will be +ignored if the library is configured as static at the compile time. + +```meson +project('shared lib', 'c') +library('mylib', 'source.c', version : '1.2.3', soversion : '0') +``` + +It is common to build a library and then an executable that links +against it. This is supported as well. + +```meson +project('shared lib', 'c') +lib = library('mylib', 'source.c') +executable('program', 'prog.c', link_with : lib) +``` + +Meson sets things up so that the resulting executable can be run +directly from the build directory. There is no need to write shell +scripts or set environment variables. + +One target can have multiple language source files. + +```meson +project('multilang', 'c', 'cpp') +executable('multiexe', 'file.c', 'file2.cc') +``` + +Object files +-- + +Sometimes you can't build files from sources but need to utilize an +existing object file. A typical case is using an object file provided +by a third party. Object files can be specified just like sources. + +```meson +exe = executable('myexe', 'source.cpp', objects : 'third_party_object.o') +``` + +A different case is when you want to use object files built in one +target directly in another. A typical case is when you build a shared +library and it has an internal class that is not exported in the +ABI. This means you can't access it even if you link against the +library. Typical workarounds for this include building both a shared +and static version of the library or putting the source file in the +test executable's source list. Both of these approaches cause the +source to be built twice, which is slow. + +In Meson you can extract object files from targets and use them as-is +on other targets. This is the syntax for it. + +```meson +lib = shared_library('somelib', 'internalclass.cc', 'file.cc', ...) +eo = lib.extract_objects('internalclass.cc') +executable('classtest', 'classtest.cpp', objects : eo) +``` + +Here we take the internal class object and use it directly in the +test. The source file is only compiled once. + +Note that careless use of this feature may cause strange bugs. As an +example trying to use objects of an executable or static library in a +shared library will not work because shared library objects require +special compiler flags. Getting this right is the user's +responsibility. For this reason it is strongly recommended that you +only use this feature for generating unit test executables in the +manner described above. diff --git a/meson/docs/markdown/Builtin-options.md b/meson/docs/markdown/Builtin-options.md new file mode 100644 index 000000000..0536e77bc --- /dev/null +++ b/meson/docs/markdown/Builtin-options.md @@ -0,0 +1,262 @@ +--- +short-description: Built-in options to configure project properties +... + +# Built-in options + +Meson provides two kinds of options: [build options provided by the +build files](Build-options.md) and built-in options that are either +universal options, base options, compiler options. + +## Universal options + +A list of these options can be found by running `meson --help`. All +these can be set by passing `-Doption=value` to `meson` (aka `meson +setup`), or by setting them inside `default_options` of `project()` in +your `meson.build`. Some options can also be set by `--option=value`, +or `--option value`--- a list is shown by running `meson setup +--help`. + +For legacy reasons `--warnlevel` is the cli argument for the +`warning_level` option. 
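+
+As a small sketch, a project can pin some of these options from its
+`meson.build` (the values shown are only examples):
+
+```meson
+# built-in options are given as 'name=value' strings
+project('demo', 'c',
+  default_options : ['warning_level=2', 'buildtype=debugoptimized'])
+```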
+
+They can also be edited after setup using `meson configure
+-Doption=value`.
+
+Installation options are all relative to the prefix, except:
+
+* When the prefix is `/usr`: `sysconfdir` defaults to `/etc`,
+  `localstatedir` defaults to `/var`, and `sharedstatedir` defaults to
+  `/var/lib`
+* When the prefix is `/usr/local`: `localstatedir` defaults
+  to `/var/local`, and `sharedstatedir` defaults to `/var/local/lib`
+
+### Directories
+
+| Option | Default value | Description |
+| ------ | ------------- | ----------- |
+| prefix | see below | Installation prefix |
+| bindir | bin | Executable directory |
+| datadir | share | Data file directory |
+| includedir | include | Header file directory |
+| infodir | share/info | Info page directory |
+| libdir | see below | Library directory |
+| libexecdir | libexec | Library executable directory |
+| localedir | share/locale | Locale data directory |
+| localstatedir | var | Localstate data directory |
+| mandir | share/man | Manual page directory |
+| sbindir | sbin | System executable directory |
+| sharedstatedir | com | Architecture-independent data directory |
+| sysconfdir | etc | Sysconf data directory |
+
+
+`prefix` defaults to `C:/` on Windows, and `/usr/local` otherwise. You
+should always override this value.
+
+`libdir` is automatically detected based on your platform, it should
+be correct when doing "native" (build machine == host machine)
+compilation. For cross compiles Meson will try to guess the correct
+libdir, but it may not be accurate, especially on Linux where
+different distributions have different defaults. Using a [cross
+file](Cross-compilation.md#defining-the-environment), particularly the
+paths section may be necessary.
+
+### Core options
+
+Options that are labeled "per machine" in the table are set per
+machine. See the [specifying options per
+machine](#specifying-options-per-machine) section for details.
+
+| Option | Default value | Description | Is per machine | Is per subproject |
+| ------ | ------------- | ----------- | -------------- | ----------------- |
+| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | no |
+| backend {ninja, vs,
vs2010, vs2012, vs2013, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no | no | +| buildtype {plain, debug,
debugoptimized, release, minsize, custom} | debug | Build type to use | no | no | +| debug | true | Debug | no | no | +| default_library {shared, static, both} | shared | Default library type | no | yes | +| errorlogs | true | Whether to print the logs from failing tests. | no | no | +| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | no | +| layout {mirror,flat} | mirror | Build directory layout | no | no | +| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no | no | +| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes | no | +| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes | no | +| stdsplit | true | Split stdout and stderr in test logs | no | no | +| strip | false | Strip targets on install | no | no | +| unity {on, off, subprojects} | off | Unity build | no | no | +| unity_size {>=2} | 4 | Unity file block size | no | no | +| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | yes | +| werror | false | Treat warnings as errors | no | yes | +| wrap_mode {default, nofallback,
nodownload, forcefallback, nopromote} | default | Wrap mode to use | no | no | +| force_fallback_for | [] | Force fallback for those dependencies | no | no | + + For setting optimization levels and +toggling debug, you can either set the `buildtype` option, or you can +set the `optimization` and `debug` options which give finer control +over the same. Whichever you decide to use, the other will be deduced +from it. For example, `-Dbuildtype=debugoptimized` is the same as +`-Ddebug=true -Doptimization=2` and vice-versa. This table documents +the two-way mapping: + +| buildtype | debug | optimization | +| --------- | ----- | ------------ | +| plain | false | 0 | +| debug | true | 0 | +| debugoptimized | true | 2 | +| release | false | 3 | +| minsize | true | s | + +All other combinations of `debug` and `optimization` set `buildtype` to `'custom'`. + +## Base options + +These are set in the same way as universal options, either by +`-Doption=value`, or by setting them inside `default_options` of +`project()` in your `meson.build`. However, they cannot be shown in +the output of `meson --help` because they depend on both the current +platform and the compiler that will be selected. The only way to see +them is to setup a builddir and then run `meson configure` on it with +no options. + +The following options are available. Note that they may not be +available on all platforms or with all compilers: + +| Option | Default value | Possible values | Description | +|---------------|----------------|------------------------------------------------------------------|-------------------------------------------------------------------------------| +| b_asneeded | true | true, false | Use -Wl,--as-needed when linking | +| b_bitcode | false | true, false | Embed Apple bitcode, see below | +| b_colorout | always | auto, always, never | Use colored output | +| b_coverage | false | true, false | Enable coverage tracking | +| b_lundef | true | true, false | Don't allow undefined symbols when linking | +| b_lto | false | true, false | Use link time optimization | +| b_lto_threads | 0 | Any integer* | Use multiple threads for lto. *(Added in 0.57.0)* | +| b_lto_mode | default | default, thin | Select between lto modes, thin and default. *(Added in 0.57.0)* | +| b_ndebug | false | true, false, if-release | Disable asserts | +| b_pch | true | true, false | Use precompiled headers | +| b_pgo | off | off, generate, use | Use profile guided optimization | +| b_sanitize | none | see below | Code sanitizer to use | +| b_staticpic | true | true, false | Build static libraries as position independent | +| b_pie | false | true, false | Build position-independent executables (since 0.49.0) | +| b_vscrt | from_buildtype | none, md, mdd, mt, mtd, from_buildtype, static_from_buildtype | VS runtime library to use (since 0.48.0) (static_from_buildtype since 0.56.0) | + +The value of `b_sanitize` can be one of: `none`, `address`, `thread`, +`undefined`, `memory`, `address,undefined`, but note that some +compilers might not support all of them. For example Visual Studio +only supports the address sanitizer. + +* < 0 means disable, == 0 means automatic selection, > 0 sets a specific number to use + +LLVM supports `thin` lto, for more discussion see [LLVM's documentation](https://clang.llvm.org/docs/ThinLTO.html) + + +The default value of `b_vscrt` is `from_buildtype`. 
The following table is +used internally to pick the CRT compiler arguments for `from_buildtype` or +`static_from_buildtype` *(since 0.56)* based on the value of the `buildtype` +option: + +| buildtype | from_buildtype | static_from_buildtype | +| -------- | -------------- | --------------------- | +| debug | `/MDd` | `/MTd` | +| debugoptimized | `/MD` | `/MT` | +| release | `/MD` | `/MT` | +| minsize | `/MD` | `/MT` | +| custom | error! | error! | + +### Notes about Apple Bitcode support + +`b_bitcode` will pass `-fembed-bitcode` while compiling and will pass +`-Wl,-bitcode_bundle` while linking. These options are incompatible +with `b_asneeded`, so that option will be silently disabled. + +[Shared modules](Reference-manual.md#shared_module) will not have +bitcode embedded because `-Wl,-bitcode_bundle` is incompatible with +both `-bundle` and `-Wl,-undefined,dynamic_lookup` which are necessary +for shared modules to work. + +## Compiler options + +Same caveats as base options above. + +The following options are available. They can be set by passing +`-Doption=value` to `meson`. Note that both the options themselves and +the possible values they can take will depend on the target platform +or compiler being used: + +| Option | Default value | Possible values | Description | +| ------ | ------------- | --------------- | ----------- | +| c_args | | free-form comma-separated list | C compile arguments to use | +| c_link_args | | free-form comma-separated list | C link arguments to use | +| c_std | none | none, c89, c99, c11, c17, c18, c2x, gnu89, gnu99, gnu11, gnu17, gnu18, gnu2x | C language standard to use | +| c_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against | +| c_thread_count | 4 | integer value ≥ 0 | Number of threads to use with emcc when using threads | +| cpp_args | | free-form comma-separated list | C++ compile arguments to use | +| cpp_link_args | | free-form comma-separated list | C++ link arguments to use | +| cpp_std | none | none, c++98, c++03, c++11, c++14, c++17, c++20
c++2a, c++1z, gnu++03, gnu++11, gnu++14, gnu++17, gnu++1z,
gnu++2a, gnu++20, vc++14, vc++17, vc++latest | C++ language standard to use | +| cpp_debugstl | false | true, false | C++ STL debug mode | +| cpp_eh | default | none, default, a, s, sc | C++ exception handling type | +| cpp_rtti | true | true, false | Whether to enable RTTI (runtime type identification) | +| cpp_thread_count | 4 | integer value ≥ 0 | Number of threads to use with emcc when using threads | +| cpp_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against | +| fortran_std | none | [none, legacy, f95, f2003, f2008, f2018] | Fortran language standard to use | +| cuda_ccbindir | | filesystem path | CUDA non-default toolchain directory to use (-ccbin) *(Added in 0.57.1)* | + +The default values of `c_winlibs` and `cpp_winlibs` are in +compiler-specific argument forms, but the libraries are: kernel32, +user32, gdi32, winspool, shell32, ole32, oleaut32, uuid, comdlg32, +advapi32. + +All these `_*` options are specified per machine. See below in +the [specifying options per machine](#specifying-options-per-machine) +section on how to do this in cross builds. + +When using MSVC, `cpp_eh=none` will result in no exception flags being +passed, while the `cpp_eh=[value]` will result in `/EH[value]`. Since +*0.51.0* `cpp_eh=default` will result in `/EHsc` on MSVC. When using +gcc-style compilers, nothing is passed (allowing exceptions to work), +while `cpp_eh=none` passes `-fno-exceptions`. + +Since *0.54.0* The `_thread_count` option can be used to control +the value passed to `-s PTHREAD_POOL_SIZE` when using emcc. No other +c/c++ compiler supports this option. + +## Specifying options per machine + +Since *0.51.0*, some options are specified per machine rather than +globally for all machine configurations. Prefixing the option with +`build.` just affects the build machine configuration, while +unprefixed just affects the host machine configuration, respectively. +For example: + + - `build.pkg_config_path` controls the paths pkg-config will search + for just `native: true` dependencies (build machine). + + - `pkg_config_path` controls the paths pkg-config will search for + just `native: false` dependencies (host machine). + +This is useful for cross builds. In the native builds, build = host, +and the unprefixed option alone will suffice. + +Prior to *0.51.0*, these options just effected native builds when +specified on the command line, as there was no `build.` prefix. +Similarly named fields in the `[properties]` section of the cross file +would effect cross compilers, but the code paths were fairly different +allowing differences in behavior to crop out. + +## Specifying options per subproject + +Since *0.54.0* `default_library` and `werror` built-in options can be +defined per subproject. This is useful for example when building +shared libraries in the main project, but static link a subproject, or +when the main project must build with no warnings but some subprojects +cannot. + +Most of the time this would be used either by the parent project by +setting subproject's default_options (e.g. `subproject('foo', +default_options: 'default_library=static')`), or by the user using the +command line `-Dfoo:default_library=static`. + +The value is overridden in this order: +- Value from parent project +- Value from subproject's default_options if set +- Value from subproject() default_options if set +- Value from command line if set + +Since 0.56.0 `warning_level` can also be defined per subproject. 
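+
+A minimal sketch of the parent-project approach (the subproject name
+`foo` is hypothetical):
+
+```meson
+# meson.build of the parent project
+foo_proj = subproject('foo',
+  default_options : ['default_library=static', 'werror=false'])
+```
+
+The same values could instead be given on the command line as
+`-Dfoo:default_library=static`, which takes precedence according to the
+ordering above.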
diff --git a/meson/docs/markdown/CMake-module.md b/meson/docs/markdown/CMake-module.md new file mode 100644 index 000000000..b82227d9b --- /dev/null +++ b/meson/docs/markdown/CMake-module.md @@ -0,0 +1,289 @@ +# CMake module + +**Note**: the functionality of this module is governed by [Meson's + rules on mixing build systems](Mixing-build-systems.md). + +This module provides helper tools for generating cmake package files. +It also supports the usage of CMake based subprojects, similar to +the normal [Meson subprojects](Subprojects.md). + + +## Usage + +To use this module, just do: **`cmake = import('cmake')`**. The +following functions will then be available as methods on the object +with the name `cmake`. You can, of course, replace the name `cmake` +with anything else. + +It is generally recommended to use the latest Meson version and +CMake >=3.17 for best compatibility. CMake subprojects will +usually also work with older CMake versions. However, this can +lead to unexpected issues in rare cases. + +## CMake subprojects + +Using CMake subprojects is similar to using the "normal" Meson +subprojects. They also have to be located in the `subprojects` +directory. + +Example: + +```cmake +add_library(cm_lib SHARED ${SOURCES}) +``` + +```meson +cmake = import('cmake') + +# Configure the CMake project +sub_proj = cmake.subproject('libsimple_cmake') + +# Fetch the dependency object +cm_lib = sub_proj.dependency('cm_lib') + +executable(exe1, ['sources'], dependencies: [cm_lib]) +``` + +The `subproject` method is almost identical to the normal Meson +`subproject` function. The only difference is that a CMake project +instead of a Meson project is configured. + +The returned `sub_proj` supports the same options as a "normal" +subproject. Meson automatically detects CMake build targets, which can +be accessed with the methods listed [below](#subproject-object). + +It is usually enough to just use the dependency object returned by the +`dependency()` method in the build targets. This is almost identical +to using `declare_dependency()` object from a normal Meson subproject. + +It is also possible to use executables defined in the CMake project as code +generators with the `target()` method: + +```cmake +add_executable(cm_exe ${EXE_SRC}) +``` + +```meson +cmake = import('cmake') + +# Subproject with the "code generator" +sub_pro = cmake.subproject('cmCodeGen') + +# Fetch the code generator exe +sub_exe = sub_pro.target('cm_exe') + +# Use the code generator +generated = custom_target( + 'cmake-generated', + input: [], + output: ['test.cpp'], + command: [sub_exe, '@OUTPUT@'] +) +``` + +It should be noted that not all projects are guaranteed to work. The +safest approach would still be to create a `meson.build` for the +subprojects in question. + +### Configuration options + +*New in meson 0.55.0* + +Meson also supports passing configuration options to CMake and overriding +certain build details extracted from the CMake subproject. 
+
+```meson
+cmake = import('cmake')
+opt_var = cmake.subproject_options()
+
+# Call CMake with `-DSOME_OTHER_VAR=ON`
+opt_var.add_cmake_defines({'SOME_OTHER_VAR': true})
+
+# Globally override the C++ standard to c++11
+opt_var.set_override_option('cpp_std', 'c++11')
+
+# Override the previous global C++ standard
+# with c++14 only for the CMake target someLib
+opt_var.set_override_option('cpp_std', 'c++14', target: 'someLib')
+
+sub_pro = cmake.subproject('someLibProject', options: opt_var)
+
+# Further changes to opt_var have no effect
+```
+
+See [the CMake options object](#cmake-options-object) for a complete
+reference of all supported functions.
+
+The CMake configuration options object is very similar to the
+[configuration data
+object](Reference-manual.md#configuration-data-object) returned
+by [`configuration_data`](Reference-manual.md#configuration_data). It
+is generated by the `subproject_options` function.
+
+All configuration options have to be set *before* the subproject is
+configured and must be passed to the `subproject` method via the
+`options` key. Altering the configuration object won't have any effect
+on previous `cmake.subproject` calls.
+
+In earlier Meson versions CMake command-line parameters could be set
+with the `cmake_options` kwarg. However, this feature is deprecated
+since 0.55.0 and only kept for compatibility. It will not work
+together with the `options` kwarg.
+
+### `subproject` object
+
+This object is returned by the `subproject` function described above
+and supports the following methods:
+
+ - `dependency(target)` returns a dependency object for any CMake target. The
+   `include_type` kwarg *(new in 0.56.0)* controls the include type of the
+   returned dependency object similar to the same kwarg in the
+   [`dependency()`](Reference-manual.md#dependency) function.
+ - `include_directories(target)` returns a Meson `include_directories()`
+   object for the specified target. Using this function is not necessary
+   if the dependency object is used.
+ - `target(target)` returns the raw build target.
+ - `target_type(target)` returns the type of the target as a string.
+ - `target_list()` returns a list of all target *names*.
+ - `get_variable(name)` fetches the specified variable from inside
+   the subproject. Usually `dependency()` or `target()` should be
+   preferred to extract build targets.
+ - `found` returns true if the subproject is available, otherwise false
+   *(new in Meson 0.53.2)*
+
+### `cmake options` object
+
+This object is returned by the `subproject_options()` function and
+consumed by the `options` kwarg of the `subproject` function. The
+following methods are supported:
+
+ - `add_cmake_defines({'opt1': val1, ...})` adds additional CMake command-line defines
+ - `set_override_option(opt, val)` sets specific [build options](Build-options.md)
+   for targets. This will effectively add `opt=val` to the `override_options`
+   array of the [build target](Reference-manual.md#executable)
+ - `set_install(bool)` overrides whether targets should be installed or not
+ - `append_compile_args(lang, arg1, ...)` appends compile flags for a specific
+   language to the targets
+ - `append_link_args(arg1, ...)` appends linker args to the targets
+ - `clear()` resets all data in the `cmake options` object
+
+The methods `set_override_option`, `set_install`,
+`append_compile_args` and `append_link_args` support the optional
+`target` kwarg. If specified, the set options affect the specific
+target. The effect of the option is global for the subproject
+otherwise.
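+
+As a rough sketch (the `bar` target and `someCMakeProject` subproject
+names are hypothetical), combining a subproject-wide compile flag with
+a target-specific one could look like this:
+
+```meson
+cmake = import('cmake')
+opt_var = cmake.subproject_options()
+
+# Applied to every target in the CMake subproject
+opt_var.append_compile_args('c', '-DEXAMPLE_GLOBAL')
+
+# Applied only to the 'bar' target
+opt_var.append_compile_args('c', '-DEXAMPLE_BAR_ONLY', target : 'bar')
+
+sub_pro = cmake.subproject('someCMakeProject', options : opt_var)
+```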
+ +If, for instance, `opt_var.set_install(false)` is called, no target +will be installed regardless of what is set by CMake. However, it is +still possible to install specific targets (here `foo`) by setting the +`target` kwarg: `opt_var.set_install(true, target: 'foo')` + +Options that are not set won't affect the generated subproject. So, if +for instance, `set_install` was not called then the values extracted +from CMake will be used. + +### Cross compilation + +*New in 0.56.0* + +Meson will try to automatically guess most of the required CMake +toolchain variables from existing entries in the cross and native +files. These variables will be stored in an automatically generate +CMake toolchain file in the build directory. The remaining variables +that can't be guessed can be added by the user in the `[cmake]` +cross/native file section (*new in 0.56.0*). + +Adding a manual CMake toolchain file is also supported with the +`cmake_toolchain_file` setting in the `[properties]` section. Directly +setting a CMake toolchain file with +`-DCMAKE_TOOLCHAIN_FILE=/path/to/some/Toolchain.cmake` in the +`meson.build` is **not** supported since the automatically generated +toolchain file is also used by Meson to inject arbitrary code into +CMake to enable the CMake subproject support. + +The closest configuration to only using a manual CMake toolchain file +would be to set these options in the machine file: + +```ini +[properties] + +cmake_toolchain_file = '/path/to/some/Toolchain.cmake' +cmake_defaults = false + +[cmake] + +# No entries in this section +``` + +This will result in a toolchain file with just the bare minimum to +enable the CMake subproject support and `include()` the +`cmake_toolchain_file` as the last instruction. + +For more information see the [cross and native file +specification](Machine-files.md). + +## CMake configuration files + +### cmake.write_basic_package_version_file() + +This function is the equivalent of the corresponding [CMake +function](https://cmake.org/cmake/help/v3.11/module/CMakePackageConfigHelpers.html#generating-a-package-version-file), +it generates a `name` package version file. + +* `name`: the name of the package. +* `version`: the version of the generated package file. +* `compatibility`: a string indicating the kind of compatibility, the accepted values are +`AnyNewerVersion`, `SameMajorVersion`, `SameMinorVersion` or `ExactVersion`. +It defaults to `AnyNewerVersion`. Depending on your cmake installation some kind of +compatibility may not be available. +* `install_dir`: optional installation directory, it defaults to `$(libdir)/cmake/$(name)` + + +Example: + +```meson +cmake = import('cmake') + +cmake.write_basic_package_version_file(name: 'myProject', version: '1.0.0') +``` + +### cmake.configure_package_config_file() + +This function is the equivalent of the corresponding [CMake +function](https://cmake.org/cmake/help/v3.11/module/CMakePackageConfigHelpers.html#generating-a-package-configuration-file), +it generates a `name` package configuration file from the `input` +template file. Just like the cmake function in this file the +`@PACKAGE_INIT@` statement will be replaced by the appropriate piece +of cmake code. The equivalent `PATH_VARS` argument is given through +the `configuration` parameter. + +* `name`: the name of the package. +* `input`: the template file where that will be treated for variable substitutions contained in `configuration`. +* `install_dir`: optional installation directory, it defaults to `$(libdir)/cmake/$(name)`. 
+* `configuration`: a `configuration_data` object that will be used for variable substitution in the template file. + + +Example: + +meson.build: + +```meson +cmake = import('cmake') + +conf = configuration_data() +conf.set_quoted('VAR', 'variable value') + +cmake.configure_package_config_file( + name: 'myProject', + input: 'myProject.cmake.in', + configuration: conf +) +``` + +myProject.cmake.in: + +```text +@PACKAGE_INIT@ + +set(MYVAR VAR) +``` diff --git a/meson/docs/markdown/Code-formatting.md b/meson/docs/markdown/Code-formatting.md new file mode 100644 index 000000000..386c78725 --- /dev/null +++ b/meson/docs/markdown/Code-formatting.md @@ -0,0 +1,58 @@ +--- +short-description: Code formatting +... + +# clang-format + +*Since 0.50.0* + +When `clang-format` is installed and a `.clang-format` file is found at the main +project's root source directory, Meson automatically adds a `clang-format` target +that reformat all C and C++ files (currently only with Ninja backend). + +```sh +ninja -C builddir clang-format +``` + +*Since 0.58.0* + +It is possible to restrict files to be reformatted with optional +`.clang-format-include` and `.clang-format-ignore` files. + +The file `.clang-format-include` contains a list of patterns matching the files +that will be reformatted. The `**` pattern matches this directory and all +subdirectories recursively. Empty lines and lines starting with `#` are ignored. +If `.clang-format-include` is not found, the pattern defaults to `**/*` which +means all files recursively in the source directory but has the disadvantage to +walk the whole source tree which could be slow in the case it contains lots of +files. + +Example of `.clang-format-include` file: +``` +# All files in src/ and its subdirectories +src/**/* + +# All files in include/ but not its subdirectories +include/* +``` + +The file `.clang-format-ignore` contains a list of patterns matching the files +that will be excluded. Files matching the include list (see above) that match +one of the ignore pattern will not be reformatted. Unlike include patters, ignore +patterns does not support `**` and a single `*` match any characters including +path separators. Empty lines and lines starting with `#` are ignored. + +The build directory and file without a well known C or C++ suffix are always +ignored. + +Example of `.clang-format-ignore` file: +``` +# Skip C++ files in src/ directory +src/*.cpp +``` + +Note that `.clang-format-ignore` has the same format as used by +[`run-clang-format.py`](https://github.com/Sarcasm/run-clang-format). + +A new target `clang-format-check` has been added. It returns an error code if +any file needs to be reformatted. This is intended to be used by CI. diff --git a/meson/docs/markdown/Commands.md b/meson/docs/markdown/Commands.md new file mode 100644 index 000000000..3f3cf9f09 --- /dev/null +++ b/meson/docs/markdown/Commands.md @@ -0,0 +1,312 @@ +# Command-line commands + +There are two different ways of invoking Meson. First, you can run it +directly from the source tree with the command +`/path/to/source/meson.py`. Meson may also be installed in which case +the command is simply `meson`. In this manual we only use the latter +format for simplicity. + +Meson is invoked using the following syntax: +`meson [COMMAND] [COMMAND_OPTIONS]` + +This section describes all available commands and some of their +Optional arguments. The most common workflow is to run +[`setup`](#setup), followed by [`compile`](#compile), and then +[`install`](#install). 
+ +For the full list of all available options for a specific command use +the following syntax: `meson COMMAND --help` + +### configure + +{{ configure_usage.inc }} + +Changes options of a configured meson project. + +{{ configure_arguments.inc }} + +Most arguments are the same as in [`setup`](#setup). + +Note: reconfiguring project will not reset options to their default +values (even if they were changed in `meson.build`). + +#### Examples: + +List all available options: +``` +meson configure builddir +``` + +Change value of a single option: +``` +meson configure builddir -Doption=new_value +``` + +### compile + +*(since 0.54.0)* + +{{ compile_usage.inc }} + +Builds a default or a specified target of a configured Meson project. + +{{ compile_arguments.inc }} + +`--verbose` argument is available since 0.55.0. + +#### Targets + +*(since 0.55.0)* + +`TARGET` has the following syntax `[PATH/]NAME[:TYPE]`, where: +- `NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`). +- `PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`. +- `TYPE`: type of the target. Can be one of the following: 'executable', 'static_library', 'shared_library', 'shared_module', 'custom', 'run', 'jar'. + +`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be +used to uniquely identify the target in `meson.build`. + +#### Backend specific arguments + +*(since 0.55.0)* + +`BACKEND-args` use the following syntax: + +If you only pass a single string, then it is considered to have all +values separated by commas. Thus invoking the following command: + +``` +$ meson compile --ninja-args=-n,-d,explain +``` + +would add `-n`, `-d` and `explain` arguments to ninja invocation. + +If you need to have commas or spaces in your string values, then you +need to pass the value with proper shell quoting like this: + +``` +$ meson compile "--ninja-args=['a,b', 'c d']" +``` + +#### Examples: + +Build the project: +``` +meson compile -C builddir +``` + +Execute a dry run on ninja backend with additional debug info: + +``` +meson compile --ninja-args=-n,-d,explain +``` + +Build three targets: two targets that have the same `foo` name, but +different type, and a `bar` target: + +``` +meson compile foo:shared_library foo:static_library bar +``` + +Produce a coverage html report (if available): + +``` +meson compile coverage-html +``` + +### dist + +*(since 0.52.0)* + +{{ dist_usage.inc }} + +Generates a release archive from the current source tree. + +{{ dist_arguments.inc }} + +See [notes about creating releases](Creating-releases.md) for more info. + +#### Examples: + +Create a release archive: +``` +meson dist -C builddir +``` + +### init + +*(since 0.45.0)* + +{{ init_usage.inc }} + +Creates a basic set of build files based on a template. + +{{ init_arguments.inc }} + +#### Examples: + +Create a project in `sourcedir`: +``` +meson init -C sourcedir +``` + +### introspect + +{{ introspect_usage.inc }} + +Displays information about a configured Meson project. + +{{ introspect_arguments.inc }} + +#### Examples: + +Display basic information about a configured project in `builddir`: + +``` +meson introspect builddir --projectinfo +``` + +### install + +*(since 0.47.0)* + +{{ install_usage.inc }} + +Installs the project to the prefix specified in [`setup`](#setup). + +{{ install_arguments.inc }} + +See [the installation documentation](Installing.md) for more info. 
+
+#### Examples:
+
+Install project to `prefix`:
+```
+meson install -C builddir
+```
+
+Install project to `$DESTDIR/prefix`:
+```
+DESTDIR=/path/to/staging/area meson install -C builddir
+```
+
+### rewrite
+
+*(since 0.50.0)*
+
+{{ rewrite_usage.inc }}
+
+Modifies the Meson project.
+
+{{ rewrite_arguments.inc }}
+
+See [the Meson file rewriter documentation](Rewriter.md) for more info.
+
+### setup
+
+{{ setup_usage.inc }}
+
+Configures a build directory for the Meson project.
+
+This is the default Meson command (invoked if there was no COMMAND supplied).
+
+{{ setup_arguments.inc }}
+
+See [Meson introduction
+page](Running-Meson.md#configuring-the-build-directory) for more info.
+
+#### Examples:
+
+Configures `builddir` with default values:
+```
+meson setup builddir
+```
+
+### subprojects
+
+*(since 0.49.0)*
+
+{{ subprojects_usage.inc }}
+
+Manages subprojects of the Meson project. *Since 0.59.0* commands are run on
+multiple subprojects in parallel by default; use `--num-processes=1` if this is
+not desired.
+
+{{ subprojects_arguments.inc }}
+
+### test
+
+{{ test_usage.inc }}
+
+Runs tests for the configured Meson project.
+
+{{ test_arguments.inc }}
+
+See [the unit test documentation](Unit-tests.md) for more info.
+
+#### Examples:
+
+Run tests for the project:
+```
+meson test -C builddir
+```
+
+Run only `specific_test_1` and `specific_test_2`:
+```
+meson test -C builddir specific_test_1 specific_test_2
+```
+
+### wrap
+
+{{ wrap_usage.inc }}
+
+A utility to manage WrapDB dependencies.
+
+{{ wrap_arguments.inc }}
+
+See [the WrapDB tool documentation](Using-wraptool.md) for more info.
+
+### devenv
+
+*(since 0.58.0)*
+
+{{ devenv_usage.inc }}
+
+Runs a command, or opens an interactive shell if no command is provided, with
+the environment set up to run the project from the build directory, without
+installation.
+
+We automatically handle `bash` and set `$PS1` accordingly. If the automatic `$PS1`
+override is not desired (maybe you have a fancy custom prompt), set the
+`$MESON_DISABLE_PS1_OVERRIDE` environment variable and use `$MESON_PROJECT_NAME`
+when setting the custom prompt, for example with a snippet like the following:
+
+```bash
+...
+if [[ -n "${MESON_PROJECT_NAME-}" ]];
+then
+    PS1+="[ ${MESON_PROJECT_NAME} ]"
+fi
+...
+```
+
+These variables are set in the environment in addition to those set using
+`meson.add_devenv()` (see the example after this list):
+- `MESON_DEVENV` is defined to `'1'`.
+- `MESON_PROJECT_NAME` is defined to the main project's name.
+- `PKG_CONFIG_PATH` includes the directory where Meson generates `-uninstalled.pc`
+  files.
+- `PATH` includes every directory where there is an executable that would be
+  installed into `bindir`. On Windows it also includes every directory where there
+  is a DLL needed to run those executables.
+- `LD_LIBRARY_PATH` includes every directory where there is a shared library that
+  would be installed into `libdir`. This allows running a system application
+  against a custom build of some of its libraries, for example running the
+  system GEdit while building GTK from git. On macOS the equivalent environment
+  variable is `DYLD_LIBRARY_PATH`; on Windows it is `PATH`.
+- `GI_TYPELIB_PATH` includes every directory where a GObject Introspection
+  typelib is built. This is automatically set when using `gnome.generate_gir()`.
+- `GSETTINGS_SCHEMA_DIR` *Since 0.59.0* includes every directory where GSettings
+  schemas are compiled. This is automatically set when using `gnome.compile_schemas()`.
+  Note that this requires GLib >= 2.64 when `gnome.compile_schemas()` is used in
+  more than one directory.
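+
+As a minimal sketch (the variable name and file name are hypothetical),
+a project could export an extra variable into the development
+environment with `meson.add_devenv()` like this:
+
+```meson
+devenv = environment()
+# Point the tool at an in-tree configuration file while developing
+devenv.set('MYPROJECT_CONF_PATH', meson.current_source_dir() / 'myproject.conf')
+meson.add_devenv(devenv)
+```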
+ +{{ devenv_arguments.inc }} diff --git a/meson/docs/markdown/Comparisons.md b/meson/docs/markdown/Comparisons.md new file mode 100644 index 000000000..1deef6979 --- /dev/null +++ b/meson/docs/markdown/Comparisons.md @@ -0,0 +1,78 @@ +--- +title: Comparisons +... + +# Comparing Meson with other build systems + +A common question is *Why should I choose Meson over a different build +system X?* There is no one true answer to this as it depends on the +use case. Almost all build systems have all the functionality needed +to build medium-to-large projects so the decision is usually made on +other points. Here we list some pros and cons of various build systems +to help you do the decision yourself. + +## GNU Autotools + +### Pros + +Excellent support for legacy Unix platforms, large selection of +existing modules. + +### Cons + +Needlessly slow, complicated, hard to use correctly, unreliable, +painful to debug, incomprehensible for most people, poor support for +non-Unix platforms (especially Windows). + +## CMake + +### Pros + +Great support for multiple backends (Visual Studio, XCode, etc). + +### Cons + +The scripting language is cumbersome to work with. Some simple things +are more complicated than necessary. + +## SCons + +### Pros + +Full power of Python available for defining your build. + +### Cons + +Slow. Requires you to pass your configuration settings on every +invocation. That is, if you do `scons OPT1 OPT2` and then just +`scons`, it will reconfigure everything without settings `OPT1` and +`OPT2`. Every other build system remembers build options from the +previous invocation. + +## Bazel + +### Pros + +Proven to scale to very large projects. + +### Cons + +Implemented in Java. Poor Windows support. Heavily focused on Google's +way of doing things (which may be a good or a bad thing). Contributing +code requires [signing a CLA](https://bazel.build/contributing.html). + +## Meson + +### Pros + +The fastest build system [see +measurements](Performance-comparison.md), user friendly, designed to +be as invisible to the developer as possible, native support for +modern tools (precompiled headers, coverage, Valgrind etc). Not Turing +complete so build definition files are easy to read and understand. + +### Cons + +Relatively new so it does not have a large user base yet, and may thus +contain some unknown bugs. Visual Studio and XCode backends not as +high quality as Ninja one. diff --git a/meson/docs/markdown/Compiler-properties.md b/meson/docs/markdown/Compiler-properties.md new file mode 100644 index 000000000..e231e162f --- /dev/null +++ b/meson/docs/markdown/Compiler-properties.md @@ -0,0 +1,230 @@ +# Compiler properties + +Not all compilers and platforms are alike. Therefore Meson provides +the tools to detect properties of the system during configure time. To +get most of this information, you first need to extract the *compiler +object* from the main *meson* variable. + +```meson +compiler = meson.get_compiler('c') +``` + +Here we extract the C compiler. We could also have given the argument +`cpp` to get the C++ compiler, `objc` to get the objective C compiler +and so on. The call is valid for all languages specified in the +*project* declaration. Trying to obtain some other compiler will lead +to an unrecoverable error. + +## System information + +This is a bit complex and more thoroughly explained on the page on +[cross compilation](Cross-compilation.md). 
But if you just want to
+know the operating system your code will run on, issue this command:
+
+```meson
+host_machine.system()
+```
+
+## Compiler id
+
+The compiler object method `get_id` returns a
+lower case string describing the "family" of the compiler. Since 0.53.0
+`get_linker_id` returns a lower case string with the linker name. Since
+compilers can often choose from multiple linkers depending on operating
+system, `get_linker_id` can be useful for handling or mitigating effects
+of particular linkers.
+
+The compiler object also has a method `get_argument_syntax` which
+returns a lower case string of `gcc`, `msvc`, or another undefined string
+value, identifying whether the compiler arguments use the same syntax as
+either `gcc` or `msvc`, or that its arguments are not like either. This should
+only be used to select the syntax of the arguments, such as those to test
+with `has_argument`.
+
+See [reference tables](Reference-tables.md#compiler-ids) for a list of
+supported compiler ids and their argument type.
+
+## Does code compile?
+
+Sometimes the only way to test the system is to try to compile some
+sample code and see if it works. For example, this can test that a
+"C++17" compiler actually supports a particular C++17 feature,
+without resorting to maintaining a feature list vs. compiler vendor,
+compiler version and operating system.
+Testing that a code snippet runs is a two-phase operation. First
+we define some code using the multiline string operator:
+
+```meson
+code = '''#include<stdio.h>
+void func() { printf("Compile me.\n"); }
+'''
+```
+
+Then we can run the test.
+
+```meson
+result = compiler.compiles(code, name : 'basic check')
+```
+
+The variable *result* will now contain either `true` or `false`
+depending on whether the compilation succeeded or not. The keyword
+argument `name` is optional. If it is specified, Meson will write the
+result of the check to its log.
+
+## Does code compile and link?
+
+Sometimes it is necessary to check whether a certain code fragment not
+only compiles, but also links successfully, e.g. to check if a symbol
+is actually present in a library. This can be done using the
+`.links()` method on a compiler object like this:
+
+```meson
+code = '''#include<stdio.h>
+void func() { printf("Compile me.\n"); }
+'''
+```
+
+Then we can run the test.
+
+```meson
+result = compiler.links(code, args : '-lfoo', name : 'link check')
+```
+
+The variable *result* will now contain either `true` or `false`
+depending on whether the compilation and linking succeeded or not. The
+keyword argument `name` is optional. If it is specified, Meson will
+write the result of the check to its log.
+
+## Compile and run test application
+
+Here is how you would compile and run a small test application.
+Testing if a code snippet **runs** versus merely that it links
+is particularly important for some dependencies such as MPI.
+
+```meson
+code = '''#include<stdio.h>
+int main(int argc, char **argv) {
+  printf("%s\n", "stdout");
+  fprintf(stderr, "%s\n", "stderr");
+  return 0;
+}
+'''
+result = compiler.run(code, name : 'basic check')
+```
+
+The `result` variable encapsulates the state of the test, which can be
+extracted with the following methods. The `name` keyword argument
+works the same as with `compiles`.
+
+| Method | Return value |
+| ------ | ------------ |
+| compiled | `true` if compilation succeeded. If `false` then all other methods return undefined values. |
| returncode | The return code of the application as an integer |
+| stdout | Program's standard out as text. |
+| stderr | Program's standard error as text. |
+
+Here is an example usage:
+
+```meson
+if result.stdout().strip() == 'some_value'
+  # do something
+endif
+```
+
+## Does a header exist?
+
+Header files provided by different platforms vary quite a lot. Meson
+has functionality to detect whether a given header file is available
+on the system. The test is done by trying to compile a simple test
+program that includes the specified header. The following snippet
+describes how this feature can be used.
+
+```meson
+if compiler.has_header('sys/fstat.h')
+  # header exists, do something
+endif
+```
+
+## Expression size
+
+Often you need to determine the size of a particular element (such as
+`int`, `wchar_t` or `char*`). Using the `compiler` variable mentioned
+above, the check can be done like this.
+
+```meson
+wcharsize = compiler.sizeof('wchar_t', prefix : '#include<wchar.h>')
+```
+
+This will put the size of `wchar_t` as reported by sizeof into the
+variable `wcharsize`. The keyword argument `prefix` is optional. If
+specified, its contents are put at the top of the source file. This
+argument is typically used for setting `#include` directives in
+configuration files.
+
+In older versions (<= 0.30) Meson would error out if the size could
+not be determined. Since version 0.31 it returns -1 if the size could
+not be determined.
+
+## Does a function exist?
+
+Just having a header doesn't say anything about its contents.
+Sometimes you need to explicitly check if some function exists. This
+is how we would check whether the function `open_memstream` exists in
+header `stdio.h`:
+
+```meson
+if compiler.has_function('open_memstream', prefix : '#include <stdio.h>')
+  # function exists, do whatever is required.
+endif
+```
+
+Note that on macOS, programs can be compiled targeting older macOS
+versions than the one that the program is compiled on. It can't be
+assumed that the OS version that is compiled on matches the OS version
+that the binary will run on.
+
+Therefore, when detecting function availability with `has_function`, it
+is important to specify the correct header in the prefix argument.
+
+In the example above, the function `open_memstream` is detected, which
+was introduced in macOS 10.13. When the user builds on macOS 10.13,
+but targeting macOS 10.11 (`-mmacosx-version-min=10.11`), this will
+correctly report the function as missing. Without the header however,
+it would lack the necessary availability information and incorrectly
+report the function as available.
+
+## Does a structure contain a member?
+
+Some platforms have different standard structures. Here's how one
+would check if a struct called `mystruct` from header `myheader.h`
+contains a member called `some_member`.
+
+```meson
+if compiler.has_member('struct mystruct', 'some_member', prefix : '#include<myheader.h>')
+  # member exists, do whatever is required
+endif
+```
+
+## Type alignment
+
+Most platforms can't access some data types at any address. For
+example, it is common that a `char` can be at any address but a 32-bit
+integer only at locations which are divisible by four. Determining the
+alignment of data types is simple.
+
+```meson
+int_alignment = compiler.alignment('int') # Will most likely contain the value 4.
+```
+
+## Has argument
+
+This method tests if the compiler supports a given command line
+argument. This is implemented by compiling a small file with the given
+argument.
+ +```meson +has_special_flags = compiler.has_argument('-Wspecialthing') +``` + +*Note*: some compilers silently swallow command line arguments they do +not understand. Thus this test can not be made 100% reliable. diff --git a/meson/docs/markdown/Conference-presentations.md b/meson/docs/markdown/Conference-presentations.md new file mode 100644 index 000000000..b800129ac --- /dev/null +++ b/meson/docs/markdown/Conference-presentations.md @@ -0,0 +1,31 @@ +# Conference presentations on Meson + +- FOSDEM 2014, [Introducing the Meson build + system](https://video.fosdem.org/2014/H2215_Ferrer/Sunday/Introducing_the_Meson_build_system.webm) + (jpakkane) + +- LCA 2015, [Making build systems not + suck](https://www.youtube.com/watch?v=KPi0AuVpxLI) (jpakkane) + +- GUADEC 2015, [Improving the way Gnome apps are + built](https://www.youtube.com/watch?v=wTf0NjjNwTU) (jpakkane) + +- GStreamer conference 2015, [Done in 6.0 + seconds](https://gstconf.ubicast.tv/videos/done-in-60-seconds-a-new-build-system-for-gstreamer) + (jpakkane) + +- LCA 2016, [Builds, dependencies and deployment in the modern + multiplatform world](https://www.youtube.com/watch?v=CTJtKtQ8R5k) + (jpakkane) + +- GUADEC 2016, [Making your GNOME app compile 2.4x + faster](https://media.ccc.de/v/44-making_your_gnome_app_compile_24x_faster) + (nirbheek) + +- Libre Application Summit 2016, [New world, new + tools](https://youtu.be/0-gx1qU2pPo) (jpakkane) + +- GStreamer conference 2016, [GStreamer Development on Windows and + faster builds everywhere with + Meson](https://gstconf.ubicast.tv/videos/gstreamer-development-on-windows-ans-faster-builds-everywhere-with-meson/) + (tpm) diff --git a/meson/docs/markdown/Configuration.md b/meson/docs/markdown/Configuration.md new file mode 100644 index 000000000..55f01709e --- /dev/null +++ b/meson/docs/markdown/Configuration.md @@ -0,0 +1,198 @@ +--- +short-description: Build-time configuration options +... + +# Configuration + +If there are multiple configuration options, passing them through +compiler flags becomes very burdensome. It also makes the +configuration settings hard to inspect. To make things easier, Meson +supports the generation of configure files. This feature is similar to +one found in other build systems such as CMake. + +Suppose we have the following Meson snippet: + +```meson +conf_data = configuration_data() +conf_data.set('version', '1.2.3') +configure_file(input : 'config.h.in', + output : 'config.h', + configuration : conf_data) +``` + +and that the contents of `config.h.in` are + +```c +#define VERSION_STR "@version@" +``` + +Meson will then create a file called `config.h` in the corresponding +build directory whose contents are the following. + +```c +#define VERSION_STR "1.2.3" +``` + +More specifically, Meson will find all strings of the type `@varname@` +and replace them with respective values set in `conf_data`. You can +use a single `configuration_data` object as many times as you like, +but it becomes immutable after being passed to the `configure_file` +function. That is, after it has been used once to generate output the +`set` function becomes unusable and trying to call it causes an error. +Copy of immutable `configuration_data` is still immutable. + +For more complex configuration file generation Meson provides a second +form. To use it, put a line like this in your configuration file. + + #mesondefine TOKEN + +The replacement that happens depends on what the value and type of TOKEN is: + +```c +#define TOKEN // If TOKEN is set to boolean true. 
+#undef TOKEN // If TOKEN is set to boolean false. +#define TOKEN 4 // If TOKEN is set to an integer or string value. +/* undef TOKEN */ // If TOKEN has not been set to any value. +``` + +Note that if you want to define a C string, you need to do the quoting +yourself like this: + +```meson +conf_data.set('TOKEN', '"value"') +``` + +Since this is such a common operation, Meson provides a convenience +method: + +```meson +plain_var = 'value' +conf_data.set_quoted('TOKEN', plain_var) # becomes #define TOKEN "value" +``` + +Often you have a boolean value in Meson but need to define the C/C++ +token as 0 or 1. Meson provides a convenience function for this use +case. + +```meson +conf_data.set10(token, boolean_value) +# The line above is equivalent to this: +if boolean_value + conf_data.set(token, 1) +else + conf_data.set(token, 0) +endif +``` + +## Configuring without an input file + +If the input file is not defined then Meson will generate a header +file all the entries in the configuration data object. The +replacements are the same as when generating `#mesondefine` entries: + +```meson +conf_data.set('FOO', '"string"') => #define FOO "string" +conf_data.set('FOO', 'a_token') => #define FOO a_token +conf_data.set('FOO', true) => #define FOO +conf_data.set('FOO', false) => #undef FOO +conf_data.set('FOO', 1) => #define FOO 1 +conf_data.set('FOO', 0) => #define FOO 0 +``` + +In this mode, you can also specify a comment which will be placed +before the value so that your generated files are self-documenting. + +```meson +conf_data.set('BAR', true, description : 'Set BAR if it is available') +``` + +Will produce: + +```c +/* Set BAR if it is available */ +#define BAR +``` + +## Dealing with file encodings + +The default Meson file encoding to configure files is utf-8. If you +need to configure a file that is not utf-8 encoded the encoding +keyword will allow you to specify which file encoding to use. It is +however strongly advised to convert your non utf-8 file to utf-8 +whenever possible. Supported file encodings are those of python3, see +[standard-encodings](https://docs.python.org/3/library/codecs.html#standard-encodings). + +## Using dictionaries + +Since *0.49.0* `configuration_data()` takes an optional dictionary as +first argument. If provided, each key/value pair is added into the +`configuration_data` as if `set()` method was called for each of them. +`configure_file()`'s `configuration` kwarg also accepts a dictionary +instead of a configuration_data object. + +Example: +```meson +cdata = configuration_data({ + 'STRING' : '"foo"', + 'INT' : 42, + 'DEFINED' : true, + 'UNDEFINED' : false, +}) + +configure_file(output : 'config1.h', + configuration : cdata, +) + +configure_file(output : 'config2.h', + configuration : { + 'STRING' : '"foo"', + 'INT' : 42, + 'DEFINED' : true, + 'UNDEFINED' : false, + } +) + +``` + +# A full example + +Generating and using a configuration file requires the following steps: + + - generate the file + - create an include directory object for the directory that holds the file + - use it in a target + +We are going to use the traditional approach of generating a header +file in the top directory. The common name is `config.h` but we're +going to use an unique name. This avoids the problem of accidentally +including the wrong header file when building a project with many +subprojects. 
+ +At the top level we generate the file: + +```meson +conf_data = configuration_data() +# Set data +configure_file(input : 'projconfig.h.in', + output : 'projconfig.h', + configuration : conf_data) +``` + +Immediately afterwards we generate the include object. + +```meson +configuration_inc = include_directories('.') +``` + +Finally we specify this in a target that can be in any subdirectory. + +```meson +executable(..., include_directories : configuration_inc) +``` + +Now any source file in this target can include the configuration +header like this: + +```c +#include +``` diff --git a/meson/docs/markdown/Configuring-a-build-directory.md b/meson/docs/markdown/Configuring-a-build-directory.md new file mode 100644 index 000000000..c55267656 --- /dev/null +++ b/meson/docs/markdown/Configuring-a-build-directory.md @@ -0,0 +1,120 @@ +--- +short-description: Configuring a pre-generated build directory +... + +# Configuring a build directory + +Often you want to change the settings of your build after it has been +generated. For example you might want to change from a debug build +into a release build, set custom compiler flags, change the build +options provided in your `meson_options.txt` file and so on. + +The main tool for this is the `meson configure` command. + +You invoke `meson configure` by giving it the location of your build +dir. If omitted, the current working directory is used instead. Here's +a sample output for a simple project. + + Core properties + + Source dir /home/jpakkane/clangdemo/2_address + Build dir /home/jpakkane/clangdemo/2_address/buildmeson + + Core options: + Option Current Value Possible Values Description + ------ ------------- --------------- ----------- + auto_features auto [enabled, disabled, auto] Override value of all 'auto' features + backend ninja [ninja, vs, vs2010, vs2015, vs2017, vs2019, xcode] Backend to use + buildtype release [plain, debug, debugoptimized, release, minsize, custom] Build type to use + debug false [true, false] Debug + default_library shared [shared, static, both] Default library type + install_umask 0022 [preserve, 0000-0777] Default umask to apply on permissions of installed files + layout mirror [mirror, flat] Build directory layout + optimization 3 [0, g, 1, 2, 3, s] Optimization level + strip false [true, false] Strip targets on install + unity off [on, off, subprojects] Unity build + warning_level 1 [0, 1, 2, 3] Compiler warning level to use + werror false [true, false] Treat warnings as errors + + Backend options: + Option Current Value Possible Values Description + ------ ------------- --------------- ----------- + backend_max_links 0 >=0 Maximum number of linker processes to run or 0 for no limit + + Base options: + Option Current Value Possible Values Description + ------ ------------- --------------- ----------- + b_asneeded true [true, false] Use -Wl,--as-needed when linking + b_colorout always [auto, always, never] Use colored output + b_coverage false [true, false] Enable coverage tracking. 
+ b_lto false [true, false] Use link time optimization + b_lundef true [true, false] Use -Wl,--no-undefined when linking + b_ndebug false [true, false, if-release] Disable asserts + b_pch true [true, false] Use precompiled headers + b_pgo off [off, generate, use] Use profile guided optimization + b_sanitize none [none, address, thread, undefined, memory, address,undefined] Code sanitizer to use + b_staticpic true [true, false] Build static libraries as position independent + + Compiler options: + Option Current Value Possible Values Description + ------ ------------- --------------- ----------- + c_args [] Extra arguments passed to the C compiler + c_link_args [] Extra arguments passed to the C linker + c_std c99 [none, c89, c99, c11, c17, c18, c2x, gnu89, gnu99, gnu11, gnu17, gnu18, gnu2x] C language standard to use + cpp_args [] Extra arguments passed to the C++ compiler + cpp_debugstl false [true, false] STL debug mode + cpp_link_args [] Extra arguments passed to the C++ linker + cpp_std c++11 [none, c++98, c++03, c++11, c++14, c++17, c++1z, c++2a, c++20, gnu++03, gnu++11, gnu++14, gnu++17, gnu++1z, gnu++2a, gnu++20] C++ language standard to use + fortran_std [] [none, legacy, f95, f2003, f2008, f2018] language standard to use + + Directories: + Option Current Value Description + ------ ------------- ----------- + bindir bin Executable directory + datadir share Data file directory + includedir include Header file directory + infodir share/info Info page directory + libdir lib/x86_64-linux-gnu Library directory + libexecdir libexec Library executable directory + localedir share/locale Locale data directory + localstatedir /var/local Localstate data directory + mandir share/man Manual page directory + prefix /usr/local Installation prefix + sbindir sbin System executable directory + sharedstatedir /var/local/lib Architecture-independent data directory + sysconfdir etc Sysconf data directory + + Project options: + Option Current Value Possible Values Description + ------ ------------- --------------- ----------- + array_opt [one, two] [one, two, three] array_opt + combo_opt three [one, two, three] combo_opt + free_array_opt [one, two] free_array_opt + integer_opt 3 >=0, <=5 integer_opt + other_one false [true, false] other_one + some_feature enabled [enabled, disabled, auto] some_feature + someoption optval An option + + Testing options: + Option Current Value Possible Values Description + ------ ------------- --------------- ----------- + errorlogs true [true, false] Whether to print the logs from failing tests + stdsplit true [true, false] Split stdout and stderr in test logs + +These are all the options available for the current project arranged +into related groups. The first column in every field is the name of +the option. To set an option you use the `-D` option. For example, +changing the installation prefix from `/usr/local` to `/tmp/testroot` +you would issue the following command. + + meson configure -Dprefix=/tmp/testroot + +Then you would run your build command (usually `meson compile`), which +would cause Meson to detect that the build setup has changed and do +all the work required to bring your build tree up to date. + +Since 0.50.0, it is also possible to get a list of all build options +by invoking [`meson configure`](Commands.md#configure) with the +project source directory or the path to the root `meson.build`. In +this case, Meson will print the default values of all options similar +to the example output from above. 
diff --git a/meson/docs/markdown/Contact-information.md b/meson/docs/markdown/Contact-information.md new file mode 100644 index 000000000..e908a3f7c --- /dev/null +++ b/meson/docs/markdown/Contact-information.md @@ -0,0 +1,14 @@ +# Contact information + +For general discussion and questions, it is strongly recommended that +you use the [mailing +list](https://groups.google.com/forum/#!forum/mesonbuild). + +If you find bugs, please file them in the [issue +tracker](https://github.com/jpakkane/meson/issues). + +The maintainer of Meson is Jussi Pakkanen. You should usually not +contact him privately but rather use the channels listed above. +However if such a need arises, he can be reached at gmail where his +username is `jpakkane` (that is not a typo, the last letter is indeed +*e*). diff --git a/meson/docs/markdown/Continuous-Integration.md b/meson/docs/markdown/Continuous-Integration.md new file mode 100644 index 000000000..f24bdb06d --- /dev/null +++ b/meson/docs/markdown/Continuous-Integration.md @@ -0,0 +1,279 @@ +# Continuous Integration + +Here you will find snippets to use Meson with various CI such as +Travis and AppVeyor. + +Please [file an issue](https://github.com/mesonbuild/meson/issues/new) +if these instructions don't work for you. + +## Travis-CI with Docker + +Travis with Docker gives access to newer non-LTS Ubuntu versions with +pre-installed libraries of your choice. + +This `yml` file is derived from the +[configuration used by Meson](https://github.com/mesonbuild/meson/blob/master/.travis.yml) +for running its own tests. + +```yaml +os: + - linux + - osx + +language: + - cpp + +services: + - docker + +before_install: + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install python3 ninja; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then pip3 install meson; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull YOUR/REPO:eoan; fi + +script: + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM YOUR/REPO:eoan > Dockerfile; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo ADD . /root >> Dockerfile; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker build -t withgit .; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && meson test -C builddir"; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && meson test -C builddir; fi +``` + +## CircleCi for Linux (with Docker) + +[CircleCi](https://circleci.com/) can work for spinning all of the +Linux images you wish. Here's a sample `yml` file for use with that. 
+ +```yaml +version: 2.1 + +executors: + # Your dependencies would go in the docker images that represent + # the Linux distributions you are supporting + meson_ubuntu_builder: + docker: + - image: your_dockerhub_username/ubuntu-sys + + meson_debian_builder: + docker: + - image: your_dockerhub_username/debian-sys + + meson_fedora_builder: + docker: + - image: your_dockerhub_username/fedora-sys + +jobs: + meson_ubuntu_build: + executor: meson_ubuntu_builder + steps: + - checkout + - run: meson setup builddir --backend ninja + - run: meson compile -C builddir + - run: meson test -C builddir + + meson_debian_build: + executor: meson_debian_builder + steps: + - checkout + - run: meson setup builddir --backend ninja + - run: meson compile -C builddir + - run: meson test -C builddir + + meson_fedora_build: + executor: meson_fedora_builder + steps: + - checkout + - run: meson setup builddir --backend ninja + - run: meson compile -C builddir + - run: meson test -C builddir + +workflows: + version: 2 + linux_workflow: + jobs: + - meson_ubuntu_build + - meson_debian_build + - meson_fedora_build +``` + +## AppVeyor for Windows + +For CI on Windows, [AppVeyor](https://www.appveyor.com/) has a wide +selection of [default +configurations](https://www.appveyor.com/docs/windows-images-software/). +AppVeyor also has +[MacOS](https://www.appveyor.com/docs/macos-images-software/) and +[Linux](https://www.appveyor.com/docs/linux-images-software/) CI +images. This is a sample `appveyor.yml` file for Windows with Visual +Studio 2015 and 2017. + +```yaml +image: Visual Studio 2017 + +environment: + matrix: + - arch: x86 + compiler: msvc2015 + - arch: x64 + compiler: msvc2015 + - arch: x86 + compiler: msvc2017 + - arch: x64 + compiler: msvc2017 + +platform: + - x64 + +install: + # Set paths to dependencies (based on architecture) + - cmd: if %arch%==x86 (set PYTHON_ROOT=C:\python37) else (set PYTHON_ROOT=C:\python37-x64) + # Print out dependency paths + - cmd: echo Using Python at %PYTHON_ROOT% + # Add necessary paths to PATH variable + - cmd: set PATH=%cd%;%PYTHON_ROOT%;%PYTHON_ROOT%\Scripts;%PATH% + # Install meson and ninja + - cmd: pip install ninja meson + # Set up the build environment + - cmd: if %compiler%==msvc2015 ( call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %arch% ) + - cmd: if %compiler%==msvc2017 ( call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" %arch% ) + +build_script: + - cmd: echo Building on %arch% with %compiler% + - cmd: meson --backend=ninja builddir + - cmd: meson compile -C builddir + +test_script: + - cmd: meson test -C builddir +``` + +### Qt + +For Qt 5, add the following line near the `PYTHON_ROOT` assignment: + +```yaml + - cmd: if %arch%==x86 (set QT_ROOT=C:\Qt\5.11\%compiler%) else (set QT_ROOT=C:\Qt\5.11\%compiler%_64) +``` + +And afterwards add `%QT_ROOT%\bin` to the `PATH` variable. + +You might have to adjust your build matrix as there are, for example, +no msvc2017 32-bit builds. Visit the [Build +Environment](https://www.appveyor.com/docs/build-environment/) page in +the AppVeyor docs for more details. + +### Boost + +The following statement is sufficient for Meson to find Boost: + +```yaml + - cmd: set BOOST_ROOT=C:\Libraries\boost_1_67_0 +``` + +## Travis without Docker + +Non-Docker Travis-CI builds can use Linux, MacOS or Windows. +Set the desired compiler(s) in the build **matrix**. +This example is for **Linux** (Ubuntu 18.04) and **C**. 
+ +```yaml +dist: bionic +group: travis_latest + +os: linux +language: python + +matrix: + include: + - env: CC=gcc + - env: CC=clang + +install: + - pip install meson ninja + +script: + - meson builddir + - meson compile -C builddir + - meson test -C builddir +``` + +## GitHub Actions + +GitHub Actions are distinct from Azure Pipelines in their workflow +syntax. It can be easier to setup specific CI tasks in Actions than +Pipelines, depending on the particular task. This is an example file: +.github/workflows/ci_meson.yml supposing the project is C-based, using +GCC on Linux, Mac and Windows. The optional `on:` parameters only run +this CI when the C code is changed--corresponding ci_python.yml might +run only on "**.py" file changes. + +```yml +name: ci_meson + +on: + push: + paths: + - "**.c" + - "**.h" + pull_request: + paths: + - "**.c" + - "**.h" + +jobs: + + linux: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + with: + python-version: '3.x' + - run: pip install meson ninja + - run: meson setup builddir/ + env: + CC: gcc + - run: meson test -C builddir/ -v + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: Linux_Meson_Testlog + path: builddir/meson-logs/testlog.txt + + macos: + runs-on: macos-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + with: + python-version: '3.x' + - run: brew install gcc + - run: pip install meson ninja + - run: meson setup builddir/ + env: + CC: gcc + - run: meson test -C builddir/ -v + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: MacOS_Meson_Testlog + path: builddir/meson-logs/testlog.txt + + windows: + runs-on: windows-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + with: + python-version: '3.x' + - run: pip install meson ninja + - run: meson setup builddir/ + env: + CC: gcc + - run: meson test -C builddir/ -v + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: Windows_Meson_Testlog + path: builddir/meson-logs/testlog.txt +``` diff --git a/meson/docs/markdown/Contributing.md b/meson/docs/markdown/Contributing.md new file mode 100644 index 000000000..bb2f60488 --- /dev/null +++ b/meson/docs/markdown/Contributing.md @@ -0,0 +1,568 @@ +--- +short-description: Contributing to Meson +... + +# Contributing to Meson + +A large fraction of Meson is contributed by people outside the core +team. This documentation explains some of the design rationales of +Meson as well as how to create and submit your patches for inclusion +to Meson. + +Thank you for your interest in participating to the development. + +## Submitting patches + +All changes must be submitted as [pull requests to +Github](https://github.com/mesonbuild/meson/pulls). This causes them +to be run through the CI system. All submissions must pass a full CI +test run before they are even considered for submission. + +## Keeping pull requests up to date + +It is possible that while your pull request is being reviewed, other +changes are committed to master that cause merge conflicts that must +be resolved. The basic rule for this is very simple: keep your pull +request up to date using rebase _only_. + +Do not merge head back to your branch. Any merge commits in your pull +request make it not acceptable for merging into master and you must +remove them. 
+ +## Special procedure for new features + +Every new feature requires some extra steps, namely: + +- Must include a project test under `test cases/`, or if that's not + possible or if the test requires a special environment, it must go + into `run_unittests.py`. +- Must be registered with the [FeatureChecks + framework](Release-notes-for-0.47.0.md#feature-detection-based-on-meson_version-in-project) + that will warn the user if they try to use a new feature while + targeting an older Meson version. +- Needs a release note snippet inside `docs/markdown/snippets/` with + a heading and a brief paragraph explaining what the feature does + with an example. + +## Acceptance and merging + +The kind of review and acceptance any merge proposal gets depends on +the changes it contains. All pull requests must be reviewed and +accepted by someone with commit rights who is not the original +submitter. Merge requests can be roughly split into three different +categories. + +The first one consists of MRs that only change the markdown +documentation under `docs/markdown`. Anyone with access rights can +push changes to these directly to master. For major changes it is +still recommended to create a MR so other people can comment on it. + +The second group consists of merges that don't change any +functionality, fixes to the CI system and bug fixes that have added +regression tests (see below) and don't change existing +functionality. Once successfully reviewed anyone with merge rights can +merge these to master. + +The final kind of merges are those that add new functionality or +change existing functionality in a backwards incompatible way. These +require the approval of the project lead. + +In a simplified list form the split would look like the following: + +- members with commit access can do: + - documentation changes (directly to master if warranted) + - bug fixes that don't change functionality + - refactorings + - new dependency types + - new tool support (e.g. a new Doxygen-kind of tool) + - support for new compilers to existing languages +- project leader decision is needed for: + - new modules + - new functions in the Meson language + - syntax changes for Meson files + - changes breaking backwards compatibility + - support for new languages + +## Strategy for merging pull requests to trunk + +Meson's merge strategy should fulfill the following guidelines: + +- preserve as much history as possible + +- have as little junk in the repo as possible + +- everything in the "master lineage" should always pass all tests + +These goals are slightly contradictory so the correct thing to do +often requires some judgement on part of the person doing the +merge. Github provides three different merge options, The rules of +thumb for choosing between them goes like this: + +- single commit pull requests should always be rebased + +- a pull request with one commit and one "fixup" commit (such as + testing something to see if it passes CI) should be squashed + +- large branches with many commits should be merged with a merge + commit, especially if one of the commits does not pass all tests + (which happens in e.g. large and difficult refactorings) + +If in doubt, ask for guidance on IRC. + +## Tests + +All new features must come with automatic tests that thoroughly prove +that the feature is working as expected. Similarly bug fixes must come +with a unit test that demonstrates the bug, proves that it has been +fixed and prevents the feature from breaking in the future. 
+ +Sometimes it is difficult to create a unit test for a given bug. If +this is the case, note this in your pull request. We may permit bug +fix merge requests in these cases. This is done on a case by case +basis. Sometimes it may be easier to write the test than convince the +maintainers that one is not needed. Exercise judgment and ask for help +in problematic cases. + +The tests are split into two different parts: unit tests and full +project tests. To run all tests, execute `./run_tests.py`. Unit tests +can be run with `./run_unittests.py` and project tests with +`./run_project_tests.py`. + +### Project tests + +Subsets of project tests can be selected with +`./run_project_tests.py --only` option. This can save a great deal of +time when only a certain part of Meson is being tested. +For example, a useful and easy contribution to Meson is making +sure the full set of compilers is supported. One could for example test +various Fortran compilers by setting `FC=ifort` or `FC=flang` or similar +with `./run_project_test.py --only fortran`. +Some families of tests require a particular backend to run. +For example, all the CUDA project tests run and pass on Windows via +`./run_project_tests.py --only cuda --backend ninja` + +Each project test is a standalone project that can be compiled on its +own. They are all in the `test cases` subdirectory. The simplest way to +run a single project test is to do something like `./meson.py test\ +cases/common/1\ trivial builddir`. The one exception to this is `test +cases/unit` directory discussed below. + +The test cases in the `common` subdirectory are meant to be run always +for all backends. They should only depend on C and C++, without any +external dependencies such as libraries. Tests that require those are +in the `test cases/frameworks` directory. If there is a need for an +external program in the common directory, such as a code generator, it +should be implemented as a Python script. The goal of test projects is +also to provide sample projects that end users can use as a base for +their own projects. + +All project tests follow the same pattern: they are configured, +compiled, tests are run and finally install is run. Passing means that +configuring, building and tests succeed and that installed files match +those expected. + +Any tests that require more thorough analysis, such as checking that +certain compiler arguments can be found in the command line or that +the generated pkg-config files actually work should be done with a +unit test. + +Additionally: + +* `crossfile.ini` and `nativefile.ini` are passed to the configure step with +`--cross-file` and `--native-file` options, respectively. + +* `mlog.cmd_ci_include()` can be called from anywhere inside Meson to +capture the contents of an additional file into the CI log on failure. + +Projects needed by unit tests are in the `test cases/unit` +subdirectory. They are not run as part of `./run_project_tests.py`. + +### Configuring project tests + +The (optional) `test.json` file, in the root of a test case, is used +for configuring the test. All of the following root entries in the `test.json` +are independent of each other and can be combined as needed. 
+
+Example `test.json`:
+
+```json
+{
+  "env": {
+    "VAR": "VAL"
+  },
+  "installed": [
+    { "type": "exe", "file": "usr/bin/testexe" },
+    { "type": "pdb", "file": "usr/bin/testexe" },
+    { "type": "shared_lib", "file": "usr/lib/z", "version": "1.2.3" }
+  ],
+  "matrix": {
+    "options": {
+      "opt1": [
+        { "val": "abc" },
+        { "val": "qwert" },
+        { "val": "bad" }
+      ],
+      "opt2": [
+        { "val": null },
+        { "val": "true" },
+        { "val": "false" }
+      ]
+    },
+    "exclude": [
+      { "opt1": "qwert", "opt2": "false" },
+      { "opt1": "bad" }
+    ]
+  },
+  "tools": {
+    "cmake": ">=3.11"
+  }
+}
+```
+
+#### env
+
+The `env` key contains a dictionary which specifies additional
+environment variables to be set during the configure step of the test.
+
+There is some basic support for configuring the string with the `@@` syntax:
+
+- `@ROOT@`: absolute path of the source directory
+- `@PATH@`: current value of the `PATH` env variable
+
+#### installed
+
+The `installed` key contains a list of dicts, describing which files are expected
+to be installed. Each dict contains the following keys:
+
+- `file`
+- `type`
+- `platform` (optional)
+- `version` (optional)
+- `language` (optional)
+
+The `file` entry contains the relative path (from the install root) to the
+actually installed file.
+
+The `type` entry specifies how the `file` path should be interpreted based on the
+current platform. The following values are currently supported:
+
+| type          | Description                                                                                              |
+| ------------- | -------------------------------------------------------------------------------------------------------- |
+| `file`        | No postprocessing, just use the provided path                                                            |
+| `python_file` | Use the provided path while replacing the python directory.                                              |
+| `dir`         | To include all files inside the directory (for generated docs, etc). The path must be a valid directory  |
+| `exe`         | For executables. On Windows the `.exe` suffix is added to the path in `file`                             |
+| `shared_lib`  | For shared libraries, always written as `name`. The appropriate suffix and prefix are added by platform  |
+| `python_lib`  | For python libraries, while replacing the python directory. The appropriate suffix is added by platform  |
+| `pdb`         | For Windows PDB files. PDB entries are ignored on non-Windows platforms                                   |
+| `implib`      | For Windows import libraries. These entries are ignored on non-Windows platforms                          |
+| `py_implib`   | For Windows import libraries. These entries are ignored on non-Windows platforms                          |
+| `implibempty` | Like `implib`, but no symbols are exported in the library                                                 |
+| `expr`        | `file` is an expression. This type should be avoided and removed if possible                              |
+
+Except for the `file`, `python_file` and `expr` types, all paths should be provided *without* a suffix.
+
+| Argument   | Applies to          | Description                                                             |
+| ---------- | ------------------- | ----------------------------------------------------------------------- |
+| `version`  | `shared_lib`, `pdb` | Sets the version to look for appropriately per-platform                |
+| `language` | `pdb`               | Determines which compiler/linker determines the existence of this file |
+
+The `shared_lib` and `pdb` types take an optional additional
+parameter, `version`. This is a string in `X.Y.Z` format that will
+be applied to the library. Each entry may list only a single version,
+so every version to be tested needs its own entry. The harness will
+apply this correctly per platform (see the example below).
+
+The `python_file`, `python_lib`, and `py_implib` types have basic support for configuring the string with the `@@` syntax:
+
+- `@PYTHON_PLATLIB@`: python `get_install_dir` directory relative to prefix
+- `@PYTHON_PURELIB@`: python `get_install_dir(pure: true)` directory relative to prefix
+
+`pdb` takes an optional `language` argument. This determines which
+compiler/linker should generate the pdb file, because it is possible to
+mix compilers that do and don't generate pdb files (dmd's optlink
+doesn't). Currently this is only needed when mixing D and C code.
+
+For example, to test several versions of the same shared library:
+
+```json
+[
+  { "type": "shared_lib", "file": "usr/lib/lib" },
+  { "type": "shared_lib", "file": "usr/lib/lib", "version": "1" },
+  { "type": "shared_lib", "file": "usr/lib/lib", "version": "1.2.3" }
+]
+```
+
+This will be applied appropriately per platform. On Windows this
+expects `lib.dll` and `lib-1.dll`; on macOS it expects `liblib.dylib`
+and `liblib.1.dylib`. On other Unices it expects `liblib.so`,
+`liblib.so.1`, and `liblib.so.1.2.3`.
+
+If the `platform` key is present, the installed file entry is only
+considered if the platform matches. The following values for
+`platform` are currently supported:
+
+| platform  | Description                                                            |
+| --------- | ---------------------------------------------------------------------- |
+| `msvc`    | Matches when an msvc-like compiler is used (`msvc`, `clang-cl`, etc.)  |
+| `gcc`     | Not `msvc`                                                             |
+| `cygwin`  | Matches when the platform is cygwin                                    |
+| `!cygwin` | Not `cygwin`                                                           |
+
+#### matrix
+
+The `matrix` section can be used to define a test matrix to run
+project tests with different Meson options.
+
+In the `options` dict, all possible options and their values are
+specified. Each key in the `options` dict is a Meson option and stores
+a list of all potential values, each given as a dict.
+
+Each value must contain the `val` key for the value of the option.
+`null` can be used for adding matrix entries without the current
+option.
+
+The `skip_on_env`, `skip_on_jobname` and `skip_on_os` keys (as described below)
+may be used in the value to skip that matrix entry, based on the current
+environment.
+
+Similarly, the `compilers` key can be used to define a mapping of
+languages to compilers that are required for this value.
+
+```json
+{
+  "compilers": {
+    "c": "gcc",
+    "cpp": "gcc",
+    "d": "gdc"
+  }
+}
+```
+
+Specific option combinations can be excluded with the `exclude`
+section. It should be noted that `exclude` does not require exact
+matches. Instead, any matrix entry containing all option value
+combinations in `exclude` will be excluded. Thus an empty dict (`{}`)
+will match **all** elements in the test matrix.
+
+The above example will produce the following matrix entries:
+- `opt1=abc`
+- `opt1=abc opt2=true`
+- `opt1=abc opt2=false`
+- `opt1=qwert`
+- `opt1=qwert opt2=true`
+
+#### do_not_set_opts
+
+A list of options that the test harness should not set automatically
+for this test. Currently supported values are:
+- `prefix`
+- `libdir`
+
+#### tools
+
+This section specifies a dict of tool requirements in a simple
+key-value format. If a tool is specified, it has to be present in the
+environment, and the version requirement must be fulfilled. Otherwise,
+the entire test is skipped (including every element in the test
+matrix).
+
+#### stdout
+
+The `stdout` key contains a list of dicts, describing the expected
+stdout.
+
+Each dict contains the following keys:
+
+- `line`
+- `match` (optional)
+
+Each item in the list is matched, in order, against the remaining
+actual stdout lines, after any previous matches.
If the actual stdout +is exhausted before every item in the list is matched, the expected +output has not been seen, and the test has failed. + +The `match` element of the dict determines how the `line` element is +matched: + +| Type | Description | +| -------- | ----------------------- | +| `literal` | Literal match (default) | +| `re` | regex match | + +#### skip_on_env + +The `skip_on_env` key can be used to specify a list of environment variables. If +at least one environment variable in the `skip_on_env` list is present, the test +is skipped. + +#### skip_on_jobname + +The `skip_on_jobname` key contains a list of strings. If the `MESON_CI_JOBNAME` +environment variable is set, and any of them are a sub-string of it, the test is +expected to be skipped (that is, it is expected that the test will output +`MESON_SKIP_TEST`, because the CI environment is not one in which it can run, +for whatever reason). + +The test is failed if it skips or runs unexpectedly. + +#### skip_on_os + +The `skip_on_os` key can be used to specify a list of OS names (or their +negations, prefixed with a `!`). If at least one item in the `skip_on_os` list +is matched, the test is expected to be skipped. + +The test is failed if it skips or runs unexpectedly. + +### Skipping integration tests + +Meson uses several continuous integration testing systems that have +slightly different interfaces for indicating a commit should be +skipped. + +Continuous integration systems currently used: +- [Azure Pipelines](https://docs.microsoft.com/en-us/azure/devops/pipelines/scripts/git-commands?view=vsts&tabs=yaml#how-do-i-avoid-triggering-a-ci-build-when-the-script-pushes) + allows `***NO_CI***` in the commit message. +- [Sider](https://sider.review) + runs Flake8 ([see below](#python-coding-style)) + +To promote consistent naming policy, use: + +- `[skip ci]` in the commit title if you want to disable all + integration tests + +## Documentation + +The `docs` directory contains the full documentation that will be used +to generate [the Meson web site](http://mesonbuild.com). Every change +in functionality must change the documentation pages. In most cases +this means updating the reference documentation page but bigger +changes might need changes in other documentation, too. + +All new functionality needs to have a mention in the release +notes. These features should be written in standalone files in the +`docs/markdown/snippets` directory. The release manager will combine +them into one page when doing the release. + +[Integration tests should be disabled](#skipping-integration-tests) for +documentation-only commits by putting `[skip ci]` into commit title. +Reviewers should ask contributors to put `[skip ci]` into the title because +tests are run again after merge for `master`. + +## Python Coding style + +Meson follows the basic Python coding style. Additional rules are the +following: + +- indent 4 spaces, no tabs ever +- indent meson.build files with two spaces +- try to keep the code as simple as possible +- contact the mailing list before embarking on large scale projects + to avoid wasted effort + +Meson uses Flake8 for style guide enforcement. The Flake8 options for +the project are contained in .flake8. 
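+
+As a purely illustrative sketch (not the project's actual configuration), a
+`.flake8` file is a small INI-style document along these lines:
+
+```ini
+[flake8]
+# Hypothetical values: checks to skip and the maximum allowed line length.
+ignore = W503
+max-line-length = 120
+```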
+ +To run Flake8 on your local clone of Meson: + +```console +$ python3 -m pip install flake8 +$ cd meson +$ flake8 +``` + +To run it automatically before committing: + +```console +$ flake8 --install-hook=git +$ git config --bool flake8.strict true +``` + +## C/C++ coding style + +Meson has a bunch of test code in several languages. The rules for +those are simple. + +- indent 4 spaces, no tabs ever +- brace always on the same line as if/for/else/function definition + +## External dependencies + +The goal of Meson is to be as easily usable as possible. The user +experience should be "get Python3 and Ninja, run", even on +Windows. Unfortunately this means that we can't have dependencies on +projects outside of Python's standard library. This applies only to +core functionality, though. For additional helper programs etc the use +of external dependencies may be ok. If you feel that you are dealing +with this kind of case, please contact the developers first with your +use case. + +## Turing completeness + +The main design principle of Meson is that the definition language is +not Turing complete. Any change that would make Meson Turing complete +is automatically rejected. In practice this means that defining your +own functions inside `meson.build` files and generalised loops will +not be added to the language. + +## Do I need to sign a CLA in order to contribute? + +No you don't. All contributions are welcome. + +## No lingering state + +Meson operates in much the same way as functional programming +languages. It has inputs, which include `meson.build` files, values of +options, compilers and so on. These are passed to a function, which +generates output build definition. This function is pure, which means that: + +- for any given input the output is always the same +- running Meson twice in a row _always_ produce the same output in both runs + +The latter one is important, because it enforces that there is no way +for "secret state" to pass between consecutive invocations of +Meson. This is the reason why, for example, there is no `set_option` +function even though there is a `get_option` one. + +If this were not the case, we could never know if the build output is +"stable". For example suppose there were a `set_option` function and a +boolean variable `flipflop`. Then you could do this: + +```meson +set_option('flipflop', not get_option('flipflop')) +``` + +This piece of code would never converge. Every Meson run would change +the value of the option and thus the output you get out of this build +definition would be random. + +Meson does not permit this by forbidding these sorts of covert +channels. + +There is one exception to this rule. Users can call into external +commands with `run_command`. If the output of that command does not +behave like a pure function, this problem arises. Meson does not try +to guard against this case, it is the responsibility of the user to +make sure the commands they run behave like pure functions. + +## Environment variables + +Environment variables are like global variables, except that they are +also hidden by default. Envvars should be avoided whenever possible, +all functionality should be exposed in better ways such as command +line switches. + +## Random design points that fit nowhere else + +- All features should follow the 90/9/1 rule. 90% of all use cases + should be easy, 9% should be possible and it is totally fine to not + support the final 1% if it would make things too complicated. 
+ +- Any build directory will have at most two toolchains: one native and + one cross. + +- Prefer specific solutions to generic frameworks. Solve the end + user's problems rather than providing them tools to do it + themselves. + +- Never use features of the Unix shell (or Windows shell for that + matter). Doing things like forwarding output with `>` or invoking + multiple commands with `&&` are not permitted. Whenever these sorts + of requirements show up, write an internal Python script with the + desired functionality and use that instead. diff --git a/meson/docs/markdown/Creating-Linux-binaries.md b/meson/docs/markdown/Creating-Linux-binaries.md new file mode 100644 index 000000000..71c96e470 --- /dev/null +++ b/meson/docs/markdown/Creating-Linux-binaries.md @@ -0,0 +1,133 @@ +--- +short-description: Creating universal Linux binaries +... + +# Creating Linux binaries + +Creating Linux binaries that can be downloaded and run on any distro +(like .dmg packages for OSX or .exe installers for Windows) has +traditionally been difficult. This is even more tricky if you want to +use modern compilers and features, which is especially desired in game +development. There is still no simple turn-key solution for this +problem but with a bit of setup it can be relatively straightforward. + +## Installing system and GCC + +First you need to do a fresh operating system install. You can use +spare hardware, VirtualBox, cloud or whatever you want. Note that the +distro you install must be *at least as old* as the oldest release you +wish to support. Debian stable is usually a good choice, though +immediately after its release you might want to use Debian oldstable +or the previous Ubuntu LTS. The oldest supported version of CentOS is +also a good choice. + +Once you have installed the system, you need to install +build-dependencies for GCC. In Debian-based distros this can be done +with the following commands: + +```console +$ apt-get build-dep g++ +$ apt-get install pkg-config libgmp-dev libmpfr-dev libmpc-dev +``` + +Then create a `src` subdirectory in your home directory. Copy-paste +the following into `install_gcc.sh` and execute it. + +```bash +#!/bin/sh + +wget ftp://ftp.fu-berlin.de/unix/languages/gcc/releases/gcc-4.9.2/gcc-4.9.2.tar.bz2 +tar xf gcc-4.9.2.tar.bz2 + +mkdir objdir +cd objdir +../gcc-4.9.2/configure --disable-bootstrap --prefix=${HOME}/devroot \ + --disable-multilib --enable-languages=c,c++ +make -j 4 +make install-strip +ln -s gcc ${HOME}/devroot/bin/cc +``` + +Then finally add the following lines to your `.bashrc`. + +```console +$ export LD_LIBRARY_PATH=${HOME}/devroot/lib +$ export PATH=${HOME}/devroot/bin:$PATH +$ export PKG_CONFIG_PATH=${HOME}/devroot/lib/pkgconfig +``` + +Log out and back in and now your build environment is ready to use. + +## Adding other tools + +Old distros might have too old versions of some tools. For Meson this +could include Python 3 and Ninja. If this is the case you need to +download, build and install new versions into `~/devroot` in the usual +way. + +## Adding dependencies + +You want to embed and statically link every dependency you can +(especially C++ dependencies). Meson's [Wrap package +manager](Wrap-dependency-system-manual.md) might be of use here. This +is equivalent to what you would do on Windows, OSX, Android etc. +Sometimes static linking is not possible. In these cases you need to +copy the .so files inside your package. Let's use SDL2 as an example. 
+First we download and install it as usual giving it our custom install +prefix (that is, `./configure --prefix=${HOME}/devroot`). This makes +Meson's dependency detector pick it up automatically. + +## Building and installing + +Building happens in much the same way as normally. There are just two +things to note. First, you must tell GCC to link the C++ standard +library statically. If you don't then your app is guaranteed to break +as different distros have binary-incompatible C++ libraries. The +second thing is that you need to point your install prefix to some +empty staging area. Here's the Meson command to do that: + +```console +$ LDFLAGS=-static-libstdc++ meson --prefix=/tmp/myapp +``` + +The aim is to put the executable in `/tmp/myapp/bin` and shared +libraries to `/tmp/myapp/lib`. The next thing you need is the +embedder. It takes your dependencies (in this case only +`libSDL2-2.0.so.0`) and copies them in the lib directory. Depending on +your use case you can either copy the files by hand or write a script +that parses the output of `ldd binary_file`. Be sure not to copy +system libraries (`libc`, `libpthread`, `libm` etc). For an example, +see the [sample +project](https://github.com/jpakkane/meson/tree/master/manual%20tests/4%20standalone%20binaries). + +Make the script run during install with this: + +```meson +meson.add_install_script('linux_bundler.sh') +``` + +## Final steps + +If you try to run the program now it will most likely fail to start or +crashes. The reason for this is that the system does not know that the +executable needs libraries from the `lib` directory. The solution for +this is a simple wrapper script. Create a script called `myapp.sh` +with the following content: + +```bash +#!/bin/bash + +cd "${0%/*}" +export LD_LIBRARY_PATH="$(pwd)/lib" +bin/myapp +``` + +Install it with this Meson snippet: + +```meson +install_data('myapp.sh', install_dir : '.') +``` + +And now you are done. Zip up your `/tmp/myapp` directory and you have +a working binary ready for deployment. To run the program, just unzip +the file and run `myapp.sh`. diff --git a/meson/docs/markdown/Creating-OSX-packages.md b/meson/docs/markdown/Creating-OSX-packages.md new file mode 100644 index 000000000..849d5fd5d --- /dev/null +++ b/meson/docs/markdown/Creating-OSX-packages.md @@ -0,0 +1,158 @@ +--- +short-description: Tools to create OS X packages +... + +# Creating OSX packages + +Meson does not have native support for building OSX packages but it +does provide all the tools you need to create one yourself. The reason +for this is that it is a very hard task to write a system that +provides for all the different ways to do that but it is very easy to +write simple scripts for each application. + +Sample code for this can be found in [the Meson manual test +suite](https://github.com/jpakkane/meson/tree/master/manual%20tests/4%20standalone%20binaries). + +## Creating an app bundle + +OSX app bundles are actually extremely simple. They are just a +directory of files in a certain format. All the details you need to +know are on [this +page](https://stackoverflow.com/questions/1596945/building-osx-app-bundle) +and it is highly recommended that you read it first. + +Let's assume that we are creating our app bundle into +`/tmp/myapp.app`. Suppose we have one executable, so we need to +install that into `Contents/MacOS`. 
If we define the executable like +this: + +```meson +executable('myapp', 'foo1.c', ..., install : true) +``` + +then we just need to initialize our build tree with this command: + +```console +$ meson --prefix=/tmp/myapp.app \ + --bindir=Contents/MacOS \ + builddir \ + +``` + +Now when we do `meson install` the bundle is properly staged. If you +have any resource files or data, you need to install them into +`Contents/Resources` either by custom install commands or specifying +more install paths to the Meson command. + +Next we need to install an `Info.plist` file and an icon. For those we +need the following two Meson definitions. + +```meson +install_data('myapp.icns', install_dir : 'Contents/Resources') +install_data('Info.plist', install_dir : 'Contents') +``` + +The format of `Info.plist` can be found in the link or the sample +project linked above. The simplest way to get an icon in the `icns` +format is to save your image as a tiff an then use the `tiff2icns` helper +application that comes with XCode. + +Some applications assume that the working directory of the app process +is the same where the binary executable is. If this is the case for +you, then you need to create a wrapper script that looks like this: + +```bash +#!/bin/bash + +cd "${0%/*}" +./myapp +``` + +install it with this: + +```meson +install_data('myapp.sh', install_dir : 'Contents/MacOS') +``` + +and make sure that you specify `myapp.sh` as the executable to run in +your `Info.plist`. + +If you are not using any external libraries, this is all you need to +do. You now have a full app bundle in `/tmp/myapp.app` that you can +use. Most applications use third party frameworks and libraries, +though, so you need to add them to the bundle so it will work on other +peoples' machines. + +As an example we are going to use the [SDL2](https://libsdl.org/) +framework. In order to bundle it in our app, we first specify an +installer script to run. + +```meson +meson.add_install_script('install_script.sh') +``` + +The install script does two things. First it copies the whole +framework into our bundle. + +```console +$ mkdir -p ${MESON_INSTALL_PREFIX}/Contents/Frameworks +$ cp -R /Library/Frameworks/SDL2.framework \ + ${MESON_INSTALL_PREFIX}/Contents/Frameworks +``` + +Then it needs to alter the library search path of our +executable(s). This tells OSX that the libraries your app needs are +inside your bundle. In the case of SDL2, the invocation goes like +this: + +```console +$ install_name_tool -change @rpath/SDL2.framework/Versions/A/SDL2 \ + @executable_path/../FrameWorks/SDL2.framework/Versions/A/SDL2 \ + ${MESON_INSTALL_PREFIX}/Contents/MacOS/myapp +``` + +This is the part of OSX app bundling that you must always do +manually. OSX dependencies come in many shapes and forms and +unfortunately there is no reliable automatic way to determine how each +dependency should be handled. Frameworks go to the `Frameworks` +directory while plain `.dylib` files usually go to +`Contents/Resources/lib` (but you can put them wherever you like). To +get this done you have to check what your program links against with +`otool -L /path/to/binary` and manually add the copy and fix steps to +your install script. Do not copy system libraries inside your bundle, +though. + +After this you have a fully working, self-contained OSX app bundle +ready for distribution. + +## Creating a .dmg installer + +A .dmg installer is similarly quite simple, at its core it is +basically a fancy compressed archive. 
A good description can be found +on [this page](https://el-tramo.be/guides/fancy-dmg/). Please read it +and create a template image file according to its instructions. + +The actual process of creating the installer is very simple: you mount +the template image, copy your app bundle in it, unmount it and convert +the image into a compressed archive. The actual commands to do this +are not particularly interesting, feel free to steal them from either +the linked page above or from the sample script in Meson's test suite. + +## Putting it all together + +There are many ways to put the .dmg installer together and different +people will do it in different ways. The linked sample code does it by +having two different scripts. This separates the different pieces +generating the installer into logical pieces. + +`install_script.sh` only deals with embedding dependencies and fixing +the library paths. + +`build_osx_installer.sh` sets up the build with the proper paths, +compiles, installs and generates the .dmg package. + +The main reasoning here is that in order to build a complete OSX +installer package from source, all you need to do is to cd into the +source tree and run `./build_osx_installer.sh`. To build packages on +other platforms you would write scripts such as +`build_windows_installer.bat` and so on. diff --git a/meson/docs/markdown/Creating-releases.md b/meson/docs/markdown/Creating-releases.md new file mode 100644 index 000000000..b8244596e --- /dev/null +++ b/meson/docs/markdown/Creating-releases.md @@ -0,0 +1,80 @@ +--- +short-description: Creating releases +... + +# Creating releases + +In addition to development, almost all projects provide periodical +source releases. These are standalone packages (usually either in +tar or zip format) of the source code. They do not contain any +revision control metadata, only the source code. Meson provides +a simple way of generating these, with the `meson dist` command. + +Meson provides a simple way of generating these. It consists of a +single command *(available since 0.52.0)*: + +```sh +meson dist +``` + +or alternatively (on older Meson versions with `ninja` backend): + +```sh +ninja dist +``` + +This creates a file called `projectname-version.tar.xz` in the build +tree subdirectory `meson-dist`. This archive contains the full +contents of the latest commit in revision control including all the +submodules (recursively). All revision control metadata is removed. +Meson then takes this archive and tests that it works by doing a full +`compile` + `test` + `install` cycle. If all these pass, Meson will +then create a `SHA-256` checksum file next to the archive. + + +## Autotools dist VS Meson dist + +Meson behaviour is different from Autotools. The Autotools "dist" +target packages up the current source tree. Meson packages the latest +revision control commit. The reason for this is that it prevents +developers from doing accidental releases where the distributed +archive does not match any commit in revision control (especially the +one tagged for the release). + + +## Include subprojects in your release + +The `meson dist` command has `--include-subprojects` command line +option. When enabled, the source tree of all subprojects used by the +current build will also be included in the final tarball. This is +useful to distribute self contained tarball that can be built offline +(i.e. `--wrap-mode=nodownload`). 
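+
+For example, a self-contained tarball that bundles every subproject used by
+the current build could be produced with a command along these lines (the
+build directory name is illustrative):
+
+```console
+$ meson dist -C builddir --include-subprojects
+```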
+ + +## Skip build and test with `--no-tests` + +The `meson dist` command has a `--no-tests` option to skip build and +tests steps of generated packages. It can be used to not waste time +for example when done in CI that already does its own testing. + +So with `--no-tests` you can tell Meson "Do not build and test generated +packages.". + +## Release a subproject separately + +*Since 0.57.0* the `meson dist` command can now create a distribution tarball +for a subproject in the same git repository as the main project. This can be +useful if parts of the project (e.g. libraries) can be built and distributed +separately. In that case they can be moved into `subprojects/mysub` and running +`meson dist` in that directory will now create a tarball containing only the +source code from that subdir and not the rest of the main project or other +subprojects. + +For example: +```sh +git clone https://github.com/myproject +cd myproject/subprojects/mysubproject +meson builddir +meson dist -C builddir +``` +This produces `builddir/meson-dist/mysubproject-1.0.tar.xz` tarball. diff --git a/meson/docs/markdown/Cross-compilation.md b/meson/docs/markdown/Cross-compilation.md new file mode 100644 index 000000000..4410c0363 --- /dev/null +++ b/meson/docs/markdown/Cross-compilation.md @@ -0,0 +1,348 @@ +--- +short-description: Setting up cross-compilation +... + +# Cross compilation + +Meson has full support for cross compilation. Since cross compiling is +more complicated than native building, let's first go over some +nomenclature. The three most important definitions are traditionally +called *build*, *host* and *target*. This is confusing because those +terms are used for quite many different things. To simplify the issue, +we are going to call these the *build machine*, *host machine* and +*target machine*. Their definitions are the following: + +* *build machine* is the computer that is doing the actual compiling. +* *host machine* is the machine on which the compiled binary will run. +* *target machine* is the machine on which the compiled binary's + output will run, *only meaningful* if the program produces + machine-specific output. + +The `tl/dr` summary is the following: if you are doing regular cross +compilation, you only care about `build_machine` and +`host_machine`. Just ignore `target_machine` altogether and you will +be correct 99% of the time. Only compilers and similar tools care +about the target machine. In fact, for so-called "multi-target" tools +the target machine need not be fixed at build-time like the others but +chosen at runtime, so `target_machine` *still* doesn't matter. If your +needs are more complex or you are interested in the actual details, do +read on. + +This might be easier to understand through examples. Let's start with +the regular, not cross-compiling case. In these cases all of these +three machines are the same. Simple so far. + +Let's next look at the most common cross-compilation setup. Let's +suppose you are on a 64 bit OSX machine and you are cross compiling a +binary that will run on a 32 bit ARM Linux board. In this case your +*build machine* is 64 bit OSX, your *host machine* is 32 bit ARM Linux +and your *target machine* is irrelevant (but defaults to the same +value as the *host machine*). This should be quite understandable as +well. + +The usual mistake in this case is to call the OSX system the *host* +and the ARM Linux board the *target*. That's because these were their +actual names when the cross-compiler itself was compiled! 
Let's assume +the cross-compiler was created on OSX too. When that happened the +*build* and *host machines* were the same OSX and different from the +ARM Linux *target machine*. + +In a nutshell, the typical mistake assumes that the terms *build*, +*host* and *target* refer to some fixed positions whereas they're +actually relative to where the current compiler is running. Think of +*host* as a *child* of the current compiler and *target* as an +optional *grand-child*. Compilers don't change their terminology when +they're creating another compiler, that would at the very least make +their user interface much more complex. + +The most complicated case is when you cross-compile a cross compiler. +As an example you can, on a Linux machine, generate a cross compiler +that runs on Windows but produces binaries on MIPS Linux. In this case +*build machine* is x86 Linux, *host machine* is x86 Windows and +*target machine* is MIPS Linux. This setup is known as the [Canadian +Cross](https://en.wikipedia.org/wiki/Cross_compiler#Canadian_Cross). +As a side note, be careful when reading cross compilation articles on +Wikipedia or the net in general. It is very common for them to get +build, host and target mixed up, even in consecutive sentences, which +can leave you puzzled until you figure it out. + +Again note that when you cross-compile something, the 3 systems +(*build*, *host*, and *target*) used when building the cross compiler +don't align with the ones used when building something with that +newly-built cross compiler. To take our Canadian Cross scenario from +above (for full generality), since its *host machine* is x86 Windows, +the *build machine* of anything we build with it is *x86 Windows*. And +since its *target machine* is MIPS Linux, the *host machine* of +anything we build with it is *MIPS Linux*. Only the *target machine* +of whatever we build with it can be freely chosen by us, say if we +want to build another cross compiler that runs on MIPS Linux and +targets Aarch64 iOS. As this example hopefully makes clear to you, the +machine names are relative and shifted over to the left by one +position. + +If you did not understand all of the details, don't worry. For most +people it takes a while to wrap their head around these concepts. +Don't panic, it might take a while to click, but you will get the hang +of it eventually. + +## Defining the environment + +Meson requires you to write a cross build definition file. It defines +various properties of the cross build environment. The cross file +consists of different sections. + +There are a number of options shared by cross and native files, +[here](Machine-files.md). It is assumed that you have read that +section already, as this documentation will only call out options +specific to cross files. + +### Binaries + +```ini +[binaries] +exe_wrapper = 'wine' # A command used to run generated executables. +``` + +The `exe_wrapper` option defines a *wrapper command* that can be used +to run executables for this host. In this case we can use Wine, which +runs Windows applications on Linux. Other choices include running the +application with qemu or a hardware simulator. If you have this kind +of a wrapper, these lines are all you need to write. Meson will +automatically use the given wrapper when it needs to run host +binaries. This happens e.g. when running the project's test suite. 
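+
+In practice the `[binaries]` section usually also names the cross toolchain
+itself. A sketch for a hypothetical MinGW toolchain (the exact tool names
+depend on your distribution):
+
+```ini
+[binaries]
+c = 'x86_64-w64-mingw32-gcc'
+cpp = 'x86_64-w64-mingw32-g++'
+ar = 'x86_64-w64-mingw32-ar'
+strip = 'x86_64-w64-mingw32-strip'
+pkgconfig = 'x86_64-w64-mingw32-pkg-config'
+exe_wrapper = 'wine' # A command used to run generated executables.
+```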
+ +### Properties + +In addition to the properties allowed in [all machine +files](Machine-files.md#properties), the cross file may contain +specific information about the cross compiler or the host machine. It +looks like this: + +```ini +[properties] +sizeof_int = 4 +sizeof_wchar_t = 4 +sizeof_void* = 4 + +alignment_char = 1 +alignment_void* = 4 +alignment_double = 4 + +has_function_printf = true + +sys_root = '/some/path' +pkg_config_libdir = '/some/path/lib/pkgconfig' +``` + +In most cases you don't need the size and alignment settings, Meson +will detect all these by compiling and running some sample programs. +If your build requires some piece of data that is not listed here, +Meson will stop and write an error message describing how to fix the +issue. If you need extra compiler arguments to be used during cross +compilation you can set them with `[langname]_args = [args]`. Just +remember to specify the args as an array and not as a single string +(i.e. not as `'-DCROSS=1 -DSOMETHING=3'`). + +*Since 0.52.0* The `sys_root` property may point to the root of the +host system path (the system that will run the compiled binaries). +This is used internally by Meson to set the PKG_CONFIG_SYSROOT_DIR +environment variable for pkg-config. If this is unset the host system +is assumed to share a root with the build system. + +*Since 0.54.0* The pkg_config_libdir property may point to a list of +path used internally by Meson to set the PKG_CONFIG_LIBDIR environment +variable for pkg-config. This prevents pkg-config from searching cross +dependencies in system directories. + +One important thing to note, if you did not define an `exe_wrapper` in +the previous section, is that Meson will make a best-effort guess at +whether it can run the generated binaries on the build machine. It +determines whether this is possible by looking at the `system` and +`cpu_family` of build vs host. There will however be cases where they +do match up, but the build machine is actually not compatible with the +host machine. Typically this will happen if the libc used by the build +and host machines are incompatible, or the code relies on kernel +features not available on the build machine. One concrete example is a +macOS build machine producing binaries for an iOS Simulator x86-64 +host. They're both `darwin` and the same architecture, but their +binaries are not actually compatible. In such cases you may use the +`needs_exe_wrapper` property to override the auto-detection: + +```ini +[properties] +needs_exe_wrapper = true +``` + +### Machine Entries + +The next bit is the definition of host and target machines. Every +cross build definition must have one or both of them. If it had +neither, the build would not be a cross build but a native build. You +do not need to define the build machine, as all necessary information +about it is extracted automatically. The definitions for host and +target machines look the same. Here is a sample for host machine. + +```ini +[host_machine] +system = 'windows' +cpu_family = 'x86' +cpu = 'i686' +endian = 'little' +``` + +These values define the machines sufficiently for cross compilation +purposes. The corresponding target definition would look the same but +have `target_machine` in the header. These values are available in +your Meson scripts. There are three predefined variables called, +surprisingly, `build_machine`, `host_machine` and `target_machine`. +Determining the operating system of your host machine is simply a +matter of calling `host_machine.system()`. 
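+
+For instance, build definitions can branch on these values; a small sketch:
+
+```meson
+message('Compiling for ' + host_machine.cpu_family() + ' ' + host_machine.system())
+
+if host_machine.system() == 'windows'
+  # Only define this flag when the compiled code will run on Windows.
+  add_project_arguments('-DBUILDING_FOR_WINDOWS', language : 'c')
+endif
+```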
+
+There are two different values for the CPU. The first one is
+`cpu_family`. It is a general type of the CPU. This should have a
+value from [the CPU Family table](Reference-tables.md#cpu-families).
+*Note* that Meson does not append `el` to the `cpu_family` value for
+little endian systems. Big endian and little endian mips are both just
+`mips`, with the `endian` field set appropriately.
+
+The second value is `cpu` which is a more specific subtype for the
+CPU. Typical values for an `x86` CPU family might include `i386` or
+`i586` and for the `arm` family `armv5` or `armv7hl`. Note that CPU type
+strings are very system dependent. You might get a different value if
+you check its value on the same machine but with different operating
+systems.
+
+If you do not define your host machine, it is assumed to be the build
+machine. Similarly if you do not specify the target machine, it is
+assumed to be the host machine.
+
+## Starting a cross build
+
+Once you have the cross file, starting a build is simple:
+
+```console
+$ meson srcdir builddir --cross-file cross_file.txt
+```
+
+Once configuration is done, compilation is started by invoking `meson compile`
+in the usual way.
+
+## Introspection and system checks
+
+The main *meson* object provides two functions to determine cross
+compilation status.
+
+```meson
+meson.is_cross_build()  # returns true when cross compiling
+meson.can_run_host_binaries()  # returns true if the host binaries can be run, either with a wrapper or natively
+```
+
+You can run system checks with either the native compiler or the cross
+compiler. You just have to specify which one to use.
+
+```meson
+build_compiler = meson.get_compiler('c', native : true)
+host_compiler = meson.get_compiler('c', native : false)
+
+build_int_size = build_compiler.sizeof('int')
+host_int_size = host_compiler.sizeof('int')
+```
+
+## Mixing host and build targets
+
+Sometimes you need to build a tool which is used to generate source
+files. These are then compiled for the actual target. For this you
+would want to build some targets with the system's native compiler.
+This requires only one extra keyword argument.
+
+```meson
+native_exe = executable('mygen', 'mygen.c', native : true)
+```
+
+You can then take `native_exe` and use it as part of a generator rule or anything else you might want.
+
+## Using a custom standard library
+
+Sometimes in cross compilation you need to build your own standard
+library instead of using the one provided by the compiler. Meson has
+built-in support for switching standard libraries transparently. The
+invocation to use in your cross file is the following:
+
+```ini
+[properties]
+c_stdlib = ['mylibc', 'mylibc_dep'] # Subproject name, variable name
+```
+
+This specifies that the C standard library is provided in the Meson
+subproject `mylibc` in the internal dependency variable `mylibc_dep`. It
+is used on every cross built C target in the entire source tree
+(including subprojects) and the compiler's own standard library is
+disabled. The build definitions of these targets do not need any
+modification.
+
+Note that this is supported for any language, not only `c`, using the
+`<lang>_stdlib` property.
+
+Since *0.56.0* the variable name parameter is no longer required as long as the
+subproject calls `meson.override_dependency('c_stdlib', mylibc_dep)`.
+The above example becomes:
+
+```ini
+[properties]
+c_stdlib = 'mylibc'
+```
+
+## Changing cross file settings
+
+Cross file settings are only read when the build directory is set up
+the first time.
Any changes to them after the fact will be ignored. +This is the same as regular compiles where you can't change the +compiler once a build tree has been set up. If you need to edit your +cross file, then you need to wipe your build tree and recreate it from +scratch. + +## Custom data + +You can store arbitrary data in `properties` and access them from your +Meson files. As an example if you cross file has this: + +```ini +[properties] +somekey = 'somevalue' +``` + +then you can access that using the `meson` object like this: + +```meson +myvar = meson.get_external_property('somekey') +# myvar now has the value 'somevalue' +``` + +## Cross file locations + +As of version 0.44.0 Meson supports loading cross files from system +locations (except on Windows). This will be +$XDG_DATA_DIRS/meson/cross, or if XDG_DATA_DIRS is undefined, then +/usr/local/share/meson/cross and /usr/share/meson/cross will be tried +in that order, for system wide cross files. User local files can be +put in $XDG_DATA_HOME/meson/cross, or ~/.local/share/meson/cross if +that is undefined. + +The order of locations tried is as follows: + - A file relative to the local dir + - The user local location + - The system wide locations in order + +Distributions are encouraged to ship cross files either with their +cross compiler toolchain packages or as a standalone package, and put +them in one of the system paths referenced above. + +These files can be loaded automatically without adding a path to the +cross file. For example, if a ~/.local/share/meson/cross contains a +file called x86-linux, then the following command would start a cross +build using that cross files: + +```sh +meson builddir/ --cross-file x86-linux +``` diff --git a/meson/docs/markdown/Cuda-module.md b/meson/docs/markdown/Cuda-module.md new file mode 100644 index 000000000..24a607a72 --- /dev/null +++ b/meson/docs/markdown/Cuda-module.md @@ -0,0 +1,186 @@ +--- +short-description: CUDA module +authors: + - name: Olexa Bilaniuk + years: [2019] + has-copyright: false +... + +# Unstable CUDA Module +_Since: 0.50.0_ + +This module provides helper functionality related to the CUDA Toolkit and +building code using it. + + +**Note**: this module is unstable. It is only provided as a technology preview. +Its API may change in arbitrary ways between releases or it might be removed +from Meson altogether. + + +## Importing the module + +The module may be imported as follows: + +``` meson +cuda = import('unstable-cuda') +``` + +It offers several useful functions that are enumerated below. + + +## Functions + +### `nvcc_arch_flags()` +_Since: 0.50.0_ + +``` meson +cuda.nvcc_arch_flags(cuda_version_string, ..., + detected: string_or_array) +``` + +Returns a list of `-gencode` flags that should be passed to `cuda_args:` in +order to compile a "fat binary" for the architectures/compute capabilities +enumerated in the positional argument(s). The flags shall be acceptable to +an NVCC with CUDA Toolkit version string `cuda_version_string`. + +A set of architectures and/or compute capabilities may be specified by: + +- The single positional argument `'All'`, `'Common'` or `'Auto'` +- As (an array of) + - Architecture names (`'Kepler'`, `'Maxwell+Tegra'`, `'Turing'`) and/or + - Compute capabilities (`'3.0'`, `'3.5'`, `'5.3'`, `'7.5'`) + +A suffix of `+PTX` requests PTX code generation for the given architecture. +A compute capability given as `A.B(X.Y)` requests PTX generation for an older +virtual architecture `X.Y` before binary generation for a newer architecture +`A.B`. 
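+
+As a sketch of typical usage (the target and file names are illustrative),
+the returned flags are simply forwarded to the `cuda_args:` keyword of a
+target:
+
+```meson
+cuda = import('unstable-cuda')
+nvcc = meson.get_compiler('cuda')
+
+# Fat binary for Maxwell and Pascal, plus PTX for newer devices.
+arch_flags = cuda.nvcc_arch_flags(nvcc.version(), 'Maxwell', 'Pascal', '6.1+PTX')
+
+executable('kernels', 'kernels.cu', cuda_args : arch_flags)
+```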
+ +Multiple architectures and compute capabilities may be passed in using + +- Multiple positional arguments +- Lists of strings +- Space (` `), comma (`,`) or semicolon (`;`)-separated strings + +The single-word architectural sets `'All'`, `'Common'` or `'Auto'` +cannot be mixed with architecture names or compute capabilities. Their +interpretation is: + +| Name | Compute Capability | +|-------------------|--------------------| +| `'All'` | All CCs supported by given NVCC compiler. | +| `'Common'` | Relatively common CCs supported by given NVCC compiler. Generally excludes Tegra and Tesla devices. | +| `'Auto'` | The CCs provided by the `detected:` keyword, filtered for support by given NVCC compiler. | + +The supported architecture names and their corresponding compute capabilities +are: + +| Name | Compute Capability | +|-------------------|--------------------| +| `'Fermi'` | 2.0, 2.1(2.0) | +| `'Kepler'` | 3.0, 3.5 | +| `'Kepler+Tegra'` | 3.2 | +| `'Kepler+Tesla'` | 3.7 | +| `'Maxwell'` | 5.0, 5.2 | +| `'Maxwell+Tegra'` | 5.3 | +| `'Pascal'` | 6.0, 6.1 | +| `'Pascal+Tegra'` | 6.2 | +| `'Volta'` | 7.0 | +| `'Xavier'` | 7.2 | +| `'Turing'` | 7.5 | +| `'Ampere'` | 8.0, 8.6 | + + +Examples: + + cuda.nvcc_arch_flags('10.0', '3.0', '3.5', '5.0+PTX') + cuda.nvcc_arch_flags('10.0', ['3.0', '3.5', '5.0+PTX']) + cuda.nvcc_arch_flags('10.0', [['3.0', '3.5'], '5.0+PTX']) + cuda.nvcc_arch_flags('10.0', '3.0 3.5 5.0+PTX') + cuda.nvcc_arch_flags('10.0', '3.0,3.5,5.0+PTX') + cuda.nvcc_arch_flags('10.0', '3.0;3.5;5.0+PTX') + cuda.nvcc_arch_flags('10.0', 'Kepler 5.0+PTX') + # Returns ['-gencode', 'arch=compute_30,code=sm_30', + # '-gencode', 'arch=compute_35,code=sm_35', + # '-gencode', 'arch=compute_50,code=sm_50', + # '-gencode', 'arch=compute_50,code=compute_50'] + + cuda.nvcc_arch_flags('10.0', '3.5(3.0)') + # Returns ['-gencode', 'arch=compute_30,code=sm_35'] + + cuda.nvcc_arch_flags('8.0', 'Common') + # Returns ['-gencode', 'arch=compute_30,code=sm_30', + # '-gencode', 'arch=compute_35,code=sm_35', + # '-gencode', 'arch=compute_50,code=sm_50', + # '-gencode', 'arch=compute_52,code=sm_52', + # '-gencode', 'arch=compute_60,code=sm_60', + # '-gencode', 'arch=compute_61,code=sm_61', + # '-gencode', 'arch=compute_61,code=compute_61'] + + cuda.nvcc_arch_flags('9.2', 'Auto', detected: '6.0 6.0 6.0 6.0') + cuda.nvcc_arch_flags('9.2', 'Auto', detected: ['6.0', '6.0', '6.0', '6.0']) + # Returns ['-gencode', 'arch=compute_60,code=sm_60'] + + cuda.nvcc_arch_flags(nvcc, 'All') + # Returns ['-gencode', 'arch=compute_20,code=sm_20', + # '-gencode', 'arch=compute_20,code=sm_21', + # '-gencode', 'arch=compute_30,code=sm_30', + # '-gencode', 'arch=compute_32,code=sm_32', + # '-gencode', 'arch=compute_35,code=sm_35', + # '-gencode', 'arch=compute_37,code=sm_37', + # '-gencode', 'arch=compute_50,code=sm_50', # nvcc.version() < 7.0 + # '-gencode', 'arch=compute_52,code=sm_52', + # '-gencode', 'arch=compute_53,code=sm_53', # nvcc.version() >= 7.0 + # '-gencode', 'arch=compute_60,code=sm_60', + # '-gencode', 'arch=compute_61,code=sm_61', # nvcc.version() >= 8.0 + # '-gencode', 'arch=compute_70,code=sm_70', + # '-gencode', 'arch=compute_72,code=sm_72', # nvcc.version() >= 9.0 + # '-gencode', 'arch=compute_75,code=sm_75'] # nvcc.version() >= 10.0 + +_Note:_ This function is intended to closely replicate CMake's FindCUDA module +function `CUDA_SELECT_NVCC_ARCH_FLAGS(out_variable, [list of CUDA compute architectures])` + + + +### `nvcc_arch_readable()` +_Since: 0.50.0_ + +``` meson 
+cuda.nvcc_arch_readable(cuda_version_string, ..., + detected: string_or_array) +``` + +Has precisely the same interface as [`nvcc_arch_flags()`](#nvcc_arch_flags), +but rather than returning a list of flags, it returns a "readable" list of +architectures that will be compiled for. The output of this function is solely +intended for informative message printing. + + archs = '3.0 3.5 5.0+PTX' + readable = cuda.nvcc_arch_readable('10.0', archs) + message('Building for architectures ' + ' '.join(readable)) + +This will print + + Message: Building for architectures sm30 sm35 sm50 compute50 + +_Note:_ This function is intended to closely replicate CMake's +FindCUDA module function `CUDA_SELECT_NVCC_ARCH_FLAGS(out_variable, +[list of CUDA compute architectures])` + + + +### `min_driver_version()` +_Since: 0.50.0_ + +``` meson +cuda.min_driver_version(cuda_version_string) +``` + +Returns the minimum NVIDIA proprietary driver version required, on the +host system, by kernels compiled with a CUDA Toolkit with the given +version string. + +The output of this function is generally intended for informative +message printing, but could be used for assertions or to conditionally +enable features known to exist within the minimum NVIDIA driver +required. diff --git a/meson/docs/markdown/Custom-build-targets.md b/meson/docs/markdown/Custom-build-targets.md new file mode 100644 index 000000000..b92bbc556 --- /dev/null +++ b/meson/docs/markdown/Custom-build-targets.md @@ -0,0 +1,46 @@ +--- +short-description: Build targets for custom languages or corner-cases +... + +# Custom build targets + +While Meson tries to support as many languages and tools as possible, +there is no possible way for it to cover all corner cases. For these +cases it permits you to define custom build targets. Here is how one +would use it. + +```meson +comp = find_program('custom_compiler') + +infile = 'source_code.txt' +outfile = 'output.bin' + +mytarget = custom_target('targetname', + output : outfile, + input : infile, + command : [comp, '@INPUT@', '@OUTPUT@'], + install : true, + install_dir : 'subdir') +``` + +This would generate the binary `output.bin` and install it to +`${prefix}/subdir/output.bin`. Variable substitution works just like +it does for source generation. + +See [Generating Sources](Generating-sources.md) for more information on this topic. + +## Details on command invocation + +Meson only permits you to specify one command to run. This is by +design as writing shell pipelines into build definition files leads to +code that is very hard to maintain. If your command requires multiple +steps you need to write a wrapper script that does all the necessary +work. + +When doing this you need to be mindful of the following issues: + +* do not assume that the command is invoked in any specific directory +* a target called `target` file `outfile` defined in subdir `subdir` + must be written to `build_dir/subdir/foo.dat` +* if you need a subdirectory for temporary files, use + `build_dir/subdir/target.dir` diff --git a/meson/docs/markdown/Cython.md b/meson/docs/markdown/Cython.md new file mode 100644 index 000000000..1d30c1f97 --- /dev/null +++ b/meson/docs/markdown/Cython.md @@ -0,0 +1,33 @@ +--- +title: Cython +short-description: Support for Cython in Meson +... + +# Cython + +Meson provides native support for cython programs starting with version 0.59.0. 
+This means that you can include it as a normal language, and create targets like +any other supported language: + +```meson +lib = static_library( + 'foo', + 'foo.pyx', +) +``` + +Generally Cython is most useful when combined with the python module's +extension_module method: + +```meson +project('my project', 'cython') + +py = import('python').find_installation() +dep_py = py.dependency() + +py.extension_module( + 'foo', + 'foo.pyx', + dependencies : dep_py, +) +``` diff --git a/meson/docs/markdown/D.md b/meson/docs/markdown/D.md new file mode 100644 index 000000000..4a00a3ba7 --- /dev/null +++ b/meson/docs/markdown/D.md @@ -0,0 +1,127 @@ +--- +title: D +short-description: Compiling D sources +... + +# Compiling D applications + +Meson has support for compiling D programs. A minimal `meson.build` +file for D looks like this: + +```meson +project('myapp', 'd') + +executable('myapp', 'app.d') +``` + +## [Conditional compilation](https://dlang.org/spec/version.html) + +If you are using the +[version()](https://dlang.org/spec/version.html#version-specification) +feature for conditional compilation, you can use it using the +`d_module_versions` target property: + +```meson +project('myapp', 'd') +executable('myapp', 'app.d', d_module_versions: ['Demo', 'FeatureA']) +``` + +For debugging, [debug()](https://dlang.org/spec/version.html#debug) +conditions are compiled automatically in debug builds, and extra +identifiers can be added with the `d_debug` argument: + +```meson +project('myapp', 'd') +executable('myapp', 'app.d', d_debug: [3, 'DebugFeatureA']) +``` + +## Using embedded unittests + +If you are using embedded [unittest +functions](https://dlang.org/spec/unittest.html), your source code +needs to be compiled twice, once in regular mode, and once with +unittests active. This is done by setting the `d_unittest` target +property to `true`. Meson will only ever pass the respective +compiler's `-unittest` flag, and never have the compiler generate an +empty main function. If you need that feature in a portable way, +create an empty `main()` function for unittests yourself, since the +GNU D compiler does not have this feature. + +This is an example for using D unittests with Meson: +```meson +project('myapp_tested', 'd') + +myapp_src = ['app.d', 'alpha.d', 'beta.d'] +executable('myapp', myapp_src) + +test_exe = executable('myapp_test', myapp_src, d_unittest: true) +test('myapptest', test_exe) +``` + +# Compiling D libraries and installing them + +Building D libraries is a straightforward process, not different from +how C libraries are built in Meson. You should generate a pkg-config +file and install it, in order to make other software on the system +find the dependency once it is installed. + +This is an example on how to build a D shared library: +```meson +project('mylib', 'd', version: '1.2.0') + +project_soversion = 0 +glib_dep = dependency('glib-2.0') + +my_lib = library('mylib', + ['src/mylib/libfunctions.d'], + dependencies: [glib_dep], + install: true, + version: meson.project_version(), + soversion: project_soversion, + d_module_versions: ['FeatureA', 'featureB'] +) + +pkgc = import('pkgconfig') +pkgc.generate(name: 'mylib', + libraries: my_lib, + subdirs: 'd/mylib', + version: meson.project_version(), + description: 'A simple example D library.', + d_module_versions: ['FeatureA'] +) +install_subdir('src/mylib/', install_dir: 'include/d/mylib/') +``` + +It is important to make the D sources install in a subdirectory in the +include path, in this case `/usr/include/d/mylib/mylib`. 
All D +compilers include the `/usr/include/d` directory by default, and if +your library would be installed into `/usr/include/d/mylib`, there is +a high chance that, when you compile your project again on a machine +where you installed it, the compiler will prefer the old installed +include over the new version in the source tree, leading to very +confusing errors. + +This is an example of how to use the D library we just built and +installed in an application: +```meson +project('myapp', 'd') + +mylib_dep = dependency('mylib', version: '>= 1.2.0') +myapp_src = ['app.d', 'alpha.d', 'beta.d'] +executable('myapp', myapp_src, dependencies: [mylib_dep]) +``` + +Please keep in mind that the library and executable would both need to +be built with the exact same D compiler and D compiler version. The D +ABI is not stable across compilers and their versions, and mixing +compilers will lead to problems. + +# Integrating with DUB + +DUB is a fully integrated build system for D, but it is also a way to +provide dependencies. Adding dependencies from the [D package +registry](https://code.dlang.org/) is pretty straight forward. You can +find how to do this in +[Dependencies](Dependencies.md#some-notes-on-dub). You can also +automatically generate a `dub.json` file as explained in +[Dlang](Dlang-module.md#generate_dub_file). diff --git a/meson/docs/markdown/Dependencies.md b/meson/docs/markdown/Dependencies.md new file mode 100644 index 000000000..b4cadb73e --- /dev/null +++ b/meson/docs/markdown/Dependencies.md @@ -0,0 +1,714 @@ +--- +short-description: Dependencies for external libraries and frameworks +... + +# Dependencies + +Very few applications are fully self-contained, but rather they use +external libraries and frameworks to do their work. Meson makes it +very easy to find and use external dependencies. Here is how one would +use the zlib compression library. + +```meson +zdep = dependency('zlib', version : '>=1.2.8') +exe = executable('zlibprog', 'prog.c', dependencies : zdep) +``` + +First Meson is told to find the external library `zlib` and error out +if it is not found. The `version` keyword is optional and specifies a +version requirement for the dependency. Then an executable is built +using the specified dependency. Note how the user does not need to +manually handle compiler or linker flags or deal with any other +minutiae. + +If you have multiple dependencies, pass them as an array: + +```meson +executable('manydeps', 'file.c', dependencies : [dep1, dep2, dep3, dep4]) +``` + +If the dependency is optional, you can tell Meson not to error out if +the dependency is not found and then do further configuration. + +```meson +opt_dep = dependency('somedep', required : false) +if opt_dep.found() + # Do something. +else + # Do something else. +endif +``` + +You can pass the `opt_dep` variable to target construction functions +whether the actual dependency was found or not. Meson will ignore +non-found dependencies. + +Meson also allows to get variables that are defined in the +`pkg-config` file. This can be done by using the +`get_pkgconfig_variable` function. + +```meson +zdep_prefix = zdep.get_pkgconfig_variable('prefix') +``` + +These variables can also be redefined by passing the `define_variable` +parameter, which might be useful in certain situations: + +```meson +zdep_prefix = zdep.get_pkgconfig_variable('libdir', define_variable: ['prefix', '/tmp']) +``` + +The dependency detector works with all libraries that provide a +`pkg-config` file. 
Unfortunately several packages don't provide +pkg-config files. Meson has autodetection support for some of these, +and they are described [later in this +page](#dependencies-with-custom-lookup-functionality). + +# Arbitrary variables from dependencies that can be found multiple ways + +*Note* new in 0.51.0 +*new in 0.54.0, the `internal` keyword* + +When you need to get an arbitrary variables from a dependency that can +be found multiple ways and you don't want to constrain the type you +can use the generic `get_variable` method. This currently supports +cmake, pkg-config, and config-tool based variables. + +```meson +foo_dep = dependency('foo') +var = foo_dep.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', configtool : 'get-var', default_value : 'default') +``` + +It accepts the keywords 'cmake', 'pkgconfig', 'pkgconfig_define', +'configtool', 'internal', and 'default_value'. 'pkgconfig_define' +works just like the 'define_variable' argument to +`get_pkgconfig_variable`. When this method is invoked the keyword +corresponding to the underlying type of the dependency will be used to +look for a variable. If that variable cannot be found or if the caller +does not provide an argument for the type of dependency, one of the +following will happen: If 'default_value' was provided that value will +be returned, if 'default_value' was not provided then an error will be +raised. + +# Declaring your own + +You can declare your own dependency objects that can be used +interchangeably with dependency objects obtained from the system. The +syntax is straightforward: + +```meson +my_inc = include_directories(...) +my_lib = static_library(...) +my_dep = declare_dependency(link_with : my_lib, + include_directories : my_inc) +``` + +This declares a dependency that adds the given include directories and +static library to any target you use it in. + +# Building dependencies as subprojects + +Many platforms do not provide a system package manager. On these +systems dependencies must be compiled from source. Meson's subprojects +make it simple to use system dependencies when they are available and +to build dependencies manually when they are not. + +To make this work, the dependency must have Meson build definitions +and it must declare its own dependency like this: + +```meson + foo_dep = declare_dependency(...) +``` + +Then any project that wants to use it can write out the following +declaration in their main `meson.build` file. + +```meson + foo_dep = dependency('foo', fallback : ['foo', 'foo_dep']) +``` + +What this declaration means is that first Meson tries to look up the +dependency from the system (such as by using pkg-config). If it is not +available, then it builds subproject named `foo` and from that +extracts a variable `foo_dep`. That means that the return value of +this function is either an external or an internal dependency object. +Since they can be used interchangeably, the rest of the build +definitions do not need to care which one it is. Meson will take care +of all the work behind the scenes to make this work. + +# Dependency detection method + +You can use the keyword `method` to let Meson know what method to use +when searching for the dependency. The default value is `auto`. +Additional methods are `pkg-config`, `config-tool`, `cmake`, +`builtin`, `system`, `sysconfig`, `qmake`, `extraframework` and `dub`. 
+
+```meson
+cups_dep = dependency('cups', method : 'pkg-config')
+```
+
+For dependencies without [specific detection
+logic](#dependencies-with-custom-lookup-functionality), the dependency method
+order for `auto` is:
+
+ 1. `pkg-config`
+ 2. `cmake`
+ 3. `extraframework` (OSX only)
+
+## System
+
+Some dependencies provide no valid methods for discovery, or do so only in
+some cases. Some examples of this are Zlib, which provides both pkg-config
+and cmake, except when it is part of the base OS image (such as in FreeBSD
+and macOS); and OpenGL, which has pkg-config on Unices from glvnd or mesa,
+but has no pkg-config on macOS and Windows.
+
+In these cases Meson provides convenience wrappers in the form of `system`
+dependencies. Internally these dependencies do exactly what a user would do
+in the build system DSL or with a script, likely calling
+`compiler.find_library()`, setting `link_with` and `include_directories`. By
+putting these in Meson upstream the barrier of using them is lowered, as
+projects using Meson don't have to re-implement the logic.
+
+## Builtin
+
+Some dependencies provide no valid methods for discovery on some systems,
+because they are provided internally by the language. One example of this is
+intl, which is built into GNU or musl libc but otherwise comes as a `system`
+dependency.
+
+In these cases Meson provides convenience wrappers for the `system` dependency,
+but first checks if the functionality is usable by default.
+
+## CMake
+
+Meson can use the CMake `find_package()` function to detect
+dependencies with the builtin `Find<NAME>.cmake` modules and exported
+project configurations (usually in `/usr/lib/cmake`). Meson is able to
+use both the old-style `<NAME>_LIBRARIES` variables as well as
+imported targets.
+
+It is possible to manually specify a list of CMake targets that should
+be used with the `modules` property. However, this step is optional
+since Meson tries to automatically guess the correct target based on
+the name of the dependency.
+
+Depending on the dependency it may be necessary to explicitly specify
+a CMake target with the `modules` property if Meson is unable to guess
+it automatically.
+
+```meson
+cmake_dep = dependency('ZLIB', method : 'cmake', modules : ['ZLIB::ZLIB'])
+```
+
+Support for adding additional `COMPONENTS` for the CMake
+`find_package` lookup is provided with the `components` kwarg
+(*introduced in 0.54.0*). All specified components will be passed
+directly to `find_package(COMPONENTS)`.
+
+Support for packages which require a specified version for CMake
+`find_package` to succeed is provided with the `cmake_package_version`
+kwarg (*introduced in 0.57.0*). The specified `cmake_package_version`
+will be passed directly as the second parameter to `find_package`.
+
+It is also possible to reuse existing `Find<NAME>.cmake` files with
+the `cmake_module_path` property. Using this property is equivalent to
+setting the `CMAKE_MODULE_PATH` variable in CMake. The path(s) given
+to `cmake_module_path` should all be relative to the project source
+directory. Absolute paths should only be used if the CMake files are
+not stored in the project itself.
+
+Additional CMake parameters can be specified with the `cmake_args`
+property.
+
+## Dub
+
+Please understand that Meson is only able to find dependencies that
+exist in the local Dub repository. You need to manually fetch and
+build the target dependencies.
+
+For example, for `urld`:
+``` +dub fetch urld +dub build urld +``` + +Other thing you need to keep in mind is that both Meson and Dub need +to be using the same compiler. This can be achieved using Dub's +`-compiler` argument and/or manually setting the `DC` environment +variable when running Meson. +``` +dub build urld --compiler=dmd +DC="dmd" meson builddir +``` + +## Config tool + +[CUPS](#cups), [LLVM](#llvm), [pcap](#pcap), [WxWidgets](#wxwidgets), +[libwmf](#libwmf), [GCrypt](#libgcrypt), [GPGME](#gpgme), and GnuStep either do not provide pkg-config +modules or additionally can be detected via a config tool +(cups-config, llvm-config, libgcrypt-config, etc). Meson has native support for these +tools, and they can be found like other dependencies: + +```meson +pcap_dep = dependency('pcap', version : '>=1.0') +cups_dep = dependency('cups', version : '>=1.4') +llvm_dep = dependency('llvm', version : '>=4.0') +libgcrypt_dep = dependency('libgcrypt', version: '>= 1.8') +gpgme_dep = dependency('gpgme', version: '>= 1.0') +``` + +*Since 0.55.0* Meson won't search $PATH any more for a config tool +binary when cross compiling if the config tool did not have an entry +in the cross file. + +# Dependencies with custom lookup functionality + +Some dependencies have specific detection logic. + +Generic dependency names are case-sensitive[1](#footnote1), +but these dependency names are matched case-insensitively. The +recommended style is to write them in all lower-case. + +In some cases, more than one detection method exists, and the `method` +keyword may be used to select a detection method to use. The `auto` +method uses any checking mechanisms in whatever order Meson thinks is +best. + +e.g. libwmf and CUPS provide both pkg-config and config-tool support. +You can force one or another via the `method` keyword: + +```meson +cups_dep = dependency('cups', method : 'pkg-config') +wmf_dep = dependency('libwmf', method : 'config-tool') +``` + +## AppleFrameworks + +Use the `modules` keyword to list frameworks required, e.g. + +```meson +dep = dependency('appleframeworks', modules : 'foundation') +``` + +These dependencies can never be found for non-OSX hosts. + +## Blocks + +Enable support for Clang's blocks extension. + +```meson +dep = dependency('blocks') +``` + +*(added 0.52.0)* + +## Boost + +Boost is not a single dependency but rather a group of different +libraries. To use Boost headers-only libraries, simply add Boost as a +dependency. + +```meson +boost_dep = dependency('boost') +exe = executable('myprog', 'file.cc', dependencies : boost_dep) +``` + +To link against boost with Meson, simply list which libraries you +would like to use. + +```meson +boost_dep = dependency('boost', modules : ['thread', 'utility']) +exe = executable('myprog', 'file.cc', dependencies : boost_dep) +``` + +You can call `dependency` multiple times with different modules and +use those to link against your targets. + +If your boost headers or libraries are in non-standard locations you +can set the `BOOST_ROOT`, or the `BOOST_INCLUDEDIR` and +`BOOST_LIBRARYDIR` environment variables. *(added in 0.56.0)* You can +also set these parameters as `boost_root`, `boost_include`, and +`boost_librarydir` in your native or cross machine file. Note that +machine file variables are preferred to environment variables, and +that specifying any of these disables system-wide search for boost. + +You can set the argument `threading` to `single` to use boost +libraries that have been compiled for single-threaded use instead. 
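+
+As a sketch (the Boost module and target names here are only illustrative),
+requesting the single-threaded variants looks like this:
+
+```meson
+# Link against the single-threaded Boost builds, if they are installed
+boost_st_dep = dependency('boost', modules : ['filesystem'], threading : 'single')
+exe_st = executable('myprog_st', 'file.cc', dependencies : boost_st_dep)
+```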
+ +## CUDA + +*(added 0.53.0)* + +Enables compiling and linking against the CUDA Toolkit. The `version` +and `modules` keywords may be passed to request the use of a specific +CUDA Toolkit version and/or additional CUDA libraries, correspondingly: + +```meson +dep = dependency('cuda', version : '>=10', modules : ['cublas']) +``` + +Note that explicitly adding this dependency is only necessary if you are +using CUDA Toolkit from a C/C++ file or project, or if you are utilizing +additional toolkit libraries that need to be explicitly linked to. + +## CUPS + +`method` may be `auto`, `config-tool`, `pkg-config`, `cmake` or `extraframework`. + +## Curses + +*(Since 0.54.0)* + +Curses (and ncurses) are a cross platform pain in the butt. Meson +wraps up these dependencies in the `curses` dependency. This covers +both `ncurses` (preferred) and other curses implementations. + +`method` may be `auto`, `pkg-config`, `config-tool`, or `system`. + +*New in 0.56.0* The `config-tool` and `system` methods. + +To define some of the the preprocessor symbols mentioned in the +[curses autoconf documentation](http://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_with_curses.m4): + +```meson +conf = configuration_data() +check_headers = [ + ['ncursesw/menu.h', 'HAVE_NCURSESW_MENU_H'], + ['ncurses/menu.h', 'HAVE_NCURSES_MENU_H'], + ['menu.h', 'HAVE_MENU_H'], + ['ncursesw/curses.h', 'HAVE_NCURSESW_CURSES_H'], + ['ncursesw.h', 'HAVE_NCURSESW_H'], + ['ncurses/curses.h', 'HAVE_NCURSES_CURSES_H'], + ['ncurses.h', 'HAVE_NCURSES_H'], + ['curses.h', 'HAVE_CURSES_H'], +] + +foreach h : check_headers + if compiler.has_header(h.get(0)) + conf.set(h.get(1), 1) + endif +endforeach +``` + +## Fortran Coarrays + +*(added 0.50.0)* + + Coarrays are a Fortran language intrinsic feature, enabled by +`dependency('coarray')`. + +GCC will use OpenCoarrays if present to implement coarrays, while Intel and NAG +use internal coarray support. + +## GPGME + +*(added 0.51.0)* + +`method` may be `auto`, `config-tool` or `pkg-config`. + +## GL + +This finds the OpenGL library in a way appropriate to the platform. + +`method` may be `auto`, `pkg-config` or `system`. + +## GTest and GMock + +GTest and GMock come as sources that must be compiled as part of your +project. With Meson you don't have to care about the details, just +pass `gtest` or `gmock` to `dependency` and it will do everything for +you. If you want to use GMock, it is recommended to use GTest as well, +as getting it to work standalone is tricky. + +You can set the `main` keyword argument to `true` to use the `main()` +function provided by GTest: + +```meson +gtest_dep = dependency('gtest', main : true, required : false) +e = executable('testprog', 'test.cc', dependencies : gtest_dep) +test('gtest test', e) +``` + +## HDF5 + +*(added 0.50.0)* + +HDF5 is supported for C, C++ and Fortran. Because dependencies are +language-specific, you must specify the requested language using the +`language` keyword argument, i.e., + * `dependency('hdf5', language: 'c')` for the C HDF5 headers and libraries + * `dependency('hdf5', language: 'cpp')` for the C++ HDF5 headers and libraries + * `dependency('hdf5', language: 'fortran')` for the Fortran HDF5 headers and libraries + +Meson uses pkg-config to find HDF5. The standard low-level HDF5 +function and the `HL` high-level HDF5 functions are linked for each +language. + +`method` may be `auto`, `config-tool` or `pkg-config`. + +*New in 0.56.0* the `config-tool` method. 
+
+*New in 0.56.0* the dependencies now return proper dependency types
+ and `get_variable` and similar methods should work as expected.
+
+## intl
+
+*(added 0.59.0)*
+
+Provides access to the `*gettext` family of C functions. On systems where this
+is not built into libc, tries to find an external library providing them
+instead.
+
+`method` may be `auto`, `builtin` or `system`.
+
+## libgcrypt
+
+*(added 0.49.0)*
+
+`method` may be `auto`, `config-tool` or `pkg-config`.
+
+## libwmf
+
+*(added 0.44.0)*
+
+`method` may be `auto`, `config-tool` or `pkg-config`.
+
+## LLVM
+
+Meson has native support for LLVM going back to LLVM version
+3.5. It supports a few additional features compared to other
+config-tool based dependencies.
+
+As of 0.44.0 Meson supports the `static` keyword argument for LLVM.
+Before this LLVM >= 3.9 would always dynamically link, while older
+versions would statically link, due to a quirk in `llvm-config`.
+
+`method` may be `auto`, `config-tool`, or `cmake`.
+
+### Modules, a.k.a. Components
+
+Meson wraps LLVM's concept of components in its own modules concept.
+When you need specific components you add them as modules and Meson
+will do the right thing:
+
+```meson
+llvm_dep = dependency('llvm', version : '>= 4.0', modules : ['amdgpu'])
+```
+
+As of 0.44.0 it can also take optional modules (these will affect the arguments
+generated for a static link):
+
+```meson
+llvm_dep = dependency(
+  'llvm', version : '>= 4.0', modules : ['amdgpu'], optional_modules : ['inteljitevents'],
+)
+```
+
+### Using LLVM tools
+
+When using LLVM as a library but also needing its tools, it is often
+beneficial to use the same version. This can partially be achieved
+with the `version` argument of `find_program()`. However,
+distributions tend to package different LLVM versions in rather
+different ways. Therefore, it is often better to use the llvm
+dependency directly to retrieve the tools:
+
+```meson
+llvm_dep = dependency('llvm', version : ['>= 8', '< 9'])
+llvm_link = find_program(llvm_dep.get_variable(configtool: 'bindir') / 'llvm-link')
+```
+
+## MPI
+
+*(added 0.42.0)*
+
+MPI is supported for C, C++ and Fortran. Because dependencies are
+language-specific, you must specify the requested language using the
+`language` keyword argument, i.e.,
+ * `dependency('mpi', language: 'c')` for the C MPI headers and libraries
+ * `dependency('mpi', language: 'cpp')` for the C++ MPI headers and libraries
+ * `dependency('mpi', language: 'fortran')` for the Fortran MPI headers and libraries
+
+Meson prefers pkg-config for MPI, but if your MPI implementation does
+not provide them, it will search for the standard wrapper executables,
+`mpicc`, `mpicxx`, `mpic++`, `mpifort`, `mpif90`, `mpif77`. If these
+are not in your path, they can be specified by setting the standard
+environment variables `MPICC`, `MPICXX`, `MPIFC`, `MPIF90`, or
+`MPIF77`, during configuration. It will also try to use the Microsoft
+implementation on Windows via the `system` method.
+
+`method` may be `auto`, `config-tool`, `pkg-config` or `system`.
+
+*New in 0.54.0* The `config-tool` and `system` method values. Previous
+versions would always try `pkg-config`, then `config-tool`, then `system`.
+
+## NetCDF
+
+*(added 0.50.0)*
+
+NetCDF is supported for C, C++ and Fortran.
Because NetCDF dependencies are
+language-specific, you must specify the requested language using the
+`language` keyword argument, i.e.,
+ * `dependency('netcdf', language: 'c')` for the C NetCDF headers and libraries
+ * `dependency('netcdf', language: 'cpp')` for the C++ NetCDF headers and libraries
+ * `dependency('netcdf', language: 'fortran')` for the Fortran NetCDF headers and libraries
+
+Meson uses pkg-config to find NetCDF.
+
+## OpenMP
+
+*(added 0.46.0)*
+
+This dependency selects the appropriate compiler flags and/or libraries to use
+for OpenMP support.
+
+The `language` keyword may be used.
+
+## pcap
+
+*(added 0.42.0)*
+
+`method` may be `auto`, `config-tool` or `pkg-config`.
+
+## Python3
+
+Python3 is handled specially by Meson:
+1. Meson tries to use `pkg-config`.
+2. If `pkg-config` fails Meson uses a fallback:
+    - On Windows the fallback is the current `python3` interpreter.
+    - On OSX the fallback is a framework dependency from `/Library/Frameworks`.
+
+Note that the `python3` found by this dependency might differ from the one
+used by the `python3` module, because the module uses the current interpreter
+while the dependency tries `pkg-config` first.
+
+`method` may be `auto`, `extraframework`, `pkg-config` or `sysconfig`.
+
+## Qt4 & Qt5
+
+Meson has native Qt support. Its usage is best demonstrated with an
+example.
+
+```meson
+qt5_mod = import('qt5')
+qt5widgets = dependency('qt5', modules : 'Widgets')
+
+processed = qt5_mod.preprocess(
+  moc_headers : 'mainWindow.h',   # Only headers that need moc should be put here
+  moc_sources : 'helperFile.cpp', # must have #include"moc_helperFile.cpp"
+  ui_files : 'mainWindow.ui',
+  qresources : 'resources.qrc',
+)
+
+q5exe = executable('qt5test',
+  sources : ['main.cpp',
+             'mainWindow.cpp',
+             processed],
+  dependencies: qt5widgets)
+```
+
+Here we have a UI file created with Qt Designer and one source and
+header file each that require preprocessing with the `moc` tool. We
+also define a resource file to be compiled with `rcc`. We just have to
+tell Meson which files are which and it will take care of invoking all
+the necessary tools in the correct order, which is done with the
+`preprocess` method of the `qt5` module. Its output is simply put in
+the list of sources for the target. The `modules` keyword of
+`dependency` works just like it does with Boost. It tells which
+subparts of Qt the program uses.
+
+You can set the `main` keyword argument to `true` to use the
+`WinMain()` function provided by the qtmain static library (this argument
+does nothing on platforms other than Windows).
+
+Setting the optional `private_headers` keyword to true adds the
+private header include path of the given module(s) to the compiler
+flags. (since v0.47.0)
+
+**Note** using private headers in your project is a bad idea, do so at
+your own risk.
+
+`method` may be `auto`, `pkg-config` or `qmake`.
+
+## SDL2
+
+SDL2 can be located using `pkg-config`, the `sdl2-config` config tool,
+or as an OSX framework.
+
+`method` may be `auto`, `config-tool`, `extraframework` or
+`pkg-config`.
+
+## Shaderc
+
+*(added 0.51.0)*
+
+Shaderc currently does not ship with any means of detection.
+Nevertheless, Meson can try to detect it using `pkg-config`, but will
+default to looking for the appropriate library manually. If the
+`static` keyword argument is `true`, `shaderc_combined` is preferred.
+Otherwise, `shaderc_shared` is preferred. Note that it is not possible
+to obtain the shaderc version using this method.
+
+`method` may be `auto`, `pkg-config` or `system`.
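+
+A minimal sketch, assuming the Shaderc libraries are installed where this
+manual lookup can find them:
+
+```meson
+# static : true makes Meson prefer the monolithic shaderc_combined library
+shaderc_dep = dependency('shaderc', static : true)
+```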
+
+## Threads
+
+This dependency selects the appropriate compiler flags and/or
+libraries to use for thread support.
+
+See [threads](Threads.md).
+
+## Valgrind
+
+Meson will find valgrind using `pkg-config`, but only uses the
+compilation flags and avoids trying to link with its non-PIC static
+libs.
+
+## Vulkan
+
+*(added 0.42.0)*
+
+Vulkan can be located using `pkg-config`, or the `VULKAN_SDK`
+environment variable.
+
+`method` may be `auto`, `pkg-config` or `system`.
+
+## WxWidgets
+
+Similar to [Boost](#boost), WxWidgets is not a single library but rather
+a collection of modules. WxWidgets is supported via `wx-config`.
+Meson passes the given `modules` to the `wx-config` invocation and generates
+- `compile_args` using `wx-config --cxxflags $modules...`
+- `link_args` using `wx-config --libs $modules...`
+
+### Example
+
+```meson
+wx_dep = dependency(
+  'wxwidgets', version : '>=3.0.0', modules : ['std', 'stc'],
+)
+```
+
+```shell
+# compile_args:
+$ wx-config --cxxflags std stc
+
+# link_args:
+$ wx-config --libs std stc
+```
+
+## Zlib
+
+Zlib ships with pkg-config and cmake support, but on some operating
+systems (Windows, macOS, FreeBSD, DragonFly BSD), it is provided as
+part of the base operating system without pkg-config support. The
+`system` method can be used on these OSes to link with the bundled
+version.
+
+`method` may be `auto`, `pkg-config`, `cmake`, or `system`.
+
+*New in 0.54.0* the `system` method.
+
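+For example, a short sketch that skips pkg-config and CMake and links against
+the copy bundled with the operating system:
+
+```meson
+zlib_dep = dependency('zlib', method : 'system')
+```
+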
+1: They may appear to be case-insensitive, if the + underlying file system happens to be case-insensitive. diff --git a/meson/docs/markdown/Design-rationale.md b/meson/docs/markdown/Design-rationale.md new file mode 100644 index 000000000..462129e9c --- /dev/null +++ b/meson/docs/markdown/Design-rationale.md @@ -0,0 +1,261 @@ +--- +title: Design rationale +... + +This is the original design rationale for Meson. The syntax it +describes does not match the released version +== + +A software developer's most important tool is the editor. If you talk +to coders about the editors they use, you are usually met with massive +enthusiasm and praise. You will hear how Emacs is the greatest thing +ever or how vi is so elegant or how Eclipse's integration features +make you so much more productive. You can sense the enthusiasm and +affection that the people feel towards these programs. + +The second most important tool, even more important than the compiler, +is the build system. + +Those are pretty much universally despised. + +The most positive statement on build systems you can usually get (and +it might require some coaxing) is something along the lines of *well, +it's a terrible system, but all other options are even worse*. It is +easy to see why this is the case. For starters, commonly used free +build systems have obtuse syntaxes. They use for the most part global +variables that are set in random locations so you can never really be +sure what a given line of code does. They do strange and unpredictable +things at every turn. + +Let's illustrate this with a simple example. Suppose we want to run a +program built with GNU Autotools under GDB. The instinctive thing to +do is to just run `gdb programname`. The problem is that this may or +may not work. In some cases the executable file is a binary whereas at +other times it is a wrapper shell script that invokes the real binary +which resides in a hidden subdirectory. GDB invocation fails if the +binary is a script but succeeds if it is not. The user has to remember +the type of each one of his executables (which is an implementation +detail of the build system) just to be able to debug them. Several +other such pain points can be found in [this blog +post](http://voices.canonical.com/jussi.pakkanen/2011/09/13/autotools/). + +Given these idiosyncrasies it is no wonder that most people don't want +to have anything to do with build systems. They'll just copy-paste +code that works (somewhat) in one place to another and hope for the +best. They actively go out of their way not to understand the system +because the mere thought of it is repulsive. Doing this also provides +a kind of inverse job security. If you don't know tool X, there's less +chance of finding yourself responsible for its use in your +organisation. Instead you get to work on more enjoyable things. + +This leads to a vicious circle. Since people avoid the tools and don't +want to deal with them, very few work on improving them. The result is +apathy and stagnation. + +Can we do better? +-- + +At its core, building C and C++ code is not a terribly difficult +task. In fact, writing a text editor is a lot more complicated and +takes more effort. Yet we have lots of very high quality editors but +only few build systems with questionable quality and usability. + +So, in the grand tradition of own-itch-scratching, I decided to run a +scientific experiment. The purpose of this experiment was to explore +what would it take to build a "good" build system. 
What kind of syntax +would suit this problem? What sort of problems would this application +need to solve? What sort of solutions would be the most appropriate? + +To get things started, here is a list of requirements any modern +cross-platform build system needs to provide. + +### 1. Must be simple to use + +One of the great virtues of Python is the fact that it is very +readable. It is easy to see what a given block of code does. It is +concise, clear and easy to understand. The proposed build system must +be syntactically and semantically clean. Side effects, global state +and interrelations must be kept at a minimum or, if possible, +eliminated entirely. + +### 2. Must do the right thing by default + +Most builds are done by developers working on the code. Therefore the +defaults must be tailored towards that use case. As an example the +system shall build objects without optimization and with debug +information. It shall make binaries that can be run directly from the +build directory without linker tricks, shell scripts or magic +environment variables. + +### 3. Must enforce established best practices + +There really is no reason to compile source code without the +equivalent of `-Wall`. So enable it by default. A different kind of +best practice is the total separation of source and build +directories. All build artifacts must be stored in the build +directory. Writing stray files in the source directory is not +permitted under any circumstances. + +### 4. Must have native support for platforms that are in common use + +A lot of free software projects can be used on non-free platforms such +as Windows or OSX. The system must provide native support for the +tools of choice on those platforms. In practice this means native +support for Visual Studio and XCode. Having said IDEs invoke external +builder binaries does not count as native support. + +### 5. Must not add complexity due to obsolete platforms + +Work on this build system started during the Christmas holidays of 2012. +This provides a natural hard cutoff line of 2012/12/24. Any +platform, tool or library that was not in active use at that time is +explicitly not supported. These include Unixes such as IRIX, SunOS, +OSF-1, Ubuntu versions older than 12/10, GCC versions older than 4.7 +and so on. If these old versions happen to work, great. If they don't, +not a single line of code will be added to the system to work around +their bugs. + +### 6. Must be fast + +Running the configuration step on a moderate sized project must not +take more than five seconds. Running the compile command on a fully up +to date tree of 1000 source files must not take more than 0.1 seconds. + +### 7. Must provide easy to use support for modern sw development features + +An example is precompiled headers. Currently no free software build +system provides native support for them. Other examples could include +easy integration of Valgrind and unit tests, test coverage reporting +and so on. + +### 8. Must allow override of default values + +Sometimes you just have to compile files with only given compiler +flags and no others, or install files in weird places. The system must +allow the user to do this if he really wants to. + +Overview of the solution +-- + +Going over these requirements it becomes quite apparent that the only +viable approach is roughly the same as taken by CMake: having a domain +specific language to declare the build system. Out of this declaration +a configuration is generated for the backend build system. 
This can be +a Makefile, Visual Studio or XCode project or anything else. + +The difference between the proposed DSL and existing ones is that the +new one is declarative. It also tries to work on a higher level of +abstraction than existing systems. As an example, using external +libraries in current build systems means manually extracting and +passing around compiler flags and linker flags. In the proposed system +the user just declares that a given build target uses a given external +dependency. The build system then takes care of passing all flags and +settings to their proper locations. This means that the user can focus +on his own code rather than marshalling command line arguments from +one place to another. + +A DSL is more work than the approach taken by SCons, which is to +provide the system as a Python library. However it allows us to make +the syntax more expressive and prevent certain types of bugs by +e.g. making certain objects truly immutable. The end result is again +the same: less work for the user. + +The backend for Unix requires a bit more thought. The default choice +would be Make. However it is extremely slow. It is not uncommon on +large code bases for Make to take several minutes just to determine +that nothing needs to be done. Instead of Make we use +[Ninja](https://ninja-build.org/), which is extremely fast. The +backend code is abstracted away from the core, so other backends can +be added with relatively little effort. + +Sample code +-- + +Enough design talk, let's get to the code. Before looking at the +examples we would like to emphasize that this is not in any way the +final code. It is proof of concept code that works in the system as it +currently exists (February 2013), but may change at any time. + +Let's start simple. Here is the code to compile a single executable +binary. + +```meson +project('compile one', 'c') +executable('program', 'prog.c') +``` + +This is about as simple as one can get. First you declare the project +name and the languages it uses. Then you specify the binary to build +and its sources. The build system will do all the rest. It will add +proper suffixes (e.g. '.exe' on Windows), set the default compiler +flags and so on. + +Usually programs have more than one source file. Listing them all in +the function call can become unwieldy. That is why the system supports +keyword arguments. They look like this. + +```meson +project('compile several', 'c') +sourcelist = ['main.c', 'file1.c', 'file2.c', 'file3.c'] +executable('program', sources : sourcelist) +``` + +External dependencies are simple to use. + +```meson +project('external lib', 'c') +libdep = find_dep('extlibrary', required : true) +sourcelist = ['main.c', 'file1.c', 'file2.c', 'file3.c'] +executable('program', sources : sourcelist, dep : libdep) +``` + +In other build systems you have to manually add the compile and link +flags from external dependencies to targets. In this system you just +declare that extlibrary is mandatory and that the generated program +uses that. The build system does all the plumbing for you. + +Here's a slightly more complicated definition. It should still be +understandable. + +```meson +project('build library', 'c') +foolib = shared_library('foobar', sources : 'foobar.c',\ +install : true) +exe = executable('testfoobar', 'tester.c', link : foolib) +add_test('test library', exe) +``` + +First we build a shared library named foobar. 
It is marked +installable, so running `meson install` installs it to the library +directory (the system knows which one so the user does not have to +care). Then we build a test executable which is linked against the +library. It will not be installed, but instead it is added to the list +of unit tests, which can be run with the command `meson test`. + +Above we mentioned precompiled headers as a feature not supported by +other build systems. Here's how you would use them. + +```meson +project('pch demo', 'cxx') +executable('myapp', 'myapp.cpp', pch : 'pch/myapp.hh') +``` + +The main reason other build systems can not provide pch support this +easily is because they don't enforce certain best practices. Due to +the way include paths work, it is impossible to provide pch support +that always works with both in-source and out-of-source +builds. Mandating separate build and source directories makes this and +many other problems a lot easier. + +Get the code +-- + +The code for this experiment can be found at [the Meson +repository](https://sourceforge.net/p/meson/code/). It should be noted +that it is not a build system. It is only a proposal for one. It does +not work reliably yet. You probably should not use it as the build +system of your project. + +All that said I hope that this experiment will eventually turn into a +full blown build system. For that I need your help. Comments and +especially patches are more than welcome. diff --git a/meson/docs/markdown/Disabler.md b/meson/docs/markdown/Disabler.md new file mode 100644 index 000000000..4aed7ad65 --- /dev/null +++ b/meson/docs/markdown/Disabler.md @@ -0,0 +1,68 @@ +--- +short-description: Disabling options +... + +# Disabling parts of the build + +*This feature is available since version 0.44.0.* + +The following is a common fragment found in many projects: + +```meson +dep = dependency('foo') + +# In some different directory + +lib = shared_library('mylib', 'mylib.c', + dependencies : dep) + +# And ín a third directory + +exe = executable('mytest', 'mytest.c', + link_with : lib) +test('mytest', exe) +``` + +This works fine but gets a bit inflexible when you want to make this +part of the build optional. Basically it reduces to adding `if/else` +statements around all target invocations. Meson provides a simpler way +of achieving the same with a disabler object. + +A disabler object is created with the `disabler` function: + +```meson +d = disabler() +``` + +The only thing you can do to a disabler object is to ask if it has +been found: + +```meson +f = d.found() # returns false +``` + +Any other statement that uses a disabler object will immediately +return a disabler. For example assuming that `d` contains a disabler +object then + +```meson +d2 = some_func(d) # value of d2 will be disabler +d3 = true or d2 # value of d3 will be true because of short-circuiting +d4 = false or d2 # value of d4 will be disabler +if d # neither branch is evaluated +``` + +Thus to disable every target that depends on the dependency given +above, you can do something like this: + +```meson +if use_foo_feature + d = dependency('foo') +else + d = disabler() +endif +``` + +This concentrates the handling of this option in one place and other +build definition files do not need to be sprinkled with `if` +statements. 
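+
+For example, reusing the targets from the fragment at the top of this page
+(with the dependency variable now called `d`), nothing else needs to change:
+when `d` is a disabler, every target that uses it is skipped.
+
+```meson
+lib = shared_library('mylib', 'mylib.c',
+  dependencies : d)   # lib becomes a disabler
+
+exe = executable('mytest', 'mytest.c',
+  link_with : lib)    # and so does exe
+test('mytest', exe)   # skipped as well when d is a disabler
+```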
diff --git a/meson/docs/markdown/Dlang-module.md b/meson/docs/markdown/Dlang-module.md new file mode 100644 index 000000000..915f3a79e --- /dev/null +++ b/meson/docs/markdown/Dlang-module.md @@ -0,0 +1,44 @@ +# Dlang module + +This module provides tools related to the D programming language. + +## Usage + +To use this module, just do: **`dlang = import('dlang')`**. +You can, of course, replace the name `dlang` with anything else. + +The module only exposes one function, `generate_dub_file`, used to +automatically generate Dub configuration files. + +### generate_dub_file() +This method only has two required arguments, the project name and the +source folder. You can pass other arguments with additional keywords, +they will be automatically translated to json and added to the +`dub.json` file. + +**Structure** +```meson +generate_dub_file("project name", "source/folder", key: "value" ...) +``` + +**Example** +```meson +dlang = import('dlang') +dlang.generate_dub_file(meson.project_name().to_lower(), meson.source_root(), + authors: 'Meson Team', + description: 'Test executable', + copyright: 'Copyright © 2018, Meson Team', + license: 'MIT', + sourceFiles: 'test.d', + targetType: 'executable', + dependencies: my_dep +) +``` + +You can manually edit a Meson generated `dub.json` file or provide a +initial one. The module will only update the values specified in +`generate_dub_file()`. + +Although not required, you will need to have a `description` and +`license` if you want to publish the package in the [D package +registry](https://code.dlang.org/). diff --git a/meson/docs/markdown/External-Project-module.md b/meson/docs/markdown/External-Project-module.md new file mode 100644 index 000000000..866564ea6 --- /dev/null +++ b/meson/docs/markdown/External-Project-module.md @@ -0,0 +1,128 @@ +# External Project module + +**Note**: the functionality of this module is governed by [Meson's + rules on mixing build systems](Mixing-build-systems.md). + +*This is an experimental module, API could change.* + +This module allows building code that uses build systems other than +Meson. This module is intended to be used to build Autotools +subprojects as fallback if the dependency couldn't be found on the +system (e.g. too old distro version). + +The project will be compiled out-of-tree inside Meson's build +directory. The project will also be installed inside Meson's build +directory using make's +[`DESTDIR`](https://www.gnu.org/prep/standards/html_node/DESTDIR.html) +feature. During project installation step, that DESTDIR will be copied +verbatim into the desired location. + +External subprojects can use libraries built by Meson (main project, +or other subprojects) using pkg-config, thanks to `*-uninstalled.pc` +files generated by [`pkg.generate()`](Pkgconfig-module.md). + +External build system requirements: + +- Must support out-of-tree build. The configure script will be invoked with the + current workdir inside Meson's build directory and not subproject's top source + directory. +- Configure script must generate a `Makefile` in the current workdir. +- Configure script must take common directories like prefix, libdir, etc, as + command line arguments. +- Configure script must support common environment variable like CFLAGS, CC, etc. +- Compilation step must detect when a reconfigure is needed, and do it + transparently. + +Known limitations: + +- Executables from external projects cannot be used uninstalled, because they + would need its libraries to be installed in the final location. 
This is why + there is no `find_program()` method. +- The configure script must generate a `Makefile`, other build systems are not + yet supported. +- When cross compiling, if `PKG_CONFIG_SYSROOT_DIR` is set in environment or + `sys_root` in the cross file properties, the external subproject will not be + able to find dependencies built by Meson using pkg-config. The reason is + pkg-config and pkgconf both prepend the sysroot path to `-I` and `-L` arguments + from `-uninstalled.pc` files. This is arguably a bug that could be fixed in + future version of pkg-config/pkgconf. + +*Added 0.56.0* + +## Functions + +### `add_project()` + +This function should be called at the root directory of a project +using another build system. Usually in a `meson.build` file placed in +the top directory of a subproject, but could be also in any subdir. + +Its first positional argument is the name of the configure script to +be executed (e.g. `configure`), that file must be in the current +directory and executable. Note that if a bootstrap script is required +(e.g. `autogen.sh` when building from git instead of tarball), it can +be done using `run_command()` before calling `add_project()` method. + +Keyword arguments: + +- `configure_options`: An array of strings to be passed as arguments to the + configure script. Some special tags will be replaced by Meson before passing + them to the configure script: `@PREFIX@`, `@LIBDIR@` and `@INCLUDEDIR@`. + Note that `libdir` and `includedir` paths are relative to `prefix` in Meson + but some configure scripts requires absolute path, in that case they can be + passed as `'--libdir=@PREFIX@/@LIBDIR@'`. *Since 0.57.0* default arguments are + added in case some tags are not found in `configure_options`: + `'--prefix=@PREFIX@'`, `'--libdir=@PREFIX@/@LIBDIR@'`, and + `'--includedir=@PREFIX@/@INCLUDEDIR@'`. It was previously considered a fatal + error to not specify them. +- `cross_configure_options`: Extra options appended to `configure_options` only + when cross compiling. special tag `@HOST@` will be replaced by + `'{}-{}-{}'.format(host_machine.cpu_family(), build_machine.system(), host_machine.system()`. + If omitted it defaults to `['--host=@HOST@']`. +- `verbose`: If set to `true` the output of sub-commands ran to configure, build + and install the project will be printed onto Meson's stdout. +- `env` : environment variables to set, such as `['NAME1=value1', 'NAME2=value2']`, + a dictionary, or an [`environment()` object](Reference-manual.md#environment-object). + +Returns an [`ExternalProject`](#ExternalProject_object) object + +## `ExternalProject` object + +### Methods + +#### `dependency(libname)` + +Return a dependency object that can be used to build targets against a library +from the external project. + +Keyword arguments: +- `subdir` path relative to `includedir` to be added to the header search path. + +## Example `meson.build` file for a subproject + +```meson +project('My Autotools Project', 'c', + meson_version : '>=0.56.0', +) + +mod = import('unstable_external_project') + +p = mod.add_project('configure', + configure_options : ['--prefix=@PREFIX@', + '--libdir=@LIBDIR@', + '--incdir=@INCLUDEDIR@', + '--enable-foo', + ], +) + +mylib_dep = p.dependency('mylib') +``` + +## Using wrap file + +Most of the time the project will be built as a subproject, and +fetched using a `.wrap` file. 
In that case the simple `meson.build` +file needed to build the subproject can be provided by adding +`patch_directory=mysubproject` line in the wrap file, and place the +build definition file at +`subprojects/packagefiles/mysubproject/meson.build`. diff --git a/meson/docs/markdown/External-commands.md b/meson/docs/markdown/External-commands.md new file mode 100644 index 000000000..be9d171b6 --- /dev/null +++ b/meson/docs/markdown/External-commands.md @@ -0,0 +1,52 @@ +--- +short-description: Running external commands +... + +# External commands + +As a part of the software configuration, you may want to get extra +data by running external commands. The basic syntax is the following. + +```meson +r = run_command('command', 'arg1', 'arg2', 'arg3') +if r.returncode() != 0 + # it failed +endif +output = r.stdout().strip() +errortxt = r.stderr().strip() +``` + +Since 0.52.0, you can pass the command environment as a dictionary: + +```meson +run_command('command', 'arg1', 'arg2', env: {'FOO': 'bar'}) +``` + +Since 0.50.0, you can also pass the command +[`environment`](Reference-manual.md#environment-object) object: + +```meson +env = environment() +env.set('FOO', 'bar') +run_command('command', 'arg1', 'arg2', env: env) +``` + +The `run_command` function returns an object that can be queried for +return value and text written to stdout and stderr. The `strip` method +call is used to strip trailing and leading whitespace from strings. +Usually output from command line programs ends in a newline, which is +unwanted in string variables. The first argument can be either a +string or an executable you have detected earlier with `find_program`. + +Meson will autodetect scripts with a shebang line and run them with +the executable/interpreter specified in it both on Windows and on +Unixes. + +Note that you can not pass your command line as a single string. That +is, calling `run_command('do_something foo bar')` will not work. You +must either split up the string into separate arguments or pass the +split command as an array. It should also be noted that Meson will not +pass the command to the shell, so any command lines that try to use +things such as environment variables, backticks or pipelines will not +work. If you require shell semantics, write your command into a script +file and call that with `run_command`. diff --git a/meson/docs/markdown/FAQ.md b/meson/docs/markdown/FAQ.md new file mode 100644 index 000000000..0ed731e51 --- /dev/null +++ b/meson/docs/markdown/FAQ.md @@ -0,0 +1,631 @@ +--- +title: FAQ +... +# Meson Frequently Asked Questions + +See also [How do I do X in Meson](howtox.md). + +## Why is it called Meson? + +When the name was originally chosen, there were two main limitations: +there must not exist either a Debian package or a Sourceforge project +of the given name. This ruled out tens of potential project names. At +some point the name Gluon was considered. Gluons are elementary +particles that hold protons and neutrons together, much like a build +system's job is to take pieces of source code and a compiler and bind +them to a complete whole. + +Unfortunately this name was taken, too. Then the rest of subatomic +particles were examined and Meson was found to be available. + +## What is the correct way to use threads (such as pthreads)? + +```meson +thread_dep = dependency('threads') +``` + +This will set up everything on your behalf. People coming from +Autotools or CMake want to do this by looking for `libpthread.so` +manually. 
Don't do that, it has tricky corner cases especially when +cross compiling. + +## How to use Meson on a host where it is not available in system packages? + +Starting from version 0.29.0, Meson is available from the [Python +Package Index](https://pypi.python.org/pypi/meson/), so installing it +simply a matter of running this command: + +```console +$ pip3 install meson +``` + +If you don't have access to PyPI, that is not a problem either. Meson +has been designed to be easily runnable from an extracted source +tarball or even a git checkout. First you need to download Meson. Then +use this command to set up you build instead of plain `meson`. + +```console +$ /path/to/meson.py +``` + +After this you don't have to care about invoking Meson any more. It +remembers where it was originally invoked from and calls itself +appropriately. As a user the only thing you need to do is to `cd` into +your build directory and invoke `meson compile`. + +## Why can't I specify target files with a wildcard? + +Instead of specifying files explicitly, people seem to want to do this: + +```meson +executable('myprog', sources : '*.cpp') # This does NOT work! +``` + +Meson does not support this syntax and the reason for this is simple. +This can not be made both reliable and fast. By reliable we mean that +if the user adds a new source file to the subdirectory, Meson should +detect that and make it part of the build automatically. + +One of the main requirements of Meson is that it must be fast. This +means that a no-op build in a tree of 10 000 source files must take no +more than a fraction of a second. This is only possible because Meson +knows the exact list of files to check. If any target is specified as +a wildcard glob, this is no longer possible. Meson would need to +re-evaluate the glob every time and compare the list of files produced +against the previous list. This means inspecting the entire source +tree (because the glob pattern could be `src/\*/\*/\*/\*.cpp` or +something like that). This is impossible to do efficiently. + +The main backend of Meson is Ninja, which does not support wildcard +matches either, and for the same reasons. + +Because of this, all source files must be specified explicitly. + +## But I really want to use wildcards! + +If the tradeoff between reliability and convenience is acceptable to +you, then Meson gives you all the tools necessary to do wildcard +globbing. You are allowed to run arbitrary commands during +configuration. First you need to write a script that locates the files +to compile. Here's a simple shell script that writes all `.c` files in +the current directory, one per line. + + +```bash +#!/bin/sh + +for i in *.c; do + echo $i +done +``` + +Then you need to run this script in your Meson file, convert the +output into a string array and use the result in a target. + +```meson +c = run_command('grabber.sh') +sources = c.stdout().strip().split('\n') +e = executable('prog', sources) +``` + +The script can be any executable, so it can be written in shell, +Python, Lua, Perl or whatever you wish. + +As mentioned above, the tradeoff is that just adding new files to the +source directory does *not* add them to the build automatically. To +add them you need to tell Meson to reinitialize itself. The simplest +way is to touch the `meson.build` file in your source root. Then Meson +will reconfigure itself next time the build command is run. 
Advanced +users can even write a small background script that utilizes a +filesystem event queue, such as +[inotify](https://en.wikipedia.org/wiki/Inotify), to do this +automatically. + +## Should I use `subdir` or `subproject`? + +The answer is almost always `subdir`. Subproject exists for a very +specific use case: embedding external dependencies into your build +process. As an example, suppose we are writing a game and wish to use +SDL. Let us further suppose that SDL comes with a Meson build +definition. Let us suppose even further that we don't want to use +prebuilt binaries but want to compile SDL for ourselves. + +In this case you would use `subproject`. The way to do it would be to +grab the source code of SDL and put it inside your own source +tree. Then you would do `sdl = subproject('sdl')`, which would cause +Meson to build SDL as part of your build and would then allow you to +link against it or do whatever else you may prefer. + +For every other use you would use `subdir`. As an example, if you +wanted to build a shared library in one dir and link tests against it +in another dir, you would do something like this: + +```meson +project('simple', 'c') +subdir('src') # library is built here +subdir('tests') # test binaries would link against the library here +``` + +## Why is there not a Make backend? + +Because Make is slow. This is not an implementation issue, Make simply +can not be made fast. For further info we recommend you read [this +post](http://neugierig.org/software/chromium/notes/2011/02/ninja.html) +by Evan Martin, the author of Ninja. Makefiles also have a syntax that +is very unpleasant to write which makes them a big maintenance burden. + +The only reason why one would use Make instead of Ninja is working on +a platform that does not have a Ninja port. Even in this case it is an +order of magnitude less work to port Ninja than it is to write a Make +backend for Meson. + +Just use Ninja, you'll be happier that way. I guarantee it. + +## Why is Meson not just a Python module so I could code my build setup in Python? + +A related question to this is *Why is Meson's configuration language +not Turing-complete?* + +There are many good reasons for this, most of which are summarized on +this web page: [Against The Use Of Programming Languages in +Configuration Files](https://taint.org/2011/02/18/001527a.html). + +In addition to those reasons, not exposing Python or any other "real" +programming language makes it possible to port Meson's implementation +to a different language. This might become necessary if, for example, +Python turns out to be a performance bottleneck. This is an actual +problem that has caused complications for GNU Autotools and SCons. + +## How do I do the equivalent of Libtools export-symbol and export-regex? + +Either by using [GCC symbol +visibility](https://gcc.gnu.org/wiki/Visibility) or by writing a +[linker +script](https://ftp.gnu.org/old-gnu/Manuals/ld-2.9.1/html_mono/ld.html). This +has the added benefit that your symbol definitions are in a standalone +file instead of being buried inside your build definitions. An example +can be found +[here](https://github.com/jpakkane/meson/tree/master/test%20cases/linuxlike/3%20linker%20script). + +## My project works fine on Linux and MinGW but fails to link with MSVC due to a missing .lib file (fatal error LNK1181). Why? + +With GCC, all symbols on shared libraries are exported automatically +unless you specify otherwise. With MSVC no symbols are exported by +default. 
If your shared library exports no symbols, MSVC will silently +not produce an import library file leading to failures. The solution +is to add symbol visibility definitions [as specified in GCC +wiki](https://gcc.gnu.org/wiki/Visibility). + +## I added some compiler flags and now the build fails with weird errors. What is happening? + +You probably did the equivalent to this: + +```meson +executable('foobar', ... + c_args : '-some_arg -other_arg') +``` + +Meson is *explicit*. In this particular case it will **not** +automatically split your strings at whitespaces, instead it will take +it as is and work extra hard to pass it to the compiler unchanged, +including quoting it properly over shell invocations. This is +mandatory to make e.g. files with spaces in them work flawlessly. To +pass multiple command line arguments, you need to explicitly put them +in an array like this: + +```meson +executable('foobar', ... + c_args : ['-some_arg', '-other_arg']) +``` + +## Why are changes to default project options ignored? + +You probably had a project that looked something like this: + +```meson +project('foobar', 'cpp') +``` + +This defaults to `c++11` on GCC compilers. Suppose you want to use +`c++14` instead, so you change the definition to this: + +```meson +project('foobar', 'cpp', default_options : ['cpp_std=c++14']) +``` + +But when you recompile, it still uses `c++11`. The reason for this is +that default options are only looked at when you are setting up a +build directory for the very first time. After that the setting is +considered to have a value and thus the default value is ignored. To +change an existing build dir to `c++14`, either reconfigure your build +dir with `meson configure` or delete the build dir and recreate it +from scratch. + +The reason we don't automatically change the option value when the +default is changed is that it is impossible to know to do that +reliably. The actual question that we need to solve is "if the +option's value is foo and the default value is bar, should we change +the option value to bar also". There are many choices: + + - if the user has changed the value themselves from the default, then + we must not change it back + + - if the user has not changed the value, but changes the default + value, then this section's premise would seem to indicate that the + value should be changed + + - suppose the user changes the value from the default to foo, then + back to bar and then changes the default value to bar, the correct + step to take is ambiguous by itself + +In order to solve the latter question we would need to remember not +only the current and old value, but also all the times the user has +changed the value and from which value to which other value. Since +people don't remember their own actions that far back, toggling +between states based on long history would be confusing. + +Because of this we do the simple and understandable thing: default +values are only defaults and will never affect the value of an option +once set. + +## Does wrap download sources behind my back? + +It does not. In order for Meson to download anything from the net +while building, two conditions must be met. + +First of all there needs to be a `.wrap` file with a download URL in +the `subprojects` directory. If one does not exist, Meson will not +download anything. + +The second requirement is that there needs to be an explicit +subproject invocation in your `meson.build` files. 
Either +`subproject('foobar')` or `dependency('foobar', fallback : ['foobar', +'foo_dep'])`. If these declarations either are not in any build file +or they are not called (due to e.g. `if/else`) then nothing is +downloaded. + +If this is not sufficient for you, starting from release 0.40.0 Meson +has a option called `wrap-mode` which can be used to disable wrap +downloads altogether with `--wrap-mode=nodownload`. You can also +disable dependency fallbacks altogether with `--wrap-mode=nofallback`, +which also implies the `nodownload` option. + +If on the other hand, you want Meson to always use the fallback +for dependencies, even when an external dependency exists and could +satisfy the version requirements, for example in order to make +sure your project builds when fallbacks are used, you can use +`--wrap-mode=forcefallback` since 0.46.0. + +## Why is Meson implemented in Python rather than [programming language X]? + +Because build systems are special in ways normal applications aren't. + +Perhaps the biggest limitation is that because Meson is used to build +software at the very lowest levels of the OS, it is part of the core +bootstrap for new systems. Whenever support for a new CPU architecture +is added, Meson must run on the system before software using it can be +compiled natively. This requirement adds two hard limitations. + +The first one is that Meson must have the minimal amount of +dependencies, because they must all be built during the bootstrap to +get Meson to work. + +The second is that Meson must support all CPU architectures, both +existing and future ones. As an example many new programming languages +have only an LLVM based compiler available. LLVM has limited CPU +support compared to, say, GCC, and thus bootstrapping Meson on such +platforms would first require adding new processor support to +LLVM. This is in most cases unfeasible. + +A further limitation is that we want developers on as many platforms +as possible to submit to Meson development using the default tools +provided by their operating system. In practice what this means is +that Windows developers should be able to contribute using nothing but +Visual Studio. + +At the time of writing (April 2018) there are only three languages +that could fulfill these requirements: + + - C + - C++ + - Python + +Out of these we have chosen Python because it is the best fit for our +needs. + +## I have proprietary compiler toolchain X that does not work with Meson, how can I make it work? + +Meson needs to know several details about each compiler in order to +compile code with it. These include things such as which compiler +flags to use for each option and how to detect the compiler from its +output. This information can not be input via a configuration file, +instead it requires changes to Meson's source code that need to be +submitted to Meson master repository. In theory you can run your own +forked version with custom patches, but that's not good use of your +time. Please submit the code upstream so everyone can use the +toolchain. + +The steps for adding a new compiler for an existing language are +roughly the following. For simplicity we're going to assume a C +compiler. + +- Create a new class with a proper name in + `mesonbuild/compilers/c.py`. Look at the methods that other + compilers for the same language have and duplicate what they do. + +- If the compiler can only be used for cross compilation, make sure to + flag it as such (see existing compiler classes for examples). 
+ +- Add detection logic to `mesonbuild/environment.py`, look for a + method called `detect_c_compiler`. + +- Run the test suite and fix issues until the tests pass. + +- Submit a pull request, add the result of the test suite to your MR + (linking an existing page is fine). + +- If the compiler is freely available, consider adding it to the CI + system. + +## Why does building my project with MSVC output static libraries called `libfoo.a`? + +The naming convention for static libraries on Windows is usually +`foo.lib`. Unfortunately, import libraries are also called `foo.lib`. + +This causes filename collisions with the default library type where we +build both shared and static libraries, and also causes collisions +during installation since all libraries are installed to the same +directory by default. + +To resolve this, we decided to default to creating static libraries of +the form `libfoo.a` when building with MSVC. This has the following +advantages: + +1. Filename collisions are completely avoided. +1. The format for MSVC static libraries is `ar`, which is the same as the GNU + static library format, so using this extension is semantically correct. +1. The static library filename format is now the same on all platforms and with + all toolchains. +1. Both Clang and GNU compilers can search for `libfoo.a` when specifying + a library as `-lfoo`. This does not work for alternative naming schemes for + static libraries such as `libfoo.lib`. +1. Since `-lfoo` works out of the box, pkgconfig files will work correctly for + projects built with both MSVC, GCC, and Clang on Windows. +1. MSVC does not have arguments to search for library filenames, and [it does + not care what the extension is](https://docs.microsoft.com/en-us/cpp/build/reference/link-input-files?view=vs-2019), + so specifying `libfoo.a` instead of `foo.lib` does not change the workflow, + and is an improvement since it's less ambiguous. + +If, for some reason, you really need your project to output static +libraries of the form `foo.lib` when building with MSVC, you can set +the +[`name_prefix:`](https://mesonbuild.com/Reference-manual.html#library) +kwarg to `''` and the +[`name_suffix:`](https://mesonbuild.com/Reference-manual.html#library) +kwarg to `'lib'`. To get the default behaviour for each, you can +either not specify the kwarg, or pass `[]` (an empty array) to it. + +## Do I need to add my headers to the sources list like in Autotools? + +Autotools requires you to add private and public headers to the sources list so +that it knows what files to include in the tarball generated by `make dist`. +Meson's `dist` command simply gathers everything committed to your git/hg +repository and adds it to the tarball, so adding headers to the sources list is +pointless. + +Meson uses Ninja which uses compiler dependency information to automatically +figure out dependencies between C sources and headers, so it will rebuild +things correctly when a header changes. + +The only exception to this are generated headers, for which you must [declare +dependencies correctly](#how-do-i-tell-meson-that-my-sources-use-generated-headers). + +If, for whatever reason, you do add non-generated headers to the sources list +of a target, Meson will simply ignore them. + +## How do I tell Meson that my sources use generated headers? + +Let's say you use a [`custom_target()`](https://mesonbuild.com/Reference-manual.html#custom_target) +to generate the headers, and then `#include` them in your C code. 
Here's how
+you ensure that Meson generates the headers before trying to compile any
+sources in the build target:
+
+```meson
+libfoo_gen_headers = custom_target('gen-headers', ..., output: 'foo-gen.h')
+libfoo_sources = files('foo-utils.c', 'foo-lib.c')
+# Add generated headers to the list of sources for the build target
+libfoo = library('foo', sources: [libfoo_sources + libfoo_gen_headers])
+```
+
+Now let's say you have a new target that links to `libfoo`:
+
+```meson
+libbar_sources = files('bar-lib.c')
+libbar = library('bar', sources: libbar_sources, link_with: libfoo)
+```
+
+This adds a **link-time** dependency between the two targets, but note that the
+sources of the targets have **no compile-time** dependencies and can be built
+in any order, which improves parallelism and speeds up builds.
+
+If the sources in `libbar` *also* use `foo-gen.h`, that's a *compile-time*
+dependency, and you'll have to add `libfoo_gen_headers` to `sources:` for
+`libbar` too:
+
+```meson
+libbar_sources = files('bar-lib.c')
+libbar = library('bar', sources: libbar_sources + libfoo_gen_headers, link_with: libfoo)
+```
+
+Alternatively, if you have multiple libraries with sources that link to
+a library and also use its generated headers, this code is equivalent to the above:
+
+```meson
+# Add generated headers to the list of sources for the build target
+libfoo = library('foo', sources: libfoo_sources + libfoo_gen_headers)
+
+# Declare a dependency that will add the generated headers to sources
+libfoo_dep = declare_dependency(link_with: libfoo, sources: libfoo_gen_headers)
+
+...
+
+libbar = library('bar', sources: libbar_sources, dependencies: libfoo_dep)
+```
+
+**Note:** You should only add *headers* to `sources:` while declaring
+a dependency. If your custom target outputs both sources and headers, you can
+use the subscript notation to get only the header(s):
+
+```meson
+libfoo_gen_sources = custom_target('gen-headers', ..., output: ['foo-gen.h', 'foo-gen.c'])
+libfoo_gen_headers = libfoo_gen_sources[0]
+
+# Add static and generated sources to the target
+libfoo = library('foo', sources: libfoo_sources + libfoo_gen_sources)
+
+# Declare a dependency that will add the generated *headers* to sources
+libfoo_dep = declare_dependency(link_with: libfoo, sources: libfoo_gen_headers)
+...
+libbar = library('bar', sources: libbar_sources, dependencies: libfoo_dep)
+```
+
+A good example of a generator that outputs both sources and headers is
+[`gnome.mkenums()`](https://mesonbuild.com/Gnome-module.html#gnomemkenums).
+
+## How do I disable exceptions and RTTI in my C++ project?
+
+With the `cpp_eh` and `cpp_rtti` options. A typical invocation would
+look like this:
+
+```
+meson -Dcpp_eh=none -Dcpp_rtti=false
+```
+
+The RTTI option is only available since Meson version 0.53.0.
+
+## Should I check for `buildtype` or individual options like `debug` in my build files?
+
+This depends highly on what you actually need to happen. The
+`buildtype` option is meant to describe the current build's
+_intent_. That is, what it will be used for. Individual options are
+for determining what the exact state is. This becomes clearer with a
+few examples.
+
+Suppose you have a source file that is known to miscompile when using
+`-O3` and requires a workaround. Then you'd write something like this:
+
+```meson
+if get_option('optimization') == '3'
+  add_project_arguments('-DOPTIMIZATION_WORKAROUND', ...)
+endif
+```
+
+On the other hand, if your project has extra logging and sanity checks
+that you would like to be enabled during day-to-day development
+work (which uses the `debug` buildtype), you'd do this instead:
+
+```meson
+if get_option('buildtype') == 'debug'
+  add_project_arguments('-DENABLE_EXTRA_CHECKS', ...)
+endif
+```
+
+In this way the extra options are automatically used during
+development but are not compiled into release builds. Note that (since
+Meson 0.57.0) you can set the optimization level to, say, 2 in your
+debug builds if you want to. If you had keyed this flag on the
+optimization level instead, it would not be applied in that case.
+
+## How do I use a library before declaring it?
+
+This is valid (and good) code:
+```meson
+libA = library('libA', 'fileA.cpp', link_with : [])
+libB = library('libB', 'fileB.cpp', link_with : [libA])
+```
+But there is currently no way to get something like this to work:
+```meson
+libB = library('libB', 'fileB.cpp', link_with : [libA])
+libA = library('libA', 'fileA.cpp', link_with : [])
+```
+This means that you **have** to write your `library(...)` calls in the order that the
+dependencies flow. While ideas to make arbitrary orders possible exist, they were
+rejected because reordering the `library(...)` calls was considered the "proper"
+way. See [here](https://github.com/mesonbuild/meson/issues/8178) for the discussion.
+
+## Why doesn't meson have user defined functions/macros?
+
+The tl;dr answer to this is that Meson's design is focused on solving specific
+problems rather than providing a general purpose language to write complex
+code solutions in build files. Build systems should be quick to write and
+quick to understand; functions muddle this simplicity.
+
+The long answer is twofold:
+
+First, Meson aims to provide a rich set of tools that solve specific problems
+simply out of the box. This is similar to the "batteries included" mentality of
+Python. By providing tools that solve common problems in the simplest way
+possible *in* Meson we are solving that problem for everyone instead of forcing
+everyone to solve that problem for themselves over and over again, often
+badly. One example of this is Meson's dependency wrappers around various
+config-tool executables (sdl-config, llvm-config, etc). In other build
+systems each user of that dependency writes a wrapper and deals with the
+corner cases (or doesn't, as is often the case); in Meson we handle them
+internally, so everyone gets the fixes and the corner cases are ironed out for
+*everyone*. Providing user defined functions or macros goes directly against
+this design goal.
+
+Second, functions and macros make the build system more difficult to reason
+about. When you encounter some function call, you can refer to the reference
+manual to see that function and its signature. Instead of spending
+frustrating hours trying to interpret some bit of m4 or following long include
+paths to figure out what `function1` does (it calls `function2`, which calls
+`function3`, ad infinitum), you know what the build system is doing. Unless
+you're actively developing Meson itself, it's just a tool to orchestrate
+building the thing you actually care about. We want you to spend as little
+time worrying about build systems as possible so you can spend more time on
+*your code*.
+
+Many times user defined functions are used because the language lacks
+loops or because loops are tedious to use. Meson has both arrays/lists
+and hashes/dicts natively.
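+
+For instance, dictionaries can be iterated directly with `foreach`; a minimal
+sketch (the target and file names here are made up for illustration):
+
+```meson
+progs = {
+  'exe1' : ['1.c', 'common.c'],
+  'exe2' : ['2.c', 'common.c'],
+}
+
+foreach name, srcs : progs
+  # One executable per dictionary entry, no user-defined function needed
+  executable(name, srcs)
+endforeach
+```
+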
Compare the following pseudo code: + +```meson +func(name, sources, extra_args) + executable( + name, + sources, + c_args : extra_args + ) +endfunc + +func(exe1, ['1.c', 'common.c'], []) +func(exe2, ['2.c', 'common.c'], []) +func(exe2_a, ['2.c', 'common.c'], ['-arg']) +``` + +```meson +foreach e : [['1', '1.c', []], + ['2', '2.c', []], + ['2', '2.c', ['-arg']]] + executable( + 'exe' + e[0], + e[1], + c_args : e[2], + ) +endforeach +``` + +The loop is both less code and is much easier to reason about than the function +version is, especially if the function were to live in a separate file, as is +common in other popular build systems. + +Build system DSLs also tend to be badly thought out as generic programming +languages, Meson tries to make it easy to use external scripts or programs +for handling complex problems. While one can't always convert build logic +into a scripting language (or compiled language), when it can be done this is +often a better solution. External languages tend to be well-thought-out and +tested, generally don't regress, and users are more likely to have domain +knowledge about them. They also tend to have better tooling (such as +autocompletion, linting, testing solutions), which make them a lower +maintenance burden over time. diff --git a/meson/docs/markdown/Feature-autodetection.md b/meson/docs/markdown/Feature-autodetection.md new file mode 100644 index 000000000..4d366d939 --- /dev/null +++ b/meson/docs/markdown/Feature-autodetection.md @@ -0,0 +1,39 @@ +--- +short-description: Auto-detection of features like ccache and code coverage +... + +# Feature autodetection + +Meson is designed for high productivity. It tries to do as many things +automatically as it possibly can. + +Ccache +-- + +[Ccache](https://ccache.dev/) is a cache system designed to make +compiling faster. When you run Meson for the first time for a given +project, it checks if Ccache is installed. If it is, Meson will use it +automatically. + +If you do not wish to use Ccache for some reason, just specify your +compiler with environment variables `CC` and/or `CXX` when first +running Meson (remember that once specified the compiler can not be +changed). Meson will then use the specified compiler without Ccache. + +Coverage +-- + +When doing a code coverage build, Meson will check for existence of +the binaries `gcovr`, `lcov` and `genhtml`. If version 3.3 or higher +of the first is found, targets called *coverage-text*, *coverage-xml* +and *coverage-html* are generated. Alternatively, if the latter two +are found, only the target *coverage-html* is generated. Coverage +reports can then be produced simply by calling e.g. `meson compile +coverage-xml`. As a convenience, a high-level *coverage* target is +also generated which will produce all 3 coverage report types, if +possible. + +Note that generating any of the coverage reports described above +requires the tests (i.e. `meson test`) to finish running so the +information about the functions that are called in the tests can be +gathered for the report. diff --git a/meson/docs/markdown/Fs-module.md b/meson/docs/markdown/Fs-module.md new file mode 100644 index 000000000..663aba41b --- /dev/null +++ b/meson/docs/markdown/Fs-module.md @@ -0,0 +1,217 @@ +# FS (filesystem) module + +This module provides functions to inspect the file system. It is +available starting with version 0.53.0. 
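+
+For orientation, a minimal sketch of importing the module and calling one of
+the functions documented below (the file name is just an illustration):
+
+```meson
+fs = import('fs')
+
+# Warn at configure time if an expected data file is missing
+if not fs.is_file('data/defaults.ini')
+  warning('data/defaults.ini is missing')
+endif
+```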
+ +Since 0.59.0, all functions accept `files()` objects if they can do something +useful with them (this excludes `exists`, `is_dir`, `is_file`, `is_absolute` +since a `files()` object is always the absolute path to an existing file). + +## File lookup rules + +Non-absolute paths are looked up relative to the directory where the +current `meson.build` file is. + +If specified, a leading `~` is expanded to the user home directory. +Environment variables are not available as is the rule throughout Meson. +That is, $HOME, %USERPROFILE%, $MKLROOT, etc. have no meaning to the Meson +filesystem module. If needed, pass such variables into Meson via command +line options in `meson_options.txt`, native-file or cross-file. + +Where possible, symlinks and parent directory notation are resolved to an +absolute path. + +### exists + +Takes a single string argument and returns true if an entity with that +name exists on the file system. This can be a file, directory or a +special entry such as a device node. + +### is_dir + +Takes a single string argument and returns true if a directory with +that name exists on the file system. + +### is_file + +Takes a single string argument and returns true if an file with that +name exists on the file system. + +### is_symlink + +Takes a single string or (since 0.59.0) `files()` argument and returns true if +the path pointed to by the string is a symbolic link. + +## File Parameters + +### is_absolute + +*since 0.54.0* + +Return a boolean indicating if the path string or (since 0.59.0) `files()` +specified is absolute, WITHOUT expanding `~`. + +Examples: + +```meson +fs.is_absolute('~') # false + +home = fs.expanduser('~') +fs.is_absolute(home) # true + +fs.is_absolute(home / 'foo') # true, even if ~/foo doesn't exist + +fs.is_absolute('foo/bar') # false, even if ./foo/bar exists +``` + +### hash + +The `fs.hash(filename, hash_algorithm)` method returns a string containing +the hexadecimal `hash_algorithm` digest of a file. +`hash_algorithm` is a string; the available hash algorithms include: +md5, sha1, sha224, sha256, sha384, sha512. + +### size + +The `fs.size(filename)` method returns the size of the file in integer bytes. + +### is_samepath + +The `fs.is_samepath(path1, path2)` returns boolean `true` if both +paths resolve to the same path. For example, suppose path1 is a +symlink and path2 is a relative path. If `path1` can be resolved to +`path2`, then `true` is returned. If `path1` is not resolved to +`path2`, `false` is returned. If `path1` or `path2` do not exist, +`false` is returned. + +Examples: + +```meson +x = 'foo.txt' +y = 'sub/../foo.txt' +z = 'bar.txt' # a symlink pointing to foo.txt +j = 'notafile.txt' # non-existent file + +fs.is_samepath(x, y) # true +fs.is_samepath(x, z) # true +fs.is_samepath(x, j) # false + +p = 'foo/bar' +q = 'foo/bar/baz/..' +r = 'buz' # a symlink pointing to foo/bar +s = 'notapath' # non-existent directory + +fs.is_samepath(p, q) # true +fs.is_samepath(p, r) # true +fs.is_samepath(p, s) # false +``` + +## Filename modification + +The files need not actually exist yet for these path string +manipulation methods. + +### expanduser + +*since 0.54.0* + +A path string with a leading `~` is expanded to the user home +directory + +Examples: + +```meson +fs.expanduser('~') # user home directory + +fs.expanduser('~/foo') # /foo +``` + +### as_posix + +*since 0.54.0* + +`fs.as_posix(path)` assumes a Windows path, even if on a Unix-like +system. Thus, all `'\'` or `'\\'` are turned to '/', even if you meant +to escape a character. 
+ +Examples + +```meson +fs.as_posix('\\') == '/' # true +fs.as_posix('\\\\') == '/' # true + +fs.as_posix('foo\\bar/baz') == 'foo/bar/baz' # true +``` + +### replace_suffix + +The `replace_suffix` method is a *string manipulation* convenient for +filename modifications. It allows changing the filename suffix like: + +#### swap suffix + +```meson +original = '/opt/foo.ini' +new = fs.replace_suffix(original, '.txt') # /opt/foo.txt +``` + +#### add suffix + +```meson +original = '/opt/foo' +new = fs.replace_suffix(original, '.txt') # /opt/foo.txt +``` + +#### compound suffix swap + +```meson +original = '/opt/foo.dll.a' +new = fs.replace_suffix(original, '.so') # /opt/foo.dll.so +``` + +#### delete suffix + +```meson +original = '/opt/foo.dll.a' +new = fs.replace_suffix(original, '') # /opt/foo.dll +``` + +### parent + +Returns the parent directory (i.e. dirname). + +```meson +new = fs.parent('foo/bar') # foo +new = fs.parent('foo/bar/baz.dll') # foo/bar +``` + +### name + +Returns the last component of the path (i.e. basename). + +```meson +fs.name('foo/bar/baz.dll.a') # baz.dll.a +``` + +### stem + +*since 0.54.0* + +Returns the last component of the path, dropping the last part of the +suffix + +```meson +fs.stem('foo/bar/baz.dll') # baz +fs.stem('foo/bar/baz.dll.a') # baz.dll +``` + +### read +- `read(path, encoding: 'utf-8')` *(since 0.57.0)*: + return a [string](Syntax.md#strings) with the contents of the given `path`. + If the `encoding` keyword argument is not specified, the file specified by + `path` is assumed to be utf-8 encoded. Binary files are not supported. The + provided paths should be relative to the current `meson.current_source_dir()` + or an absolute path outside the build directory is accepted. If the file + specified by `path` changes, this will trigger Meson to reconfigure the + project. If the file specified by `path` is a `files()` object it + cannot refer to a built file. diff --git a/meson/docs/markdown/Generating-sources.md b/meson/docs/markdown/Generating-sources.md new file mode 100644 index 000000000..c09819f60 --- /dev/null +++ b/meson/docs/markdown/Generating-sources.md @@ -0,0 +1,206 @@ +--- +short-description: Generation of source files before compilation +... + +# Generating sources + +Sometimes source files need to be preprocessed before they are passed +to the actual compiler. As an example you might want build an IDL +compiler and then run some files through that to generate actual +source files. In Meson this is done with +[`generator()`](Reference-manual.md#generator) or +[`custom_target()`](Reference-manual.md#custom_target). + +## Using custom_target() + +Let's say you have a build target that must be built using sources +generated by a compiler. The compiler can either be a built target: + +```meson +mycomp = executable('mycompiler', 'compiler.c') +``` + +Or an external program provided by the system, or script inside the +source tree: + +```meson +mycomp = find_program('mycompiler') +``` + +Custom targets can take zero or more input files and use them to +generate one or more output files. Using a custom target, you can run +this compiler at build time to generate the sources: + +```meson +gen_src = custom_target('gen-output', + input : ['somefile1.c', 'file2.c'], + output : ['out.c', 'out.h'], + command : [mycomp, '@INPUT@', + '--c-out', '@OUTPUT0@', + '--h-out', '@OUTPUT1@']) +``` + +The `@INPUT@` there will be transformed to `'somefile1.c' +'file2.c'`. Just like the output, you can also refer to each input +file individually by index. 
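+
+If the command needs each input separately, a sketch of the same target using
+indexed placeholders (the option names are made up):
+
+```meson
+gen_src = custom_target('gen-output',
+  input : ['somefile1.c', 'file2.c'],
+  output : ['out.c', 'out.h'],
+  command : [mycomp, '--first-in', '@INPUT0@',
+             '--second-in', '@INPUT1@',
+             '--c-out', '@OUTPUT0@',
+             '--h-out', '@OUTPUT1@'])
+```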
+ +Then you just put that in your program and you're done. + +### Generating headers + +Adding a generated header to a source list will ensure that the header +is generated and that the proper include paths are created for the +target: + +```meson +prog_python = import('python').find_installation('python3') + +foo_c = custom_target( + 'foo.c', + output : 'foo.c', + input : 'my_gen.py', + command : [prog_python, '@INPUT@', '--code', '@OUTPUT@'], +) + +foo_h = custom_target( + 'foo.h', + output : 'foo.h', + input : 'my_gen.py', + command : [prog_python, '@INPUT@', '--header', '@OUTPUT@'], +) + +libfoo = static_library('foo', [foo_c, foo_h]) + +executable('myexe', ['main.c', foo_h], link_with : libfoo) +``` + +Each target that depends on a generated header should add that header +to it's sources, as seen above with `libfoo` and `myexe`. This is +because there is no way for Meson or the backend to know that `myexe` +depends on `foo.h` just because `libfoo` does, it could be a private +header. + +### Generating multiple files at a time + +Sometimes it makes sense for a single generator to create two or more +files at a time, (perhaps a header and source file), Meson has this +case covered as well. `custom_target`s can be indexed like a list to +get each output file separately. The order is the same as the order of +the output argument to `custom_target` + +```meson +prog_python = import('python').find_installation('python3') + +foo_ch = custom_target( + 'foo.[ch]', + output : ['foo.c', 'foo.h'], + input : 'my_gen.py', + command : [prog_python, '@INPUT@', '@OUTPUT@'], +) + +libfoo = static_library('foo', [foo_ch]) + +executable('myexe', ['main.c', foo_ch[1]], link_with : libfoo) +``` + +In this case `libfoo` depends on both `foo.c` and `foo.h` but `myexe` +only depends on `foo.h`, the second output. + +### Using dependencies to manage generated resources + +In some cases it might be easier to use `declare_dependency` to +"bundle" the header and library dependency, especially if there are +many generated headers: + +```meson +idep_foo = declare_dependency( + sources : [foo_h, bar_h], + link_with : [libfoo], +) +``` + +See [dependencies](Dependencies.md#declaring-your-own), and +[reference](Reference-manual.md#declare_dependency) for more +information. + +## Using generator() + +Generators are similar to custom targets, except that we define a +*generator*, which defines how to transform an input file into one or +more output files, and then use that on as many input files as we +want. + +Note that generators should only be used for outputs that will only be +used as inputs for a build target or a custom target. When you use the +processed output of a generator in multiple targets, the generator +will be run multiple times to create outputs for each target. Each +output will be created in a target-private directory `@BUILD_DIR@`. + +If you want to generate files for general purposes such as for +generating headers to be used by several sources, or data that will be +installed, and so on, use a +[`custom_target()`](Reference-manual.md#custom_target) instead. + + +```meson +gen = generator(mycomp, + output : '@BASENAME@.c', + arguments : ['@INPUT@', '@OUTPUT@']) +``` + +The first argument is the executable file to run. The next file +specifies a name generation rule. It specifies how to build the output +file name for a given input name. `@BASENAME@` is a placeholder for +the input file name without preceding path or suffix (if any). 
So if +the input file name were `some/path/filename.idl`, then the output +name would be `filename.c`. You can also use `@PLAINNAME@`, which +preserves the suffix which would result in a file called +`filename.idl.c`. The last line specifies the command line arguments +to pass to the executable. `@INPUT@` and `@OUTPUT@` are placeholders +for the input and output files, respectively, and will be +automatically filled in by Meson. If your rule produces multiple +output files and you need to pass them to the command line, append the +location to the output holder like this: `@OUTPUT0@`, `@OUTPUT1@` and +so on. + +With this rule specified we can generate source files and add them to +a target. + +```meson +gen_src = gen.process('input1.idl', 'input2.idl') +executable('program', 'main.c', gen_src) +``` + +Generators can also generate multiple output files with unknown names: + +```meson +gen2 = generator(someprog, + output : ['@BASENAME@.c', '@BASENAME@.h'], + arguments : ['--out_dir=@BUILD_DIR@', '@INPUT@']) +``` + +In this case you can not use the plain `@OUTPUT@` variable, as it +would be ambiguous. This program only needs to know the output +directory, it will generate the file names by itself. + +To make passing different additional arguments to the generator +program at each use possible, you can use the `@EXTRA_ARGS@` string in +the `arguments` list. Note that this placeholder can only be present +as a whole string, and not as a substring. The main reason is that it +represents a list of strings, which may be empty, or contain multiple +elements; and in either case, interpolating it into the middle of a +single string would be troublesome. If there are no extra arguments +passed in from a `process()` invocation, the placeholder is entirely +omitted from the actual list of arguments, so an empty string won't be +passed to the generator program because of this. If there are multiple +elements in `extra_args`, they are inserted into to the actual +argument list as separate elements. + +```meson +gen3 = generator(genprog, + output : '@BASENAME@.cc', + arguments : ['@INPUT@', '@EXTRA_ARGS@', '@OUTPUT@']) +gen3_src1 = gen3.process('input1.y') +gen3_src2 = gen3.process('input2.y', extra_args: '--foo') +gen3_src3 = gen3.process('input3.y', extra_args: ['--foo', '--bar']) +``` diff --git a/meson/docs/markdown/Getting-meson.md b/meson/docs/markdown/Getting-meson.md new file mode 100644 index 000000000..ba3cdf73c --- /dev/null +++ b/meson/docs/markdown/Getting-meson.md @@ -0,0 +1,96 @@ +# Getting Meson + +Meson is implemented in Python 3, and requires 3.6 or newer. If your +operating system provides a package manager, you should install it +with that. For platforms that don't have a package manager, you need +to download it from [Python's home page]. See below for +[platform-specific Python3 quirks](#platformspecific-install-quirks). + +## Downloading Meson + +Meson releases can be downloaded from the [GitHub release page], and +you can run `./meson.py` from inside a release or the git repository +itself without doing anything special. + +On Windows, if you did not install Python with the installer options +that make Python scripts executable, you will have to run `python +/path/to/meson.py`, where `python` is Python 3.6 or newer. + +The newest development code can be obtained directly from [Git], and +we strive to ensure that it will always be working and usable. All +commits go through a pull-request process that runs CI and tests +several platforms. 
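+
+For example, using the development version on a project of your own might look
+roughly like this (directory names are illustrative):
+
+```
+git clone https://github.com/mesonbuild/meson.git
+cd my-project
+../meson/meson.py builddir
+```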
+ +## Installing Meson with pip + +Meson is available in the [Python Package Index] and can be installed +with `sudo pip3 install meson` which requires root and will install it +system-wide. + +If you have downloaded a copy of the Meson sources already, you can +install it with `sudo pip3 install path/to/source/root/`. + +Alternatively, you can use `pip3 install --user meson` which will +install it for your user and does not require any special +privileges. This will install the package in `~/.local/`, so you will +have to add `~/.local/bin` to your `PATH`, and `sudo meson install` +will be completely broken since the program will not be available to +root. Only use a user copy of Meson if you do not care about +installing projects as root. + +## Installing Meson and Ninja with the MSI installer + +We provide an MSI installer on the [GitHub release page] that can be +used to install both Meson and Ninja at once for Windows. It also +contains an embedded copy of Python, so scripts that use the [Python +module](Python-module.md) and do not have any external dependencies +will continue to work as expected. + +Please note that this is a new feature, so bug reports are expected +and welcome! + +## Dependencies + +In the most common case, you will need the [Ninja executable] for +using the `ninja` backend, which is the default in Meson. This backend +can be used on all platforms and with all toolchains, including GCC, +Clang, Visual Studio, MinGW, ICC, ARMCC, etc. + +You can use the version provided by your package manager if possible, +otherwise download the binary executable from the [Ninja project's +release page](https://github.com/ninja-build/ninja/releases). + +If you will only use the Visual Studio backend (`--backend=vs`) to +generate Visual Studio solutions on Windows or the XCode backend +(`--backend=xcode`) to generate XCode projects on macOS, you do not +need Ninja. + +# Platform-specific install quirks + +## Windows Python3 quirks + +When installing Python 3, it is highly recommended (but not required) +that you select the installer options as follows: + +![installer step 1](images/py3-install-1.png "Enable 'Add Python 3.6 to PATH' and click 'Customize installation'") +![installer step 2](images/py3-install-2.png "Optional Features: ensure 'pip' is enabled") +![installer step 3](images/py3-install-3.png "Advanced Options: enable 'Install for all users'") + +With this, you will have `python` and `pip` in `PATH`, and you can +install Meson with pip. You will also be able to directly run `meson` +in any shell on Windows instead of having to run `py -3` with the full +path to the `meson.py` script. + +## MSYS2 Python3 quirks + +If you are using MSYS2 on Windows as your development environment, +please make sure that you **do not use** the `msys/python` package to +provide Python 3. Use either `mingw32/mingw-w64-i686-python3` or +`mingw64/mingw-w64-x86_64-python3` depending on which MinGW target you +are building for. 
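+
+For example, in a MinGW 64-bit shell the package mentioned above would
+typically be installed with:
+
+```
+pacman -S mingw-w64-x86_64-python3
+```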
+ + [GitHub release page]: https://github.com/mesonbuild/meson/releases + [Python Package Index]: https://pypi.python.org/pypi/meson/ + [Git]: https://github.com/mesonbuild/meson + [Python's home page]: https://www.python.org/downloads/ + [Ninja executable]: https://ninja-build.org/ diff --git a/meson/docs/markdown/Getting-meson_ptbr.md b/meson/docs/markdown/Getting-meson_ptbr.md new file mode 100644 index 000000000..f2e2811ec --- /dev/null +++ b/meson/docs/markdown/Getting-meson_ptbr.md @@ -0,0 +1,93 @@ +# Obtendo o Meson + +Meson é implementado em Python 3, e requer a versão 3.6 ou mais nova. +se o seu sistema operacional provê um gerenciador de pacotes, você deve +instalar o Meson com ele. Para plataformas que não tem um gerenciador de +pacotes, você precisa baixa-lo da [página inicial do Python]. Veja abaixo +[peculiaridades do Python3 específicas de plataformas](#platformspecific-install-quirks). + +## Baixando o Meson + +*Releases* do Meson podem ser baixadas da [página de releases do GitHub] +e você pode executar `./meson.py` de dentro do *release* ou do próprio repositório +do git sem fazer nada de especial. + +No Windows, se você não instalar o Python com a opção do instalador que fazem +os *scripts* Python executáveis, você vai ter que executar `python +/path/to/meson.py`, onde `python` é o Python 3.6 ou mais novo. + +O código de desenvolvimento mais recente pode ser obtido diretamente do [Git], +e nós lutamos para garatir que ele vai estar sempre funcionando e usável. Todos +*commits* passam por um processo de *pull-request* que executa CI e testam diversas +plataformas. + +## Instalando o Meson com o pip + +O Meson está disponível no [Indice de Pacotes do Python] e pode ser instalado com +`sudo pip3 install meson` que requer root e vai instala-lo para todo o sistema. + +Se você já baixou uma cópia do código do Meson, você pode instalar com +`sudo pip3 install path/to/source/root/`. + +Como alternativa, você pode usar o `pip3 install --user meson` que vai instalar +o Meson para o seu usuário e não requer nenhum privilégio especial. Esse comando +vai instalar o pacote em `~/.local/`, então você terá que adicionar `~/.local/bin` +para o seu `PATH`, e `sudo meson install` vai estar completamente quebrado já que +o programa não vai estar disponível para o root. Apeas use uma cópia de usuário do Meson +se você não se importa sobre instalar os projetos como root. + +## Instalando o Meson e o Ninja com o instalador MSI + +Nós provemos um instalador MSI na [página de *release* do GitHub] que pode ser usada +para instalar tanto o Meson quanto o Ninja de uma vez para o Windows. O instalador também +contém uma cópia integrada do Python, então scripts que usam o [módulo Python](Python-module.md) +e não tem nenhuma dependência externa vão continuar funcionando como esperado. + +Por favor, note que essa é uma funcionalidade nova, então relatórios de bugs são esperados e bem-vindos! + +## Dependências + +Na maioria dos casos comums, você vai precisar do [executável do Ninja] para usar o *backend* do `ninja`, +que é o padrão no Meson. Esse *backend* pode ser usado em todas plataformas e com todas **toolchains**, incluindo o GCC, +Clang, Visual Studio, MinGW, ICC, ARMCC, etc. + +Você deve usar a versão provida pelo seu gerenciador de pacotes se possível, caso contrário, +baixe o binário executável da [página de *release* do projeto Ninja](https://github.com/ninja-build/ninja/releases). 
+ +Se você apenas usa o *backend* do Visual Studio (`--backend=vs`) para gerar soluções do Visual Studio no Windows ou o +*backend* do XCode (`--backend=xcode`) para gerar projetos no macOS, você não precisa do Ninja. + + + +# Peculiaridades de instalação específicas de plataformas + +## Peculiaridades do Python 3 no Windows + +Quando estiver instalando o Python 3, é altamente recomendável (mas não obrigatório) +que você selecione as opções do instalador como a seguir: + +![passo 1 do instalador](images/py3-install-1.png "Ative 'Add Python 3.6 to PATH' e clique em 'Customize installation'") + +![passo 2 do instalador](images/py3-install-2.png "Funcionalidade Opcional: garanta que 'pip' está ativado") + +![passo 3 do instalador](images/py3-install-3.png "Opções Avançadas: ative 'Instalar para todos usuários'") + +Com isso, você terá o `python` e o `pip` no `PATH`, e você poderá instalar o Meson com o pip. Você também vai poder +executar o comando `meson` em qualquer shell no Windows ao invés de ter que executar `py -3` com o caminho completo para +o *script* `meson.py`. + +## Peculiaridades do Python 3 no MSYS2 + +Se você está usando o MSYS2 no Windows como seu ambiente de desenvolvimento, +por favor se certifique que você não esteja usando o pacote `msys/python` para +fornecer o Python 3. Uso o `mingw32/mingw-w64-i686-python3` ou o +If you are using MSYS2 on Windows as your development environment, +please make sure that you **do not use** the `msys/python` package to +provide Python 3. Use either `mingw32/mingw-w64-i686-python3` or `mingw64/mingw-w64-x86_64-python3` +dependendo de para qual *target* do MinGW você está compilando. + + [página de *release* do GitHub]: https://github.com/mesonbuild/meson/releases + [Indice de Pacotes do Python]: https://pypi.python.org/pypi/meson/ + [Git]: https://github.com/mesonbuild/meson + [página inicial do Python]: https://www.python.org/downloads/ + [executável do Ninja]: https://ninja-build.org/ diff --git a/meson/docs/markdown/Getting-meson_zh.md b/meson/docs/markdown/Getting-meson_zh.md new file mode 100644 index 000000000..4a4cb3431 --- /dev/null +++ b/meson/docs/markdown/Getting-meson_zh.md @@ -0,0 +1,56 @@ +# 获å–Meson + +Meson基于Python3è¿è¡Œï¼Œè¦æ±‚Python版本3.5以上。 如果你的æ“作系统æ供包管ç†å™¨, 你应该用包管ç†å™¨å®‰è£…meson;如果没有包管ç†å™¨ï¼Œä½ åº”该在[Python主页]下载åˆé€‚çš„Python3。相关请å‚阅[特殊平å°çš„安装特例](#特殊平å°çš„安装特例). + +## 下载Meson + +Mesonå‘行版å¯åœ¨ [GitHubå‘行页é¢]下载, ä½ å¯ä»¥åœ¨release的解压目录或者git仓库目录里直接è¿è¡Œ `./meson.py` ,ä¸éœ€è¦è¿›è¡Œä»»ä½•ç‰¹æ®Šæ“作。 + +在Windows下,如果你安装Python时没有将Python路径添加到环境å˜é‡, 那你应该使用`python /path/to/meson.py`命令è¿è¡ŒMeson,当然`python` 的版本应该大于3.5。 + +最新的开å‘版本的æºç å¯ä»¥ç›´æŽ¥é€šè¿‡[Git]获得,我们尽å¯èƒ½ä¿è¯å®ƒæ€»æ˜¯å¯ä»¥æ­£å¸¸ä½¿ç”¨ã€‚所有的æ交通过pull-request进行,此过程将è¿è¡Œ CI 并且会在多个平å°è¿›è¡Œæµ‹è¯•ã€‚ + +## 使用pip安装Meson + +Meson在[Python包索引]中,å¯é€šè¿‡`pip3 install meson`命令安装,如果在root环境下,它会在系统范围内安装。 + +相å,你也å¯ä»¥ä½¿ç”¨ `pip3 install --user meson`命令æ¥ä¸º`user`用户å•ç‹¬å®‰è£…,此过程ä¸éœ€è¦ä»»ä½•ç‰¹æ®Šæƒé™. Meson会被安装到`~/.local/`目录下,所以你需è¦å°† `~/.local/bin`添加至你的`PATH`. + +## 使用MSI安装包安装Mesonå’ŒNinja + +我们也在[GitHubå‘行页é¢]æä¾›MSI安装包,å¯ä»¥åŒæ—¶ä¸ºWindows安装 Mesonå’ŒNinja。 它也包å«ä¸€ä»½åµŒå…¥æ€§çš„Pythonæ‹·è´, 所以[Python module](Python-module.md)å¯ä»¥ä¸é ä»»ä½•å¤–部ä¾èµ–的情况下正如期望般的正常工作。 + +因为这是新特性,请多留æ„,如果出现BUG欢迎åé¦ˆï¼ + +## 所需ä¾èµ– + +最主è¦çš„, ä½ éœ€è¦ [Ninjaå¯æ‰§è¡Œç¨‹åº] æ¥ä½¿ç”¨Meson默认的 +`ninja` åŽç«¯å‚数。这个å‚æ•°å¯ä»¥ç”¨äºŽæ‰€æœ‰å¹³å°å’Œå·¥å…·é“¾åŒ…括 GCC, Clang, Visual Studio, MinGW,ICC, ARMCC. 
+ +如果å¯èƒ½ï¼Œä½ åº”该使用包管ç†å™¨æ供的版本,å¦åˆ™çš„è¯ï¼Œä½ åº”该 +在[Ninjaå‘行页é¢]下载å¯æ‰§è¡ŒäºŒè¿›åˆ¶æ–‡ä»¶ã€‚(https://github.com/ninja-build/ninja/releases). + +如果你åªç”¨Visual StudioåŽç«¯å‚æ•° (`--backend=vs`)æ¥ç”ŸæˆWindows上的Visual Studio工程文件或者XCodeåŽç«¯å‚æ•° (`--backend=xcode`) 生æˆmacOS上的 +XCode工程文件, 那么你ä¸éœ€è¦å®‰è£…Ninja. + +# 特殊平å°çš„安装特例 + +## Windows Python3 + +安装Python3æ—¶,强烈推è以下安装器选项 (éžå¿…须,请结åˆå®žé™…) : + +![installer step 1](images/py3-install-1.png "Enable 'Add Python 3.6 to PATH' and click 'Customize installation'") +![installer step 2](images/py3-install-2.png "Optional Features: ensure 'pip' is enabled") +![installer step 3](images/py3-install-3.png "Advanced Options: enable 'Install for all users'") + +完æˆè¿™ä¸ªä¹‹åŽ, `python` å’Œ`pip`的路径会收录进`PATH`,ä½ å¯ä»¥ä½¿ç”¨pip安装Meson. 你也å¯ä»¥åœ¨Windows任何终端下直接è¿è¡Œ`meson`而ä¸æ˜¯ä¸å¾—ä¸è¾“å…¥`py -3`完整路径去è¿è¡Œ `meson.py`脚本. + +## MSYS2 Python3 + +如果你使用MSYS2作为你的Windowså¼€å‘环境,请确信你**没有**å°† `msys/python` 当作你的默认Python使用. 请使用 `mingw32/mingw-w64-i686-python3` 或者 `mingw64/mingw-w64-x86_64-python3`,这å–决于MinGWä¸åŒçš„构建对象. + + [GitHubå‘行页é¢]: https://github.com/mesonbuild/meson/releases + [Python包索引]: https://pypi.python.org/pypi/meson/ + [Git]: https://github.com/mesonbuild/meson + [Python主页]: https://www.python.org/downloads/ + [Ninjaå¯æ‰§è¡Œç¨‹åº]: https://ninja-build.org/ diff --git a/meson/docs/markdown/Gnome-module.md b/meson/docs/markdown/Gnome-module.md new file mode 100644 index 000000000..2b90e0ceb --- /dev/null +++ b/meson/docs/markdown/Gnome-module.md @@ -0,0 +1,378 @@ +# GNOME module + +This module provides helper tools for build operations needed when +building Gnome/GLib programs. + +**Note**: the compilation commands here might not work properly when + you change the source files. This is a bug in the respective + compilers which do not expose the required dependency + information. This has been reported upstream in [this bug]. Until + this is fixed you need to be careful when changing your source + files. + + [this bug]: https://bugzilla.gnome.org/show_bug.cgi?id=745754 + +## Usage + +To use this module, just do: **`gnome = import('gnome')`**. The +following functions will then be available as methods on the object +with the name `gnome`. You can, of course, replace the name `gnome` +with anything else. + +### gnome.compile_resources() + +This function compiles resources specified in an XML file into code +that can be embedded inside the main binary. Similar a build target it +takes two positional arguments. The first one is the name of the +resource and the second is the XML file containing the resource +definitions. If the name is `foobar`, Meson will generate a header +file called `foobar.h`, which you can then include in your sources. 
+ +* `c_name`: passed to the resource compiler as an argument after + `--c-name` +* `dependencies`: extra targets to depend upon for building +* `export`: (*Added 0.37.0*) if true, export the symbols of the + generated sources +* `extra_args`: extra command line arguments to pass to the resource +* `gresource_bundle`: (*Added 0.37.0*) if true, output a `.gresource` + file instead of source +* `install`: (*Added 0.37.0*) if true, install the gresource file +* `install_dir`: (*Added 0.37.0*) location to install the header or + bundle depending on previous options +* `install_header`: (*Added 0.37.0*) if true, install the header file +* `source_dir`: a list of directories where the resource compiler + should look up the files + +Returns an array containing: `[c_source, header_file]` or +`[gresource_bundle]` + +Example: + +```meson +gnome = import('gnome') + +asresources = gnome.compile_resources( + 'as-resources', 'data/asresources.gresource.xml', + source_dir: 'data', + c_name: 'as' +) + +executable( + meson.project_name(), + asresources, + dependencies: my_deps, + install: true +) +``` + +### gnome.generate_gir() + +Generates GObject introspection data. + +Takes one or more positional arguments: + +Either one or more library objects you want to build gir data for, or a single +executable object. + +There are several keyword arguments. Many of these map directly to the +`g-ir-scanner` tool so see its documentation for more information. + +* `dependencies`: deps to use during introspection scanning +* `extra_args`: command line arguments to pass to gir compiler +* `export_packages`: extra packages the gir file exports +* `sources`: the list of sources to be scanned for gir data +* `nsversion`: namespace version +* `namespace`: the namespace for this gir object which determines + output files +* `identifier_prefix`: the identifier prefix for the gir object, + e.g. `Gtk` +* `includes`: list of gir names to be included, can also be a GirTarget +* `header`: *(Added 0.43.0)* name of main c header to include for the library, e.g. `glib.h` +* `include_directories`: extra include paths to look for gir files +* `install`: if true, install the generated files +* `install_dir_gir`: (*Added 0.35.0*) which directory to install the + gir file into +* `install_dir_typelib`: (*Added 0.35.0*) which directory to install + the typelib file into +* `link_with`: list of libraries to link with +* `symbol_prefix`: the symbol prefix for the gir object, e.g. `gtk`, + (*Since 0.43.0*) an ordered list of multiple prefixes is allowed +* `fatal_warnings`: *Since 0.55.0* turn scanner warnings into fatal errors. + +Returns an array of two elements which are: `[gir_target, +typelib_target]` + +### gnome.genmarshal() + +Generates a marshal file using the `glib-genmarshal` tool. The first +argument is the basename of the output files. 
+ +* `extra_args`: (*Added 0.42.0*) additional command line arguments to + pass +* `install_header`: if true, install the generated header +* `install_dir`: directory to install header to +* `nostdinc`: if true, don't include the standard marshallers from + glib +* `internal`: if true, mark generated sources as internal to + `glib-genmarshal` (*Requires GLib 2.54*) +* `prefix`: the prefix to use for symbols +* `skip_source`: if true, skip source location comments +* `stdinc`: if true, include the standard marshallers from glib +* `sources`: the list of sources to use as inputs +* `valist_marshallers`: if true, generate va_list marshallers + +*Added 0.35.0* + +Returns an array of two elements which are: `[c_source, header_file]` + +### gnome.mkenums() + +Generates enum files for GObject using the `glib-mkenums` tool. The +first argument is the base name of the output files, unless +`c_template` and `h_template` are specified. In this case, the output +files will be the base name of the values passed as templates. + +This method is essentially a wrapper around the `glib-mkenums` tool's +command line API. It is the most featureful method for enum creation. + +Typically you either provide template files or you specify the various +template sections manually as strings. + +Most libraries and applications will be using the same standard +template with only minor tweaks, in which case the +`gnome.mkenums_simple()` convenience method can be used instead. + +Note that if you `#include` the generated header in any of the sources +for a build target, you must add the generated header to the build +target's list of sources to codify the dependency. This is true for +all generated sources, not just `mkenums`. + +* `c_template`: template to use for generating the source +* `comments`: comment passed to the command +* `h_template`: template to use for generating the header +* `identifier_prefix`: prefix to use for the identifiers +* `install_header`: if true, install the generated header +* `install_dir`: directory to install the header +* `sources`: the list of sources to make enums with +* `symbol_prefix`: prefix to use for the symbols +* `eprod`: enum text +* `fhead`: file header +* `fprod`: file text +* `ftail`: file tail +* `vhead`: value text +* `vtail`: value tail + +*Added 0.35.0* + +Returns an array of two elements which are: `[c_source, header_file]` + +### gnome.mkenums_simple() + +Generates enum `.c` and `.h` files for GObject using the +`glib-mkenums` tool with the standard template used by most +GObject-based C libraries. The first argument is the base name of the +output files. + +Note that if you `#include` the generated header in any of the sources +for a build target, you must add the generated header to the build +target's list of sources to codify the dependency. This is true for +all generated sources, not just `mkenums_simple`. + +* `body_prefix`: additional prefix at the top of the body file, + e.g. for extra includes +* `decorator`: optional decorator for the function declarations, + e.g. `GTK_AVAILABLE` or `GST_EXPORT` +* `function_prefix`: additional prefix for function names, e.g. in + case you want to add a leading underscore to functions used only + internally +* `header_prefix`: additional prefix at the top of the header file, + e.g. 
for extra includes (which may be needed if you specify a + decorator for the function declarations) +* `install_header`: if true, install the generated header +* `install_dir`: directory to install the header +* `identifier_prefix`: prefix to use for the identifiers +* `sources`: the list of sources to make enums with +* `symbol_prefix`: prefix to use for the symbols + +Example: + +```meson +gnome = import('gnome') + +my_headers = ['myheader1.h', 'myheader2.h'] +my_sources = ['mysource1.c', 'mysource2.c'] + +# will generate myenums.c and myenums.h based on enums in myheader1.h and myheader2.h +enums = gnome.mkenums_simple('myenums', sources : my_headers) + +mylib = library('my', my_sources, enums, + include_directories: my_incs, + dependencies: my_deps, + c_args: my_cargs, + install: true) +``` + +*Added 0.42.0* + +Returns an array of two elements which are: `[c_source, header_file]` + +### gnome.compile_schemas() + +When called, this method will compile the gschemas in the current +directory. Note that this is not for installing schemas and is only +useful when running the application locally for example during tests. + +* `build_by_default`: causes, when set to true, to have this target be + built by default, that is, when invoking plain `meson compile`, the default + value is true for all built target types +* `depend_files`: files ([`string`](Reference-manual.md#string-object), + [`files()`](Reference-manual.md#files), or + [`configure_file()`](Reference-manual.md#configure_file)) of + schema source XML files that should trigger a re-compile if changed. + +### gnome.gdbus_codegen() + +Compiles the given XML schema into gdbus source code. Takes two +positional arguments, the first one specifies the base name to use +while creating the output source and header and the second specifies +one XML file. + +* `sources`: list of XML files +* `interface_prefix`: prefix for the interface +* `namespace`: namespace of the interface +* `extra_args`: (*Added 0.47.0*) additional command line arguments to pass +* `autocleanup`: *(Added 0.47.0)* if set generates autocleanup code. Can be one of `none`, `objects` or `all` +* `object_manager`: *(Added 0.40.0)* if true generates object manager code +* `annotations`: *(Added 0.43.0)* list of lists of 3 strings for the annotation for `'ELEMENT', 'KEY', 'VALUE'` +* `docbook`: *(Added 0.43.0)* prefix to generate `'PREFIX'-NAME.xml` docbooks +* `build_by_default`: causes, when set to true, to have this target be + built by default, that is, when invoking plain `meson compile`, the default + value is true for all built target types +* `install_dir`: (*Added 0.46.0*) location to install the header or + bundle depending on previous options +* `install_header`: (*Added 0.46.0*) if true, install the header file + +Starting *0.46.0*, this function returns a list of at least two custom +targets (in order): one for the source code and one for the header. +The list will contain a third custom target for the generated docbook +files if that keyword argument is passed. + +Earlier versions return a single custom target representing all the +outputs. Generally, you should just add this list of targets to a top +level target's source list. 
+ +Example: + +```meson +gnome = import('gnome') + +# The returned source would be passed to another target +gdbus_src = gnome.gdbus_codegen('example-interface', + sources: 'com.example.Sample.xml', + interface_prefix : 'com.example.', + namespace : 'Sample', + annotations : [ + ['com.example.Hello()', 'org.freedesktop.DBus.Deprecated', 'true'] + ], + docbook : 'example-interface-doc' +) +``` + +### gnome.generate_vapi() + +Creates a VAPI file from gir. The first argument is the name of the +library. + +* `gir_dirs`: extra directories to include for gir files +* `install`: if true, install the VAPI file +* `install_dir`: location to install the VAPI file (defaults to datadir/vala/vapi) +* `metadata_dirs`: extra directories to include for metadata files +* `packages`: VAPI packages that are depended upon +* `sources`: the gir source to generate the VAPI from +* `vapi_dirs`: extra directories to include for VAPI files + +Returns a custom dependency that can be included when building other +VAPI or Vala binaries. + +*Added 0.36.0* + +### gnome.yelp() + +Installs help documentation using Yelp. The first argument is the +project id. + +This also creates two targets for translations +`help-$project-update-po` and `help-$project-pot`. + +* `languages`: list of languages for translations +* `media`: list of media such as images +* `sources`: list of pages +* `symlink_media`: if media should be symlinked not copied (defaults to `true` since 0.42.0) + +Note that very old versions of yelp may not support symlinked media; +At least 3.10 should work. + +*Added 0.36.0* + +### gnome.gtkdoc() + +Compiles and installs gtkdoc documentation into +`prefix/share/gtk-doc/html`. Takes one positional argument: The name +of the module. + +* `content_files`: a list of content files +* `dependencies`: a list of dependencies +* `fixxref_args`: a list of arguments to pass to `gtkdoc-fixxref` +* `gobject_typesfile`: a list of type files +* `include_directories`: extra include paths to pass to `gtkdoc-scangobj` +* `ignore_headers`: a list of header files to ignore +* `html_assets`: a list of assets for the HTML pages +* `html_args` a list of arguments to pass to `gtkdoc-mkhtml` +* `install`: if true, installs the generated docs +* `install_dir`: the directory to install the generated docs relative + to the gtk-doc html dir or an absolute path (default: module name) +* `main_xml`: specifies the main XML file +* `main_sgml`: equal to `main_xml` +* `mkdb_args`: a list of arguments to pass to `gtkdoc-mkdb` +* `namespace`: specifies the name space to pass to `gtkdoc-mkdb` +* `module_version`: the version of the module, affects the installed location and the devhelp2 file location +* `scan_args`: a list of arguments to pass to `gtkdoc-scan` +* `scanobjs_args`: a list of arguments to pass to `gtkdoc-scangobj` +* `c_args`: (*Added 0.48.0*) additional compile arguments to pass +* `src_dir`: include_directories to include +* `check`: (*Since 0.52.0*) if `true` runs `gtkdoc-check` when running unit tests. + Note that this has the downside of rebuilding the doc for each build, which is + often very slow. It usually should be enabled only in CI. + +This also creates a `$module-doc` target that can be run to build +documentation. Normally the documentation is only built on install. + +*Since 0.52.0* Returns a target object that can be passed as +dependency to other targets using generated doc files (e.g. in +`content_files` of another doc). 
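+
+A hypothetical invocation using only keyword arguments described above (the
+module, file and variable names are illustrative):
+
+```meson
+gnome = import('gnome')
+
+foobar_doc = gnome.gtkdoc('foobar',
+  main_xml : 'foobar-docs.xml',
+  src_dir : include_directories('src'),
+  dependencies : libfoobar_dep,
+  install : true)
+```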
+ +### gnome.gtkdoc_html_dir() + +Takes as argument a module name and returns the path where that +module's HTML files will be installed. Usually used with +`install_data` to install extra files, such as images, to the output +directory. + +### gnome.post_install() + +*Since 0.57.0* + +Post-install update of various system wide caches. Each script will be executed +only once even if `gnome.post_install()` is called multiple times from multiple +subprojects. If `DESTDIR` is specified during installation all scripts will be +skipped. + +It takes the following keyword arguments: +- `glib_compile_schemas`: If set to `true`, update `gschemas.compiled` file in + `//glib-2.0/schemas`. +- `gio_querymodules`: List of directories relative to `prefix` where + `giomodule.cache` file will be updated. +- `gtk_update_icon_cache`: If set to `true`, update `icon-theme.cache` file in + `//icons/hicolor`. +- `update_desktop_database`: *Since 0.59.0* If set to `true`, update cache of + MIME types handled by desktop files in `//applications`. diff --git a/meson/docs/markdown/Hotdoc-module.md b/meson/docs/markdown/Hotdoc-module.md new file mode 100644 index 000000000..7d9fc555f --- /dev/null +++ b/meson/docs/markdown/Hotdoc-module.md @@ -0,0 +1,79 @@ +--- +short-description: Hotdoc module +authors: + - name: Thibault Saunier + email: tsaunier@igalia.com + years: [2018] + has-copyright: false +... + +# Hotdoc module + +This module provides helper functions for generating documentation using +[hotdoc]. + +*Added 0.48.0* + +## Usage + +To use this module, just do: **`hotdoc = import('hotdoc')`**. The +following functions will then be available as methods on the object +with the name `hotdoc`. You can, of course, replace the name `hotdoc` +with anything else. + +### hotdoc.generate_doc() + +Generates documentation using [hotdoc] and installs it into `$prefix/share/doc/html`. + +**Positional argument:** + +* `project_name`: The name of the hotdoc project + +**Keyworded arguments:** + +* `sitemap` (*[string] or [file]*) (**required**): The hotdoc sitemap file +* `index` (*[string] or [file]*) (**required**): Location of the index file +* `dependencies`(*[targets]*): Targets on which the documentation generation depends on. +* `subprojects`: A list of `HotdocTarget` that are used as subprojects for hotdoc to generate + the documentation. +* ... Any argument of `hotdoc` can be used replacing dashes (`-`) with underscores (`_`). + For a full list of available parameters, just have a look at `hotdoc help`. + +[file]: Reference-manual.md#files +[string]: Reference-manual.md#string-object +[targets]: Reference-manual.md#build-target-object + +**Returns:** + +`HotdocTarget`: A [`custom_target`](Reference-manual.md#custom-target-object) with the +following extra methods: + +* `config_path`: Path to the generated `hotdoc` configuration file. + +### hotdoc.has_extensions() + +**Positional arguments:** + +* `...`: The hotdoc extension names to look for + +**No keyworded arguments** + +**Returns:** `true` if all the extensions where found, `false` otherwise. 
+ +### Example + +``` meson +hotdoc = import('hotdoc') + +hotdoc.generate_doc('foobar', + project_version: '0.1', + sitemap: 'sitemap.txt', + index: 'index.md', + c_sources: ['path/to/file.c'], + c_smart_index: true, + languages: ['c'], + install: true, +) +``` + +[hotdoc]: https://hotdoc.github.io/ \ No newline at end of file diff --git a/meson/docs/markdown/IDE-integration.md b/meson/docs/markdown/IDE-integration.md new file mode 100644 index 000000000..6f0b62916 --- /dev/null +++ b/meson/docs/markdown/IDE-integration.md @@ -0,0 +1,365 @@ +--- +short-description: Meson's API to integrate Meson support into an IDE +... + +# IDE integration + +Meson has exporters for Visual Studio and XCode, but writing a custom +backend for every IDE out there is not a scalable approach. To solve +this problem, Meson provides an API that makes it easy for any IDE or +build tools to integrate Meson builds and provide an experience +comparable to a solution native to the IDE. + +All the resources required for such a IDE integration can be found in +the `meson-info` directory in the build directory. + +The first thing to do when setting up a Meson project in an IDE is to +select the source and build directories. For this example we assume +that the source resides in an Eclipse-like directory called +`workspace/project` and the build tree is nested inside it as +`workspace/project/build`. First, we initialize Meson by running the +following command in the source directory. + + meson builddir + +With this command Meson will configure the project and also generate +introspection information that is stored in `intro-*.json` files in +the `meson-info` directory. The introspection dump will be +automatically updated when Meson is (re)configured, or the build +options change. Thus, an IDE can watch for changes in this directory +to know when something changed. Note that `meson-info.json` guaranteed +to be the last file written. + +The `meson-info` directory should contain the following files: + +| File | Description | +| ------------------------------ | ------------------------------------------------------------------- | +| `intro-benchmarks.json` | Lists all benchmarks | +| `intro-buildoptions.json` | Contains a full list of Meson configuration options for the project | +| `intro-buildsystem_files.json` | Full list of all Meson build files | +| `intro-dependencies.json` | Lists all dependencies used in the project | +| `intro-installed.json` | Contains mapping of files to their installed location | +| `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.) | +| `intro-targets.json` | Full list of all build targets | +| `intro-tests.json` | Lists all tests with instructions how to run them | + +The content of the JSON files is further specified in the remainder of +this document. + +## The `targets` section + +The most important file for an IDE is probably `intro-targets.json`. +Here each target with its sources and compiler parameters is +specified. The JSON format for one target is defined as follows: + +```json +{ + "name": "Name of the target", + "id": "The internal ID meson uses", + "type": "", + "defined_in": "/Path/to/the/targets/meson.build", + "subproject": null, + "filename": ["list", "of", "generated", "files"], + "build_by_default": true / false, + "target_sources": [], + "extra_files": ["/path/to/file1.hpp", "/path/to/file2.hpp"], + "installed": true / false, +} +``` + +If the key `installed` is set to `true`, the key `install_filename` +will also be present. 
It stores the installation location for each +file in `filename`. If one file in `filename` is not installed, its +corresponding install location is set to `null`. + +The `subproject` key specifies the name of the subproject this target +was defined in, or `null` if the target was defined in the top level +project. + +*(New in 0.56.0)* The `extra_files` key lists all files specified via +the `extra_files` kwarg of a build target. See +[`executable()`](Reference-manual.md#executable). + +A target usually generates only one file. However, it is possible for +custom targets to have multiple outputs. + +### Target sources + +The `intro-targets.json` file also stores a list of all source objects +of the target in the `target_sources`. With this information, an IDE +can provide code completion for all source files. + +```json +{ + "language": "language ID", + "compiler": ["The", "compiler", "command"], + "parameters": ["list", "of", "compiler", "parameters"], + "sources": ["list", "of", "all", "source", "files", "for", "this", "language"], + "generated_sources": ["list", "of", "all", "source", "files", "that", "where", "generated", "somewhere", "else"] +} +``` + +It should be noted that the compiler parameters stored in the +`parameters` differ from the actual parameters used to compile the +file. This is because the parameters are optimized for the usage in an +IDE to provide autocompletion support, etc. It is thus not recommended +to use this introspection information for actual compilation. + +### Possible values for `type` + +The following table shows all valid types for a target. + +| value of `type` | Description | +| ---------------- | --------------------------------------------------------------------------------------------- | +| `executable` | This target will generate an executable file | +| `static library` | Target for a static library | +| `shared library` | Target for a shared library | +| `shared module` | A shared library that is meant to be used with dlopen rather than linking into something else | +| `custom` | A custom target | +| `run` | A Meson run target | +| `jar` | A Java JAR target | + +### Using `--targets` without a build directory + +It is also possible to get most targets without a build directory. +This can be done by running `meson introspect --targets +/path/to/meson.build`. + +The generated output is similar to running the introspection with a +build directory or reading the `intro-targets.json`. However, there +are some key differences: + +- The paths in `filename` now are _relative_ to the future build directory +- The `install_filename` key is completely missing +- There is only one entry in `target_sources`: + - With the language set to `unknown` + - Empty lists for `compiler` and `parameters` and `generated_sources` + - The `sources` list _should_ contain all sources of the target + +There is no guarantee that the sources list in `target_sources` is +correct. There might be differences, due to internal limitations. It +is also not guaranteed that all targets will be listed in the output. +It might even be possible that targets are listed, which won't exist +when Meson is run normally. This can happen if a target is defined +inside an if statement. Use this feature with care. + +## Build Options + +The list of all build options (build type, warning level, etc.) is +stored in the `intro-buildoptions.json` file. Here is the JSON format +for each option. 
+ +```json +{ + "name": "name of the option", + "description": "the description", + "type": "type ID", + "value": "value depends on type", + "section": "section ID", + "machine": "machine ID" +} +``` + +The supported types are: + + - string + - boolean + - combo + - integer + - array + +For the type `combo` the key `choices` is also present. Here all valid +values for the option are stored. + +The possible values for `section` are: + + - core + - backend + - base + - compiler + - directory + - user + - test + +The `machine` key specifies the machine configuration for the option. +Possible values are: + + - any + - host + - build + +To set the options, use the `meson configure` command. + +Since Meson 0.50.0 it is also possible to get the default buildoptions +without a build directory by providing the root `meson.build` instead +of a build directory to `meson introspect --buildoptions`. + +Running `--buildoptions` without a build directory produces the same +output as running it with a freshly configured build directory. + +However, this behavior is not guaranteed if subprojects are present. +Due to internal limitations all subprojects are processed even if they +are never used in a real Meson run. Because of this options for the +subprojects can differ. + +## The dependencies section + +The list of all _found_ dependencies can be acquired from +`intro-dependencies.json`. Here, the name, version, compiler and +linker arguments for a dependency are listed. + +### Scanning for dependecie with `--scan-dependencies` + +It is also possible to get most dependencies used without a build +directory. This can be done by running `meson introspect +--scan-dependencies /path/to/meson.build`. + +The output format is as follows: + +```json +[ + { + "name": "The name of the dependency", + "required": true, + "version": [">=1.2.3"], + "conditional": false, + "has_fallback": false + } +] +``` + +The `required` keyword specifies whether the dependency is marked as +required in the `meson.build` (all dependencies are required by +default). The `conditional` key indicates whether the `dependency()` +function was called inside a conditional block. In a real Meson run +these dependencies might not be used, thus they _may_ not be required, +even if the `required` key is set. The `has_fallback` key just +indicates whether a fallback was directly set in the `dependency()` +function. The `version` key always contains a list of version +requirements from the `meson.build` and **not** the actual version of +the dependency on disc. The version list is empty if no version was +specified in the `meson.build`. + +## Tests + +Compilation and unit tests are done as usual by running the `meson +compile` and `meson test` commands. A JSON formatted result log can be +found in `workspace/project/builddir/meson-logs/testlog.json`. + +When these tests fail, the user probably wants to run the failing test +in a debugger. To make this as integrated as possible, extract the +tests from the `intro-tests.json` and `intro-benchmarks.json` files. +This provides you with all the information needed to run the test: +what command to execute, command line arguments, environment variable +settings and how to process the output. 
+ +```json +{ + "name": "name of the test", + "workdir": "the working directory (can be null)", + "timeout": "the test timeout", + "suite": ["list", "of", "test", "suites"], + "is_parallel": true / false, + "protocol": "exitcode" / "tap", + "cmd": ["command", "to", "run"], + "depends": ["target1-id", "target2-id"], + "env": { + "VARIABLE1": "value 1", + "VARIABLE2": "value 2" + } +} +``` + +The `depends` entry *(since 0.56.0)* contains target ids; they can be +looked up in the targets introspection data. The executable pointed to +by `cmd` is also included in the entry, as are any arguments to the +test that are build products. + +## Build system files + +It is also possible to get Meson build files used in your current +project. This can be done by running `meson introspect +--buildsystem-files /path/to/builddir`. + +The output format is as follows: + +```json +[ + "/Path/to/the/targets/meson.build", + "/Path/to/the/targets/meson_options.txt", + "/Path/to/the/targets/subdir/meson.build" +] +``` + +# Programmatic interface + +Meson also provides the `meson introspect` for project introspection +via the command line. Use `meson introspect -h` to see all available +options. + +This API can also work without a build directory for the +`--projectinfo` command. + +# AST of a `meson.build` + +Since Meson *0.55.0* it is possible to dump the AST of a `meson.build` +as a JSON object. The interface for this is `meson introspect --ast +/path/to/meson.build`. + +Each node of the AST has at least the following entries: + +| Key | Description | +| ------------ | ------------------------------------------------------- | +| `node` | Type of the node (see following table) | +| `lineno` | Line number of the node in the file | +| `colno` | Column number of the node in the file | +| `end_lineno` | Marks the end of the node (may be the same as `lineno`) | +| `end_colno` | Marks the end of the node (may be the same as `colno`) | + +Possible values for `node` with additional keys: + +| Node type | Additional keys | +| -------------------- | ------------------------------------------------ | +| `BooleanNode` | `value`: bool | +| `IdNode` | `value`: str | +| `NumberNode` | `value`: int | +| `StringNode` | `value`: str | +| `ContinueNode` | | +| `BreakNode` | | +| `ArgumentNode` | `positional`: node list; `kwargs`: accept_kwargs | +| `ArrayNode` | `args`: node | +| `DictNode` | `args`: node | +| `EmptyNode` | | +| `OrNode` | `left`: node; `right`: node | +| `AndNode` | `left`: node; `right`: node | +| `ComparisonNode` | `left`: node; `right`: node; `ctype`: str | +| `ArithmeticNode` | `left`: node; `right`: node; `op`: str | +| `NotNode` | `right`: node | +| `CodeBlockNode` | `lines`: node list | +| `IndexNode` | `object`: node; `index`: node | +| `MethodNode` | `object`: node; `args`: node; `name`: str | +| `FunctionNode` | `args`: node; `name`: str | +| `AssignmentNode` | `value`: node; `var_name`: str | +| `PlusAssignmentNode` | `value`: node; `var_name`: str | +| `ForeachClauseNode` | `items`: node; `block`: node; `varnames`: list | +| `IfClauseNode` | `ifs`: node list; `else`: node | +| `IfNode` | `condition`: node; `block`: node | +| `UMinusNode` | `right`: node | +| `TernaryNode` | `condition`: node; `true`: node; `false`: node | + +We do not guarantee the stability of this format since it is heavily +linked to the internal Meson AST. However, breaking changes (removal +of a node type or the removal of a key) are unlikely and will be +announced in the release notes. 
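+ +To make the node types more concrete, here is a small illustrative `meson.build` fragment annotated with the nodes it maps to. The fragment itself is hypothetical; the node names come from the tables above: + +```meson +# FunctionNode (name: 'project') whose ArgumentNode holds two StringNodes +project('demo', 'c') + +# AssignmentNode (var_name: 'answer') whose value is a NumberNode +answer = 42 + +# IfClauseNode containing an IfNode; its condition is a ComparisonNode +if answer == 42 +  # FunctionNode inside the CodeBlockNode of the if block +  message('found it') +endif +```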
+ + +# Existing integrations + +- [Gnome Builder](https://wiki.gnome.org/Apps/Builder) +- [KDevelop](https://www.kdevelop.org) +- [Eclipse CDT](https://www.eclipse.org/cdt/) (experimental) +- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs) (currently unmaintained!) +- [Meson-UI](https://github.com/michaelbadcrumble/meson-ui) (Meson build GUI) +- [Meson Syntax Highlighter](https://plugins.jetbrains.com/plugin/13269-meson-syntax-highlighter) plugin for JetBrains IDEs. +- [asabil.meson](https://open-vsx.org/extension/asabil/meson) extension for VS Code/Codium +- [Qt Creator](https://doc.qt.io/qtcreator/creator-project-meson.html) diff --git a/meson/docs/markdown/Icestorm-module.md b/meson/docs/markdown/Icestorm-module.md new file mode 100644 index 000000000..10b64eff1 --- /dev/null +++ b/meson/docs/markdown/Icestorm-module.md @@ -0,0 +1,27 @@ +# Unstable IceStorm module + +This module is available since version 0.45.0. + +**Note**: this module is unstable. It is only provided as a technology +preview. Its API may change in arbitrary ways between releases or it +might be removed from Meson altogether. + +## Usage + +This module provides an experimental method to create FPGA bitstreams +using the [IceStorm](http://www.clifford.at/icestorm/) suite of tools. + +The module exposes only one method called `project` and it is used +like this: + + is.project('projname', + <list of Verilog source files>, + constraint_file : <constraint file>, + ) + +The input to this function is the set of Verilog files and a +constraint file. This produces output files called `projname.asc`, +`projname.blif` and `projname.bin`. In addition it creates two run +targets called `projname-time` for running timing analysis and +`projname-upload` that uploads the generated bitstream to an FPGA +device using the `iceprog` programming executable. diff --git a/meson/docs/markdown/In-the-press.md b/meson/docs/markdown/In-the-press.md new file mode 100644 index 000000000..aa6f2a8eb --- /dev/null +++ b/meson/docs/markdown/In-the-press.md @@ -0,0 +1,8 @@ +# In the press + +This page lists cases where Meson has been presented in the press. + +* [Linux Magazin](http://www.linux-magazin.de/Ausgaben/2014/08/), in German, August 2014, and also later in [Linux Magazine](http://www.linux-magazine.com/Issues/2014/166/Meson-Build-System) in English +* [Admin Magazine](http://www.admin-magazine.com/HPC/Articles/The-Meson-Build-System) +* [Phoronix](https://www.phoronix.com/scan.php?page=news_item&px=MTc1MDc) regarding compilation of Mesa3D +* [CppCast](http://cppcast.com/2015/12/jussi-pakkanen/) interviewed Jussi about Meson for C++ development in 12/2015 diff --git a/meson/docs/markdown/Include-directories.md b/meson/docs/markdown/Include-directories.md new file mode 100644 index 000000000..6dfed5e48 --- /dev/null +++ b/meson/docs/markdown/Include-directories.md @@ -0,0 +1,30 @@ +--- +short-description: Instructions on handling include directories +... + +# Include directories + +Most `C`/`C++` projects have headers in different directories than +sources. Thus you need to specify include directories. Let's assume +that we are at some subdirectory and wish to add its `include` +subdirectory to some target's search path. To create an include +directory object we do this: + +```meson +incdir = include_directories('include') +``` + +The `incdir` variable now holds a reference to the `include` subdir.
+Now we pass that as an argument to a build target: + +```meson +executable('someprog', 'someprog.c', include_directories : incdir) +``` + +Note that these two commands can be given in any subdirectories and it +will still work. Meson will keep track of the locations and generate +proper compiler flags to make it all work. + +Another thing to note is that `include_directories` adds both the +source directory and corresponding build directory to include path, so +you don't have to care. diff --git a/meson/docs/markdown/IndepthTutorial.md b/meson/docs/markdown/IndepthTutorial.md new file mode 100644 index 000000000..edbbe5508 --- /dev/null +++ b/meson/docs/markdown/IndepthTutorial.md @@ -0,0 +1,150 @@ +# An in-depth tutorial + +In this tutorial we set up a project with multiple targets, unit tests +and dependencies between targets. Our main product is a shared library +called *foo* that is written in `C++11`. We are going to ignore the +contents of the source files, as they are not really important from a +build definition point of view. The library makes use of the `GLib` +library so we need to detect and link it properly. We also make the +resulting library installable. + +The source tree contains three subdirectories `src`, `include` and +`test` that contain, respectively, the source code, headers and unit +tests of our project. + +To start things up, here is the top level `meson.build` file. + +```meson +project('c++ foolib', 'cpp', + version : '1.0.0', + license : 'MIT') +add_global_arguments('-DSOME_TOKEN=value', language : 'cpp') +glib_dep = dependency('glib-2.0') + +inc = include_directories('include') + +subdir('include') +subdir('src') +subdir('test') + +pkg_mod = import('pkgconfig') +pkg_mod.generate(libraries : foolib, + version : '1.0', + name : 'libfoobar', + filebase : 'foobar', + description : 'A Library to barnicate your foos.') +``` + +The definition always starts with a call to the `project` function. In +it you must specify the project's name and programming languages to +use, in this case only `C++`. We also specify two additional +arguments, the project's version and the license it is under. Our +project is version `1.0.0` and is specified to be under the MIT +license. + +Then we find GLib, which is an *external dependency*. The `dependency` +function tells Meson to find the library (by default using +`pkg-config`). If the library is not found, Meson will raise an error +and stop processing the build definition. + +Then we add a global compiler argument `-DSOME_TOKEN=value`. This flag +is used for *all* C++ source file compilations. It is not possible to +unset it for some targets. The reason for this is that it is hard to +keep track of what compiler flags are in use if global settings change +per target. + +Since `include` directory contains the header files, we need a way to +tell compilations to add that directory to the compiler command line. +This is done with the `include_directories` command that takes a +directory and returns an object representing this directory. It is +stored in variable `inc` which makes it accessible later on. + +After this are three `subdir` commands. These instruct Meson to go to +the specified subdirectory, open the `meson.build` file that's in +there and execute it. The last few lines are a stanza to generate a +`pkg-config` file. We'll skip that for now and come back to it at the +end of this document. + +The first subdirectory we go into is `include`. In it we have a a +header file for the library that we want to install. 
This requires one +line. + +```meson +install_headers('foolib.h') +``` + +This installs the given header file to the system's header directory. +This is by default `/[install prefix]/include`, but it can be changed +with a command line argument. + +The Meson definition of `src` subdir is simple. + +```meson +foo_sources = ['source1.cpp', 'source2.cpp'] +foolib = shared_library('foo', + foo_sources, + include_directories : inc, + dependencies : glib_dep, + install : true) +``` + +Here we just tell Meson to build the library with the given sources. +We also tell it to use the include directories we stored to variable +`inc` earlier. Since this library uses GLib, we tell Meson to add all +necessary compiler and linker flags with the `dependencies` keyword +argument. Its value is `glib_dep` which we set at the top level +`meson.build` file. The `install` argument tells Meson to install the +result. As with the headers, the shared library is installed to the +system's default location (usually `/[install prefix]/lib`) but is +again overridable. + +The resulting library is stored in variable `foolib` just like the +include directory was stored in the previous file. + +Once Meson has processed the `src` subdir it returns to the main Meson +file and executes the next line that moves it into the `test` subdir. +Its contents look like this. + +```meson +testexe = executable('testexe', 'footest.cpp', + include_directories : inc, + link_with : foolib) +test('foolib test', testexe) +``` + +First we build a test executable that has the same include directory +as the main library and which also links against the freshly built +shared library. Note that you don't need to specify `glib_dep` here +just to be able to use the built library `foolib`. If the executable +used GLib functionality itself, then we would of course need to add it +as a keyword argument here. + +Finally we define a test with the name `foolib test`. It consists of +running the binary we just built. If the executable exits with a zero +return value, the test is considered passed. Nonzero return values +mark the test as failed. + +At this point we can return to the pkg-config generator line. All +shared libraries should provide a pkg-config file, which explains how +that library is used. Meson provides this simple generator that should +be sufficient for most simple projects. All you need to do is list a +few basic pieces of information and Meson takes care of generating an +appropriate file. More advanced users might want to create their own +pkg-config files using Meson's [configuration file generator +system](Configuration.md). + +With these four files we are done. To configure, build and run the +test suite, we just need to execute the following commands (starting +at source tree root directory). + +```console +$ meson builddir && cd builddir +$ meson compile +$ meson test +``` + +To then install the project you only need one command. + +```console +$ meson install +``` diff --git a/meson/docs/markdown/Installing.md b/meson/docs/markdown/Installing.md new file mode 100644 index 000000000..0e5cb1296 --- /dev/null +++ b/meson/docs/markdown/Installing.md @@ -0,0 +1,149 @@ +--- +short-description: Installing targets +... + +# Installing + +Invoked via the [following command](Commands.md#install) *(available +since 0.47.0)*: + +```sh +meson install +``` + +or alternatively (on older Meson versions with `ninja` backend): + +```sh +ninja install +``` + +By default Meson will not install anything. 
Build targets can be +installed by tagging them as installable in the definition. + +```meson +project('install', 'c') +shared_library('mylib', 'libfile.c', install : true) +``` + +There is usually no need to specify install paths or the like. Meson +will automatically install it to the standards-conforming location. In +this particular case the executable is installed to the `bin` +subdirectory of the install prefix. However if you wish to override +the install dir, you can do that with the `install_dir` argument. + +```meson +executable('prog', 'prog.c', install : true, install_dir : 'my/special/dir') +``` + +Other install commands are the following. + +```meson +install_headers('header.h', subdir : 'projname') # -> include/projname/header.h +install_man('foo.1') # -> share/man/man1/foo.1 +install_data('datafile.dat', install_dir : get_option('datadir') / 'progname') +# -> share/progname/datafile.dat +``` + +`install_data()` supports rename of the file *since 0.46.0*. + +```meson +# file.txt -> {datadir}/{projectname}/new-name.txt +install_data('file.txt', rename : 'new-name.txt') + +# file1.txt -> share/myapp/dir1/data.txt +# file2.txt -> share/myapp/dir2/data.txt +install_data(['file1.txt', 'file2.txt'], + rename : ['dir1/data.txt', 'dir2/data.txt'], + install_dir : 'share/myapp') +``` + +Sometimes you want to copy an entire subtree directly. For this use +case there is the `install_subdir` command, which can be used like +this. + +```meson +install_subdir('mydir', install_dir : 'include') # mydir subtree -> include/mydir +``` + +Most of the time you want to install files relative to the install +prefix. Sometimes you need to go outside of the prefix (such as writing +files to `/etc` instead of `/usr/etc`. This can be accomplished by +giving an absolute install path. + +```meson +install_data(sources : 'foo.dat', install_dir : '/etc') # -> /etc/foo.dat +``` + +## Custom install behavior + +Sometimes you need to do more than just install basic targets. Meson +makes this easy by allowing you to specify a custom script to execute +at install time. As an example, here is a script that generates an +empty file in a custom directory. + +```bash +#!/bin/sh + +mkdir "${DESTDIR}/${MESON_INSTALL_PREFIX}/mydir" +touch "${DESTDIR}/${MESON_INSTALL_PREFIX}/mydir/file.dat" +``` + +As you can see, Meson sets up some environment variables to help you +write your script (`DESTDIR` is not set by Meson, it is inherited from +the outside environment). In addition to the install prefix, Meson +also sets the variables `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT`. + +Telling Meson to run this script at install time is a one-liner. + +```meson +meson.add_install_script('myscript.sh') +``` + +The argument is the name of the script file relative to the current +subdirectory. + +## DESTDIR support + +Sometimes you need to install to a different directory than the +install prefix. This is most common when building rpm or deb +packages. This is done with the `DESTDIR` environment variable and it +is used just like with other build systems: + +```console +$ DESTDIR=/path/to/staging/area meson install +``` + +## Custom install behaviour + +Installation behaviour can be further customized using additional +arguments. 
+ +For example, if you wish to install the current setup without +rebuilding the code (which the default install target always does) and +only installing those files that have changed, you would run this +command in the build tree: + +```console +$ meson install --no-rebuild --only-changed +``` + +## Finer control over install locations + +Sometimes it is necessary to only install a subsection of output files +or install them in different directories. This can be done by +specifying `install_dir` as an array rather than a single string. The +array must have as many items as there are outputs and each entry +specifies how the corresponding output file should be installed. For +example: + +```meson +custom_target(... + output: ['file1', 'file2', 'file3'], + install_dir: ['path1', false, 'path3'], + ... +) +``` + +In this case `file1` would be installed to `/prefix/path1/file1`, +`file2` would not be installed at all and `file3` would be installed +to `/prefix/path3/file3'. diff --git a/meson/docs/markdown/Java.md b/meson/docs/markdown/Java.md new file mode 100644 index 000000000..f0dd40380 --- /dev/null +++ b/meson/docs/markdown/Java.md @@ -0,0 +1,24 @@ +--- +title: Java +short-description: Compiling Java programs +... + +# Compiling Java applications + +Meson has experimental support for compiling Java programs. The basic +syntax consists of only one function and would be used like this: + +```meson +project('javaprog', 'java') + +myjar = jar('mything', 'com/example/Prog.java', + main_class : 'com.example.Prog') + +test('javatest', myjar) +``` + +However note that Meson places limitations on how you lay out your code. + +* all Java files for a jar must be under the subdirectory the jar definition is in +* all Java files must be in paths specified by their package, e.g. a class called `com.example.Something` must be in a Java file situated at `com/example/Something.java`. +* Meson only deals with jar files, you cannot poke individual class files (unless you do so manually) diff --git a/meson/docs/markdown/Keyval-module.md b/meson/docs/markdown/Keyval-module.md new file mode 100644 index 000000000..96d9c1566 --- /dev/null +++ b/meson/docs/markdown/Keyval-module.md @@ -0,0 +1,58 @@ +--- +short-description: Keyval module +authors: + - name: Mark Schulte, Paolo Bonzini + years: [2017, 2019] + has-copyright: false +... + +# keyval module + +This module parses files consisting of a series of `key=value` lines. +One use of this module is to load kconfig configurations in Meson +projects. + +**Note**: this does not provide kconfig frontend tooling to generate a +configuration. You still need something such as kconfig frontends (see +link below) to parse your Kconfig files, and then (after you've chosen +the configuration options), output a ".config" file. + + [kconfig-frontends]: http://ymorin.is-a-geek.org/projects/kconfig-frontends + +## Usage + +The module may be imported as follows: + +``` meson +keyval = import('keyval') +``` + +The following functions will then be available as methods on the object +with the name `keyval`. You can, of course, replace the name +`keyval` with anything else. + +### keyval.load() + +This function loads a file consisting of a series of `key=value` lines +and returns a dictionary object. + +`keyval.load()` makes no attempt at parsing the values in the file. In +particular boolean and integer values will be represented as strings, +and strings will keep any quoting that is present in the input file. 
+It can be useful to create a +[`configuration_data()`](#configuration_data) object from the +dictionary and use methods such as `get_unquoted()`. + +Kconfig frontends usually have ".config" as the default name for the +configuration file. However, placing the configuration file in the +source directory limits the user to one configuration per source +directory. In order to allow separate configurations for each build +directory, as is the Meson standard, `meson.build` should not hardcode +".config" as the argument to `keyval.load()`, and should instead make +the argument to `keyval.load()` a [project build +option](Build-options.md). + +* The first (and only) argument is the path to the configuration file to + load (usually ".config"). + +**Returns**: a [dictionary object](Reference-manual.md#dictionary-object). diff --git a/meson/docs/markdown/Localisation.md b/meson/docs/markdown/Localisation.md new file mode 100644 index 000000000..a826f8dfd --- /dev/null +++ b/meson/docs/markdown/Localisation.md @@ -0,0 +1,86 @@ +--- +short-description: Localization with GNU Gettext +... + +# Localisation + +Localising your application with GNU gettext takes a little effort but +is quite straightforward. We'll create a `po` subdirectory at your +project root directory for all the localisation info. + +## Generating .pot and .po files + +In your main meson.build file include the `po` subdirectory in the build process. + + subdir('po') + +In this `po` subdirectory we need: +- `LINGUAS`: Space separated list of languages +- `POTFILES`: List of source files to scan for translatable strings. +- `meson.build`: Localization specific Meson file + +### LINGUAS + +File with space separated list of languages. A sample LINGUAS might look like this. + + aa ab ae af + +### POTFILES + +File that lists all the source files that gettext should scan in order +to find strings to translate. The syntax of the file is one line per +source file and the line must contain the relative path from source +root. A sample POTFILES might look like this. + + src/file1.c + src/file2.c + src/subdir/file3.c + include/mything/somefile.h + +### meson.build + +Localization specific Meson file. It imports and uses the `i18n` +module. If it has not been defined before, it needs to define the `GETTEXT_PACKAGE` +global. + +```meson +i18n = import('i18n') +# define GETTEXT_PACKAGE +add_project_arguments('-DGETTEXT_PACKAGE="intltest"', language:'c') +i18n.gettext(meson.project_name(), + args: '--directory=' + meson.source_root() +) +``` + +The first command imports the `i18n` module that provides gettext +features. The fourth line does the actual invocation. The first +argument is the gettext package name. This causes two things to +happen. The first is that Meson will generate binary mo files and put +them in their proper locations when doing an install. The second is +that it creates a build rule to regenerate the main pot file. If you +are using the Ninja backend, the rebuild can be invoked as shown in the +following sections. + +### generate .pot file + +Then we need to generate the main pot file. The potfile can have any +name but is usually the name of the gettext package. Let's say the +project is called *intltest*. In this case the corresponding pot file +would be called `intltest.pot`. + +Run the following command from your build folder to generate the pot +file. It is recommended to inspect it manually afterwards and fill in +e.g. proper copyright and contact information.
+ +```console +$ meson compile intltest-pot +``` + +### generate .po files + +For each language listed in the array above we need a corresponding +`.po` file. Those can be generated by running the following command +from your build folder. + +```console +$ meson compile intltest-update-po +``` diff --git a/meson/docs/markdown/Machine-files.md b/meson/docs/markdown/Machine-files.md new file mode 100644 index 000000000..631408240 --- /dev/null +++ b/meson/docs/markdown/Machine-files.md @@ -0,0 +1,359 @@ +# Cross and Native File reference + +Cross and native files are nearly identical, but not completely. This +is the documentation on the common values used by both, for the +specific values of one or the other see the [cross +compilation](Cross-compilation.md) and [native +environments](Native-environments.md). + +*Changed in 0.56.0* Keys within sections are now case sensitive. This +is required to make project options work correctly. + +## Data Types + +There are four basic data types in a machine file: +- strings +- arrays +- booleans +- integers + +A string is specified single quoted: +```ini +[section] +option1 = 'false' +option2 = '2' +``` + +An array is enclosed in square brackets, and must consist of strings or booleans +```ini +[section] +option = ['value'] +``` + +A boolean must be either `true` or `false`, and unquoted. +```ini +option = false +``` + +An integer must be an unquoted numeric constant. +```ini +option = 42 +``` + +## Sections + +The following sections are allowed: +- constants +- binaries +- paths +- properties +- cmake +- project options +- built-in options + +### constants + +*Since 0.56.0* + +String and list concatenation is supported using the `+` operator, +joining paths is supported using the `/` operator. Entries defined in +the `[constants]` section can be used in any other section (they are +always parsed first), entries in any other section can be used only +within that same section and only after it has been defined. + +```ini +[constants] +toolchain = '/toolchain' +common_flags = ['--sysroot=' + toolchain / 'sysroot'] + +[properties] +c_args = common_flags + ['-DSOMETHING'] +cpp_args = c_args + ['-DSOMETHING_ELSE'] + +[binaries] +c = toolchain / 'gcc' +``` + +This can be useful with cross file composition as well. A generic +cross file could be composed with a platform specific file where +constants are defined: + +```ini +# aarch64.ini +[constants] +arch = 'aarch64-linux-gnu' +``` + +```ini +# cross.ini +[binaries] +c = arch + '-gcc' +cpp = arch + '-g++' +strip = arch + '-strip' +pkgconfig = arch + '-pkg-config' +... +``` + +This can be used as `meson setup --cross-file aarch64.ini --cross-file +cross.ini builddir`. + +Note that file composition happens before the parsing of values. The +example below results in `b` being `'HelloWorld'`: + +```ini +# file1.ini: +[constants] +a = 'Foo' +b = a + 'World' +``` + +```ini +#file2.ini: +[constants] +a = 'Hello' +``` + +The example below results in an error when file1.ini is included +before file2.ini because `b` would be defined before `a`: + +```ini +# file1.ini: +[constants] +b = a + 'World' +``` + +```ini +#file2.ini: +[constants] +a = 'Hello' +``` + +### Binaries + +The binaries section contains a list of binaries. These can be used +internally by Meson, or by the `find_program` function. + +These values must be either strings or an array of strings + +Compilers and linkers are defined here using `` and `_ld`. +`_ld` is special because it is compiler specific. 
For compilers +like gcc and clang which are used to invoke the linker this is a value +to pass to their "choose the linker" argument (-fuse-ld= in this +case). For compilers like MSVC and Clang-Cl, this is the path to a +linker for Meson to invoke, such as `link.exe` or `lld-link.exe`. +Support for `ld` is *new in 0.53.0* + +*changed in 0.53.1* the `ld` variable was replaced by `_ld`, +because it regressed a large number of projects. in 0.53.0 the `ld` +variable was used instead. + +Native example: + +```ini +c = '/usr/bin/clang' +c_ld = 'lld' +sed = 'C:\\program files\\gnu\\sed.exe' +llvm-config = '/usr/lib/llvm8/bin/llvm-config' +``` + +Cross example: + +```ini +c = ['ccache', '/usr/bin/i586-mingw32msvc-gcc'] +cpp = ['ccache', '/usr/bin/i586-mingw32msvc-g++'] +c_ld = 'gold' +cpp_ld = 'gold' +ar = '/usr/i586-mingw32msvc/bin/ar' +strip = '/usr/i586-mingw32msvc/bin/strip' +pkgconfig = '/usr/bin/i586-mingw32msvc-pkg-config' +``` + +An incomplete list of internally used programs that can be overridden +here is: + +- cmake +- cups-config +- gnustep-config +- gpgme-config +- libgcrypt-config +- libwmf-config +- llvm-config +- pcap-config +- pkgconfig +- sdl2-config +- wx-config (or wx-3.0-config or wx-config-gtk) + +### Paths and Directories + +*Deprecated in 0.56.0* use the built-in section instead. + +As of 0.50.0 paths and directories such as libdir can be defined in +the native and cross files in a paths section. These should be +strings. + +```ini +[paths] +libdir = 'mylibdir' +prefix = '/my prefix' +``` + +These values will only be loaded when not cross compiling. Any +arguments on the command line will override any options in the native +file. For example, passing `--libdir=otherlibdir` would result in a +prefix of `/my prefix` and a libdir of `otherlibdir`. + +### Properties + +*New in native files in 0.54.0*, always in cross files. + +In addition to special data that may be specified in cross files, this +section may contain random key value pairs accessed using the +`meson.get_external_property()`, or `meson.get_cross_property()`. + +*Changed in 0.56.0* putting `_args` and `_link_args` in +the properties section has been deprecated, and should be put in the +built-in options section. + +#### Supported properties + +This is a non exhaustive list of supported variables in the `[properties]` +section. + +- `cmake_toolchain_file` specifies an absolute path to an already existing + CMake toolchain file that will be loaded with `include()` as the last + instruction of the automatically generated CMake toolchain file from Meson. + (*new in 0.56.0*) +- `cmake_defaults` is a boolean that specifies whether Meson should automatically + generate default toolchain variables from other sections (`binaries`, + `host_machine`, etc.) in the machine file. Defaults are always overwritten + by variables set in the `[cmake]` section. The default is `true`. (*new in 0.56.0*) +- `cmake_skip_compiler_test` is an enum that specifies when Meson should + automatically generate toolchain variables to skip the CMake compiler + sanity checks. This only has an effect if `cmake_defaults` is `true`. + Supported values are `always`, `never`, `dep_only`. The default is `dep_only`. + (*new in 0.56.0*) +- `cmake_use_exe_wrapper` is a boolean that controls whether to use the + `exe_wrapper` specified in `[binaries]` to run generated executables in CMake + subprojects. This setting has no effect if the `exe_wrapper` was not specified. + The default value is `true`. 
(*new in 0.56.0*) +- `java_home` is an absolute path pointing to the root of a Java installation. + +### CMake variables + +*New in 0.56.0* + +All variables set in the `[cmake]` section will be added to the +generated CMake toolchain file used for both CMake dependencies and +CMake subprojects. The type of each entry must be either a string or a +list of strings. + +**Note:** All occurrences of `\` in the value of all keys will be replaced with + a `/` since CMake has a lot of issues with correctly escaping `\` when + dealing with variables (even in cases where a path in `CMAKE_C_COMPILER` + is correctly escaped, CMake will still trip up internally for instance). + + A custom toolchain file should be used (via the `cmake_toolchain_file` + property) if `\` support is required. + +```ini +[cmake] + +CMAKE_C_COMPILER = '/usr/bin/gcc' +CMAKE_CXX_COMPILER = 'C:\\user\\bin\\g++' +CMAKE_SOME_VARIABLE = ['some', 'value with spaces'] +``` + +For instance, the `[cmake]` section from above will generate the +following code in the CMake toolchain file: + +```cmake +set(CMAKE_C_COMPILER "/usr/bin/gcc") +set(CMAKE_CXX_COMPILER "C:/usr/bin/g++") +set(CMAKE_SOME_VARIABLE "some" "value with spaces") +``` + +### Project specific options + +*New in 0.56.0* + +Path options are not allowed, those must be set in the `[paths]` +section. + +Project specific options can be set in a cross or native file +using the `[project options]` section of the specific file +(if doing a cross build the options from the native file will be +ignored). + +For setting options in subprojects use the `[<subproject>:project +options]` section instead. + +```ini +[project options] +build-tests = true + +[zlib:project options] +build-tests = false +``` + +### Meson built-in options + +Meson built-in options can be set the same way: + +```ini +[built-in options] +c_std = 'c99' +``` + +You can set some Meson built-in options on a per-subproject basis, +such as `default_library` and `werror`. The order of precedence is: + +1) Command line +2) Machine file +3) Build system definitions + +```ini +[zlib:built-in options] +default_library = 'static' +werror = false +``` + +Options set on a per-subproject basis will inherit the option from the +parent if the parent has a setting but the subproject doesn't, even +when there is a default set in the Meson language. + +```ini +[built-in options] +default_library = 'static' +``` + +will make subprojects use default_library as static. + +Some options can be set on a per-machine basis (in other words, the +value of the build machine can be different from the host machine in a +cross compile). In these cases the values from both a cross file and a +native file are used. + +An incomplete list of options is: +- pkg_config_path +- cmake_prefix_path + +## Loading multiple machine files + +Native files allow layering (cross files can be layered since Meson +0.52.0). More than one file can be loaded, with values from a previous +file being overridden by the next. The intention of this is not to +override values, but to allow composing files. This composition is done by +passing the command line argument multiple times: + +```console +meson setup builddir/ --cross-file first.ini --cross-file second.ini --cross-file third.ini +``` + +In this case `first.ini` will be loaded, then `second.ini`, with +values from `second.ini` replacing `first.ini`, and so on.
+ +For example, if there is a project using C and C++, python 3.4-3.7, +and LLVM 5-7, and it needs to build with clang 5, 6, and 7, and gcc +5.x, 6.x, and 7.x; expressing all of these configurations in +monolithic configurations would result in 81 different native files. +By layering them, it can be expressed by just 12 native files. diff --git a/meson/docs/markdown/Manual.md b/meson/docs/markdown/Manual.md new file mode 100644 index 000000000..988efa162 --- /dev/null +++ b/meson/docs/markdown/Manual.md @@ -0,0 +1,9 @@ +--- +short-description: User manual for Meson +... + +# Manual + +This is the user manual for Meson. It currently tracks the state of +Git head. If you are using an older version, some of the information +here might not work for you. diff --git a/meson/docs/markdown/Meson-sample.md b/meson/docs/markdown/Meson-sample.md new file mode 100644 index 000000000..f504cd989 --- /dev/null +++ b/meson/docs/markdown/Meson-sample.md @@ -0,0 +1,80 @@ +--- +short-description: Simple project step by step explanation +... + +# Meson sample + +A Meson file that builds an executable looks like this. + +```meson +project('simple', 'c') +executable('myexe', 'source.c') +``` + +All Meson build definitions begin with the `project` command. It +specifies the name of the project and what programming languages it +uses. Here the project is called *simple* and it uses only the C +programming language. All strings are single-quoted. + +On the next line we define a *build target*, in this case an +executable called *myexe*. It consists of one source file. This is all +the code that a user needs to write to compile an executable with +Meson. + +Variables are fully supported. The above code snippet could also have +been declared like this. + +```meson +project('simple', 'c') +src = 'source.c' +executable('myexe', src) +``` + +Most executables consist of more than one source file. The easiest way +to deal with this is to put them in an array. + +```meson +project('simple', 'c') +src = ['source1.c', 'source2.c', 'source3.c'] +executable('myexe', src) +``` + +Meson also supports the notion of *keyword arguments*. Indeed most +arguments to functions can only be passed using them. The above +snippet could be rewritten like this. + +```meson +project('simple', 'c') +src = ['source1.c', 'source2.c', 'source3.c'] +executable('myexe', sources : src) +``` + +These two formats are equivalent and choosing one over the other is +mostly a question of personal preference. + +The `executable` command actually returns an *executable object*, +which represents the given build target. It can be passed on to other +functions, like this. + +```meson +project('simple', 'c') +src = ['source1.c', 'source2.c', 'source3.c'] +exe = executable('myexe', src) +test('simple test', exe) +``` + +Here we create a unit test called *simple test*, and which uses the +built executable. When the tests are run with the `meson test` +command, the built executable is run. If it returns zero, the test +passes. A non-zero return value indicates an error, which Meson will +then report to the user. + +A note to Visual Studio users +----- + +There's a slight terminology difference between Meson and Visual +Studio. A Meson *project* is the equivalent to a Visual Studio +*solution*. That is, the topmost thing that encompasses all things to +be built. A Visual Studio *project* on the other hand is the +equivalent of a Meson top level build target, such as an executable or +a shared library. 
diff --git a/meson/docs/markdown/MesonCI.md b/meson/docs/markdown/MesonCI.md new file mode 100644 index 000000000..65175afee --- /dev/null +++ b/meson/docs/markdown/MesonCI.md @@ -0,0 +1,43 @@ +# Meson CI setup + +This document is aimed at Meson contributors and documents the CI +setup used for testing Meson itself. The Meson project uses multiple +CI platforms for covering a wide range of target systems. + +## GitHub actions + +The configuration files for GitHub actions are located in +`.github/workflows`. Here, all [images](#docker-images) are tested +with the full `run_tests.py` run. Additionally, some other, smaller, +tests are run. + +## Docker images + +The Linux docker images are automatically built and uploaded by GitHub +actions. An image rebuild is triggered when any of the image definition +files are changed (in `ci/ciimage`) in the master branch. +Additionally, the images are also updated weekly. + +Each docker image has one corresponding directory in `ci/ciimage` with +an `image.json` and an `install.sh`. + +### Image generation + +There are no manual Dockerfiles. Instead the Dockerfile is +automatically generated by the `build.py` script. This is done to +ensure that all images have the same layout and can all be built and +tested automatically. + +The Dockerfile is generated from the `image.json` file and basically +only adds a few common files and runs the `install.sh` script which +should contain all distribution specific setup steps. The `common.sh` +can be sourced via `source /ci/common.sh` to access some shared +functionality. + +To generate the image run `build.py -t build <image>`. A generated +image can be tested with `build.py -t test <image>`. + +### Common image setup + +Each docker image has a `/ci` directory with an `env_vars.sh` script. +This script has to be sourced before running the Meson test suite. diff --git a/meson/docs/markdown/Mixing-build-systems.md b/meson/docs/markdown/Mixing-build-systems.md new file mode 100644 index 000000000..683006410 --- /dev/null +++ b/meson/docs/markdown/Mixing-build-systems.md @@ -0,0 +1,55 @@ +# Meson's policy on mixing multiple build systems in one build directory + +Meson has been designed with the principle that all dependencies are +either provided by "the platform" via a mechanism such as Pkg-Config +or that they are built as Meson subprojects under the main project. +There are several projects that would like to mix build systems, that +is, build dependencies in the same build directory as the other build +system by having one build system call the other. The build +directories do not necessarily need to be inside each other, but that +is the common case. + +This page lists the Meson project's stance on mixing build systems. +The tl/dr version is that while we do provide some functionality for +this use case, it only works for simple cases. Anything more complex +cannot be made reliable and trying to do that would burden Meson +developers with an effectively infinite maintenance burden. Thus these +use cases are not guaranteed to work, and even if a project using them +works today there are no guarantees that it will work in any future +version. + +## The definition of "build system mixing" + +For the purposes of this page, mixing build systems means any and all +mechanisms where one build system uses build artifacts from a +different build system's build directory in any way. + +Note that this definition does not specify what the dependencies are +and how they are built, only how they are consumed.
For example +suppose you have a standalone dependency library that builds with +build system X. In this case having Meson call the build system to +build the dependency at build time would be interpreted as mixing +build systems. On the other hand a "Flatpak-like" approach of building +and installing the library with an external mechanism and consuming it +via a standard build-system agnostic method such as Pkg-Config would +not be considered build system mixing. Use of uninstalled-pkgconfig +files is considered mixing, though. + +## What does this mean for support and compatibility? + +The Meson project will not take on any maintenance burden to ensure +anything other than the simple builds setups as discussed above will +work. Nor will we make changes to support these use cases that would +worsen the user experience of users of plain Meson. This includes, but +is not limited to, the following: + +- Any changes in other build systems that cause mixed project breakage + will not be considered a bug in Meson. + +- Breakages in mixed build projects will not be considered regressions + and such problems will never be considered release blockers, + regardless of what the underlying issue is. + +- Any use case that would require major changes in Meson to work + around missing or broken functionality in the other build system is + not supported. These issues must be fixed upstream. diff --git a/meson/docs/markdown/Modules.md b/meson/docs/markdown/Modules.md new file mode 100644 index 000000000..a96668289 --- /dev/null +++ b/meson/docs/markdown/Modules.md @@ -0,0 +1,25 @@ +--- +short-description: Meson modules for common build operations +... + +# Modules + +In addition to core language features, Meson also provides a module +system aimed at providing helper methods for common build operations. +Using modules is simple, first you import them: + +```meson +mymod = import('somemodule') +``` + +After this you can use the returned object to use the functionality +provided: + +```meson +mymod.do_something('text argument') +``` + +Meson has a selection of modules to make common requirements easy to +use. Modules can be thought of like the standard library of a +programming language. Currently Meson provides the modules listed on +subpages. diff --git a/meson/docs/markdown/Native-environments.md b/meson/docs/markdown/Native-environments.md new file mode 100644 index 000000000..d79313224 --- /dev/null +++ b/meson/docs/markdown/Native-environments.md @@ -0,0 +1,50 @@ +--- +short-description: Setting up native compilation +... + +# Persistent native environments + +New in 0.49.0 + +Meson has [cross files for describing cross compilation +environments](Cross-compilation.md), for describing native +environments it has equivalent "native files". + +Natives describe the *build machine*, and can be used to override +properties of non-cross builds, as well as properties that are marked +as "native" in a cross build. + +There are a couple of reasons you might want to use a native file to +keep a persistent environment: + +* To build with a non-default native tool chain (such as clang instead of gcc) +* To use a non-default version of another binary, such as yacc, or llvm-config + +## Changing native file settings + +All of the rules about cross files and changed settings apply to native files +as well, see [here](Cross-compilation.md#changing-cross-file-settings) + +## Defining the environment + +See the [config-files section](Machine-files.md), for options shared by cross +and native files. 
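+ +Nothing changes on the build definition side: a binary overridden in the native file's `[binaries]` section is picked up by ordinary lookups. A minimal sketch, assuming a native file that overrides `llvm-config`: + +```meson +# With a native file containing: +#   [binaries] +#   llvm-config = '/opt/llvm8/bin/llvm-config' +# this lookup transparently returns the overridden binary. +llvm_config = find_program('llvm-config') +```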
+ +## Native file locations + +Like cross files, native files may be installed to user or system wide +locations, defined as: + - $XDG_DATA_DIRS/meson/native + (/usr/local/share/meson/native:/usr/share/meson/native if $XDG_DATA_DIRS is + undefined) + - $XDG_DATA_HOME/meson/native ($HOME/.local/share/meson/native if + $XDG_DATA_HOME is undefined) + +The order of locations tried is as follows: + - A file relative to the local dir + - The user local location + - The system wide locations in order + +These files are not intended to be shipped by distributions, unless +they are specifically for distribution packaging, they are mainly +intended for developers. diff --git a/meson/docs/markdown/Overview.md b/meson/docs/markdown/Overview.md new file mode 100644 index 000000000..7bee93789 --- /dev/null +++ b/meson/docs/markdown/Overview.md @@ -0,0 +1,58 @@ +--- +short-description: Overview of the Meson build system +... + +# Overview + +Meson is a build system that is designed to be as user-friendly as +possible without sacrificing performance. The main tool for this is a +custom language that the user uses to describe the structure of his +build. The main design goals of this language has been simplicity, +clarity and conciseness. Much inspiration was drawn from the Python +programming language, which is considered very readable, even to +people who have not programmed in Python before. + +Another main idea has been to provide first class support for modern +programming tools and best practices. These include features as varied +as unit testing, code coverage reporting, precompiled headers and the +like. All of these features should be immediately available to any +project using Meson. The user should not need to hunt for third party +macros or write shell scripts to get these features. They should just +work out of the box. + +This power should not come at the expense of limited usability. Many +software builds require unorthodox steps. A common example is that you +first need to build a custom tool and then use that tool to generate +more source code to build. This functionality needs to be supported +and be as easy to use as other parts of the system. + +Terminology +-- + +Meson follows the overall structure of other popular build systems, +such as CMake and GNU Autotools. This means that the build is divided +into two discrete steps: *configure step* and *build step*. The first +step inspects the system, checks for dependencies and does all other +steps necessary to configure the build. It then generates the actual +build system. The second step is simply executing this generated build +system. The end result is a bunch of *build targets*, which are +usually executables and shared and static libraries. + +The directory that contains the source code is called the *source +directory*. Correspondingly the directory where the output is written +is called the *build directory*. In other build systems it is common +to have these two be the same directory. This is called an *in-source +build*. The case where the build directory is separate is called an +*out-of-source build*. + +What sets Meson apart from most build systems is that it enforces a +separate build directory. All files created by the build system are +put in the build directory. It is actually impossible to do an +in-source build. For people used to building inside their source tree, +this may seem like a needless complication. However there are several +benefits to doing only out-of-source builds. 
These will be explained +in the next chapter. + +When the source code is built, a set of *unit tests* is usually +run. They ensure that the program is working as it should. If it does, +the build result can be *installed* after which it is ready for use. diff --git a/meson/docs/markdown/Performance-comparison.md b/meson/docs/markdown/Performance-comparison.md new file mode 100644 index 000000000..2f47d114a --- /dev/null +++ b/meson/docs/markdown/Performance-comparison.md @@ -0,0 +1,7 @@ +# Performance comparison + +This page lists experiments comparing build performance between Meson +and other build systems. + +- [Simple comparison](Simple-comparison.md) +- [ARM performance test](ARM-performance-test.md) diff --git a/meson/docs/markdown/Pkg-config-files.md b/meson/docs/markdown/Pkg-config-files.md new file mode 100644 index 000000000..1fbef0bab --- /dev/null +++ b/meson/docs/markdown/Pkg-config-files.md @@ -0,0 +1,30 @@ +# Pkg config files + +[Pkg-config](https://en.wikipedia.org/wiki/Pkg-config) is a way for +shared libraries to declare the compiler flags needed to use them. +There are two different ways of generating Pkg-config files in Meson. +The first way is to build them manually with the `configure_file` +command. The second way is to use Meson's built in Pkg-config file +generator. The difference between the two is that the latter is very +simple and meant for basic use cases. The former should be used when +you need to provide a more customized solution. + +In this document we describe the simple generator approach. It is used in the following way. + +```meson +pkg = import('pkgconfig') +libs = ... # the library/libraries users need to link against +h = ['.', ...] # subdirectories of ${prefix}/${includedir} to add to header path +pkg.generate(libraries : libs, + subdirs : h, + version : '1.0', + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.') +``` + +This causes a file called `simple.pc` to be created and placed into +the install directory during the install phase. + +More information on the pkg-config module and the parameters can be +found on the [pkgconfig-module](Pkgconfig-module.md) page. diff --git a/meson/docs/markdown/Pkgconfig-module.md b/meson/docs/markdown/Pkgconfig-module.md new file mode 100644 index 000000000..0d1e859eb --- /dev/null +++ b/meson/docs/markdown/Pkgconfig-module.md @@ -0,0 +1,127 @@ +# Pkgconfig module + +This module is a simple generator for +[pkg-config](https://pkg-config.freedesktop.org/) files. + +## Usage + +```meson +pkg = import('pkgconfig') +bar_dep = dependency('bar') +lib = library('foo', dependencies : [bar]) +pkg.generate(lib) +``` + +### pkg.generate() + +The generated file's properties are specified with the following +keyword arguments. + +- `description` a string describing the library, used to set the `Description:` field +- `extra_cflags` a list of extra compiler flags to be added to the + `Cflags` field after the header search path +- `filebase` the base name to use for the pkg-config file; as an + example, the value of `libfoo` would produce a pkg-config file called + `libfoo.pc` +- `install_dir` the directory to install to, defaults to the value of + option `libdir` followed by `/pkgconfig` +- `libraries` a list of built libraries (usually results of + shared_library) that the user needs to link against. Arbitrary strings can + also be provided and they will be added into the `Libs` field. 
Since 0.45.0 + dependencies of built libraries will be automatically added, see the + [Implicit dependencies](#implicit-dependencies) section below for the exact + rules. Since 0.58.0 custom_target() objects are supported as long as they are + linkable (has known extension such as `.a`, `.so`, etc). +- `libraries_private` list of built libraries or strings to put in the + `Libs.private` field. Since 0.45.0 dependencies of built libraries will be + automatically added, see the [Implicit dependencies](#implicit-dependencies) + section below for the exact rules. Since 0.58.0 custom_target() objects are + supported as long as they are linkable (has known extension such as `.a`, + `.so`, etc). +- `name` the name of this library, used to set the `Name:` field +- `subdirs` which subdirs of `include` should be added to the header + search path, for example if you install headers into + `${PREFIX}/include/foobar-1`, the correct value for this argument + would be `foobar-1` +- `requires` list of strings, pkgconfig-dependencies or libraries that + `pkgconfig.generate()` was used on to put in the `Requires` field +- `requires_private` same as `requires` but for `Requires.private` field + field +- `url` a string with a url for the library +- `variables` a list of strings with custom variables to add to the + generated file. The strings must be in the form `name=value` and may + reference other pkgconfig variables, + e.g. `datadir=${prefix}/share`. The names `prefix`, `libdir` and + `includedir` are reserved and may not be used. *Since 0.56.0* it can also be a + dictionary but ordering of Meson dictionaries are not guaranteed, which could + cause issues when some variables reference other variables. + Spaces in values are escaped with `\`, this is required in the case the value is + a path that and is used in `cflags` or `libs` arguments. *Since 0.59.0* if + escaping is not desired (e.g. space separate list of values) `unescaped_variables` + keyword argument should be used instead. +- `uninstalled_variables` used instead of the `variables` keyword argument, when + generating the uninstalled pkg-config file. Since *0.54.0* + Spaces in values are escaped with `\`, this is required in the case the value is + a path that and is used in `cflags` or `libs` arguments. *Since 0.59.0* if + escaping is not desired (e.g. space separate list of values) + `unescaped_uninstalled_variables` keyword argument should be used instead. +- `version` a string describing the version of this library, used to set the + `Version:` field. (*since 0.46.0*) Defaults to the project version if unspecified. +- `d_module_versions` a list of module version flags used when compiling + D sources referred to by this pkg-config file +- `dataonly` field. (*since 0.54.0*) this is used for architecture-independent + pkg-config files in projects which also have architecture-dependent outputs. +- `conflicts` (*since 0.36.0, incorrectly issued a warning prior to 0.54.0*) list of strings to be put in the `Conflicts` field. + +Since 0.46 a `StaticLibrary` or `SharedLibrary` object can optionally +be passed as first positional argument. If one is provided a default +value will be provided for all required fields of the pc file: +- `install_dir` is set to `pkgconfig` folder in the same location than the provided library. +- `description` is set to the project's name followed by the library's name. +- `name` is set to the library's name. + +Since 0.54.0 uninstalled pkg-config files are generated as well. They +are located in `/meson-uninstalled/`. 
It is sometimes +useful to build projects against libraries built by Meson without +having to install them into a prefix. In order to do so, just set +`PKG_CONFIG_PATH=/meson-uninstalled` before building your +application. That will cause pkg-config to prefer those +`-uninstalled.pc` files and find libraries and headers from the Meson +builddir. This is an experimental feature provided on a best-effort +basis, it might not work in all use-cases. + +### Implicit dependencies + +The exact rules followed to find dependencies that are implicitly +added into the pkg-config file have evolved over time. Here are the +rules as of Meson *0.49.0*, previous versions might have slightly +different behaviour. + +- Not found libraries or dependencies are ignored. +- Libraries and dependencies are private by default (i.e. added into + `Requires.private:` or `Libs.private:`) unless they are explicitly added in + `libraries` or `requires` keyword arguments, or is the main library (first + positional argument). +- Libraries and dependencies will be de-duplicated, if they are added in both + public and private (e.g `Requires:` and `Requires.private:`) it will be removed + from the private list. +- Shared libraries (i.e. `shared_library()` and **NOT** `library()`) add only + `-lfoo` into `Libs:` or `Libs.private:` but their dependencies are not pulled. + This is because dependencies are only needed for static link. +- Other libraries (i.e. `static_library()` or `library()`) add `-lfoo` into `Libs:` + or `Libs.private:` and recursively add their dependencies into `Libs.private:` or + `Requires.private:`. +- Dependencies provided by pkg-config are added into `Requires:` or + `Requires.private:`. If a version was specified when declaring that dependency + it will be written into the generated file too. +- The threads dependency (i.e. `dependency('threads')`) adds `-pthread` into + `Libs:` or `Libs.private:`. +- Internal dependencies (i.e. + `declare_dependency(compiler_args : '-DFOO', link_args : '-Wl,something', link_with : foo)`) + add `compiler_args` into `Cflags:` if public, `link_args` and `link_with` into + `Libs:` if public or `Libs.private:` if private. +- Other dependency types add their compiler arguments into `Cflags:` if public, + and linker arguments into `Libs:` if public or `Libs.private:` if private. +- Once a pkg-config file is generated for a library using `pkg.generate(mylib)`, + any subsequent call to `pkg.generate()` where mylib appears, will generate a + `Requires:` or `Requires.private` instead of a `Libs:` or `Libs.private:`. diff --git a/meson/docs/markdown/Playground.md b/meson/docs/markdown/Playground.md new file mode 100644 index 000000000..906197841 --- /dev/null +++ b/meson/docs/markdown/Playground.md @@ -0,0 +1,35 @@ +# playground + +This page is *not* part of official documentation. It exists merely +for testing new stuff for the wiki. + +## Ref manual reformat + +The current format is not very readable. We should have something more +like what +[glib](https://developer.gnome.org/glib/stable/glib-Hash-Tables.html) +or [Python](https://docs.python.org/3/library/os.html) do. + +Here's a first proposal. + + project(, + , + version : , + subproject_dir : , + meson_version : , + license : , + default_options : , + +Longer descriptions of arguments go here. 
+ +Take two: + +## project + + + + version : + subproject_dir : + meson_version : + license : + default_options : diff --git a/meson/docs/markdown/Porting-from-autotools.md b/meson/docs/markdown/Porting-from-autotools.md new file mode 100644 index 000000000..34e1bbd3a --- /dev/null +++ b/meson/docs/markdown/Porting-from-autotools.md @@ -0,0 +1,700 @@ +# Porting from Autotools + +This page uses +[AppStream-glib](https://github.com/hughsie/appstream-glib/) as an +example project. AppStream-Glib contains some libraries, GObject +Introspection data, tests, man pages, i18n, bash-completion with +optional flags to build/not build support for some things. + +Meson comes with a helper script `ac_converter` that you can use to +convert the basic autoconf checks for your project. + +## Configure.ac + +First let's look at `configure.ac` and write the same in +`meson.build`. + +```autoconf +AC_PREREQ(2.63) +``` +Meson doesn't provide the same function, so just ignore this. + +### Defining variables +`configure.ac`: +```autoconf +m4_define([as_major_version], [0]) +m4_define([as_minor_version], [3]) +m4_define([as_micro_version], [6]) +m4_define([as_version], + [as_major_version.as_minor_version.as_micro_version]) +``` +`meson.build`: +```meson + +as_version = meson.project_version() # set in project() below +ver_arr = as_version.split('.') + +as_major_version = ver_arr[0] +as_minor_version = ver_arr[1] +as_micro_version = ver_arr[2] +``` + +### Initializing project and setting compilers +`configure.ac`: +```autoconf +AC_INIT([appstream-glib],[as_version]) +AC_PROG_CC +``` +`meson.build`: +```meson +project('appstream-glib', 'c', version : '0.3.6') +``` +Note that this must be the first line of your `meson.build` file. + +### AC_SUBST +`configure.ac`: +```autoconf +AC_SUBST(AS_MAJOR_VERSION) +AC_SUBST(AS_MINOR_VERSION) +AC_SUBST(AS_MICRO_VERSION) +AC_SUBST(AS_VERSION) +``` + +You don't need to do the same in Meson, because it does not have two +different types of files (Makefile, configure). + +### Auto headers + +`configure.ac`: + +```autoconf +AC_CONFIG_HEADERS([config.h]) +``` + +`meson.build`: + +```meson +conf = configuration_data() +# Surround the version in quotes to make it a C string +conf.set_quoted('VERSION', as_version) +configure_file(output : 'config.h', + configuration : conf) +``` + +Meson doesn't support autoheaders, you need to manually specify what +do you want to see in header file, write `configuration_data()` object +and use `configure_file()`. + +You can also substitute variables of type `@SOME_VAR@` with configure +data. The details are on the [configuration page](Configuration.md). 
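+
+As a rough sketch of that kind of substitution (the template name
+`as-version.h.in` here is only an illustration, not a real
+AppStream-glib file), a template containing a line such as
+`#define AS_VERSION "@VERSION@"` could be filled in with:
+
+```meson
+configure_file(input : 'as-version.h.in',
+               output : 'as-version.h',
+               configuration : conf)
+```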
+ +### Finding programs + +`configure.ac`: + +```autoconf +AC_PATH_PROG(GPERF, [gperf], [no]) +if test x$GPERF != xno ; then + AC_DEFINE(HAVE_GPERF,[1], [Use gperf]) +fi +AM_CONDITIONAL(HAVE_GPERF, [test x$GPERF != xno]) +``` + +`meson.build`: + +```meson +gperf = find_program('gperf', required : false) +if gperf.found() + conf.set('HAVE_GPERF', 1) +endif +``` + +### Finding pkg-config modules + +`configure.ac`: + +```autoconf +PKG_CHECK_MODULES(SOUP, libsoup-2.4 >= 2.24) +``` + +`meson.build`: + +```meson +soup = dependency('libsoup-2.4', version : '>= 2.24') +``` + +### Arguments + +`configure.ac`: + +```autoconf +AC_ARG_ENABLE(dep11, AS_HELP_STRING([--enable-dep11],[enable DEP-11]), + enable_dep11=$enableval,enable_dep11=yes) +AM_CONDITIONAL(HAVE_DEP11, test x$enable_dep11 = xyes) +if test x$enable_dep11 = xyes; then + AC_CHECK_HEADER(yaml.h, [], [AC_MSG_ERROR([No yaml.h])]) + YAML_LIBS="-lyaml" + AC_SUBST(YAML_LIBS) + AC_DEFINE(AS_BUILD_DEP11,1,[Build DEP-11 code]) +fi +``` + +`meson.build`: + +```meson +if get_option('enable-dep11') + yaml = dependency('yaml-0.1') + conf.set('AS_BUILD_DEP11', 1) +else + yaml = dependency('yaml-0.1', required : false) +endif +``` + +`meson_options.txt`: + +```meson +option('enable-dep11', type : 'boolean', value : true, description : 'enable DEP-11') +``` + +## Makefile.am + +Next step is `Makefile.am`. In Meson you don't need to have other +file, you still use `meson.build`. + +### Sub directories + +`Makefile.am`: + +```make +SUBDIRS = \ + libappstream-glib +``` + +`meson.build`: + +```meson +subdir('libappstream-glib') +``` + +### *CLEANFILES, EXTRA_DIST, etc. + +`Makefile.am`: + +```make +DISTCLEANFILES = \ + appstream-glib-*.tar.xz + +MAINTAINERCLEANFILES = \ + *~ \ + ABOUT-NLS \ + aclocal.m4 \ + ChangeLog \ + compile \ + config.guess \ + config.h.* \ + config.rpath + +EXTRA_DIST = \ + COPYING \ + MAINTAINERS \ + AUTHORS \ + README.md \ + NEWS \ + autogen.sh \ + config.h +``` + +In Meson you don't need have `*CLEANFILES`, because in Meson you are +building in temporary directory (usually called `build`), you manually +removing it. You also not need to use `EXTRA_DIST`, because you will +make tarballs via `git archive` or something like this. 
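+
+For instance, one way to produce a release tarball is a plain `git
+archive` invocation along these lines (the archive name and tag are
+just placeholders):
+
+```console
+$ git archive --format=tar.gz --prefix=appstream-glib-0.3.6/ \
+      -o appstream-glib-0.3.6.tar.gz 0.3.6
+```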
+ +### glib-compile-resources + +`Makefile.am`: +```make +as-resources.c: appstream-glib.gresource.xml \ + as-stock-icons.txt \ + as-license-ids.txt \ + as-blacklist-ids.txt \ + as-category-ids.txt \ + as-environment-ids.txt + $(AM_V_GEN) \ + glib-compile-resources \ + --sourcedir=$(srcdir) \ + --sourcedir=$(top_builddir)/data \ + --target=$@ \ + --generate-source \ + --c-name as \ + $(srcdir)/appstream-glib.gresource.xml +as-resources.h: appstream-glib.gresource.xml \ + as-stock-icons.txt \ + as-license-ids.txt \ + as-blacklist-ids.txt \ + as-category-ids.txt \ + as-environment-ids.txt + $(AM_V_GEN) \ + glib-compile-resources \ + --sourcedir=$(srcdir) \ + --sourcedir=$(top_builddir)/data \ + --target=$@ \ + --generate-header \ + --c-name as \ + $(srcdir)/appstream-glib.gresource.xml + +BUILT_SOURCES = \ + as-resources.c \ + as-resources.h +``` + +`meson.build`: + +```meson +asresources = gnome.compile_resources( + 'as-resources', 'appstream-glib.gresource.xml', + source_dir : '.', + c_name : 'as') +``` + +### Headers + +`Makefile.am`: + +```make +libappstream_glib_includedir = $(includedir)/libappstream-glib +libappstream_glib_include_HEADERS = \ + appstream-glib.h \ + as-app.h \ + as-bundle.h \ + as-enums.h \ + as-icon.h \ + as-image.h \ + as-inf.h \ + as-node.h \ + as-problem.h \ + as-provide.h \ + as-release.h \ + as-screenshot.h \ + as-store.h \ + as-tag.h \ + as-utils.h \ + as-version.h +``` + +`meson.build`: + +```meson +headers = [ + 'appstream-glib.h', + 'as-app.h', + 'as-bundle.h', + 'as-enums.h', + 'as-icon.h', + 'as-image.h', + 'as-inf.h', + 'as-node.h', + 'as-problem.h', + 'as-provide.h', + 'as-release.h', + 'as-screenshot.h', + 'as-store.h', + 'as-tag.h', + 'as-utils.h', + 'as-version.h'] +install_headers(headers, subdir : 'libappstream-glib') +``` + +### Libraries + +`Makefile.am`: +```make +lib_LTLIBRARIES = \ + libappstream-glib.la +libappstream_glib_la_SOURCES = \ + as-app.c \ + as-app-desktop.c \ + as-app-inf.c \ + as-app-private.h \ + as-app-validate.c \ + as-bundle.c \ + as-bundle-private.h \ + as-cleanup.h \ + as-enums.c \ + as-icon.c \ + as-icon-private.h \ + as-image.c \ + as-image-private.h \ + as-inf.c \ + as-inf.h \ + as-node.c \ + as-node-private.h \ + as-problem.c \ + as-problem.h \ + as-provide.c \ + as-provide-private.h \ + as-release.c \ + as-release-private.h \ + as-resources.c \ + as-resources.h \ + as-screenshot.c \ + as-screenshot-private.h \ + as-store.c \ + as-tag.c \ + as-utils.c \ + as-utils-private.h \ + as-version.h \ + as-yaml.c \ + as-yaml.h + +libappstream_glib_la_LIBADD = \ + $(GLIB_LIBS) \ + $(GDKPIXBUF_LIBS) \ + $(LIBARCHIVE_LIBS) \ + $(SOUP_LIBS) \ + $(YAML_LIBS) + +libappstream_glib_la_LDFLAGS = \ + -version-info $(LT_CURRENT):$(LT_REVISION):$(LT_AGE) \ + -export-dynamic \ + -no-undefined \ + -export-symbols-regex '^as_.*' +``` + +`meson.build`: + +```meson +sources = [ + 'as-app.c', + 'as-app-desktop.c', + 'as-app-inf.c', + 'as-app-private.h', + 'as-app-validate.c', + 'as-bundle.c', + 'as-bundle-private.h', + 'as-cleanup.h', + 'as-enums.c', + 'as-icon.c', + 'as-icon-private.h', + 'as-image.c', + 'as-image-private.h', + 'as-inf.c', + 'as-inf.h', + 'as-node.c', + 'as-node-private.h', + 'as-problem.c', + 'as-problem.h', + 'as-provide.c', + 'as-provide-private.h', + 'as-release.c', + 'as-release-private.h', + asresources, + 'as-screenshot.c', + 'as-screenshot-private.h', + 'as-store.c', + 'as-tag.c', + 'as-utils.c', + 'as-utils-private.h', + 'as-version.h', + 'as-yaml.c', + 'as-yaml.h'] + +deps = [glib, gdkpixbuf, libarchive, soup, yaml] + 
+mapfile = 'appstream-glib.map' +vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile) +asglib = shared_library( + 'appstream-glib', sources, + soversion : lt_current, + version : lt_version, + dependencies : deps, + include_directories : include_directories('@0@/..'.format(meson.current_build_dir())), + link_args : ['-Wl,--no-undefined', vflag], + link_depends : mapfile, + install : true) +``` + +`appstream-glib.map`: + +``` +{ +global: + as_*; +local: + *; +}; +``` + +### Custom targets + +`Makefile.am`: + +```make +if HAVE_GPERF +as-tag-private.h: as-tag.gperf + $(AM_V_GEN) gperf < $< > $@ + +libappstream_glib_la_SOURCES += as-tag-private.h +BUILT_SOURCES += as-tag-private.h +endif +``` + +`meson.build`: + +```meson +if gperf.found() + astagpriv = custom_target('gperf as-tag', + output : 'as-tag-private.h', + input : 'as-tag.gperf', + command : [gperf, '@INPUT@', '--output-file', '@OUTPUT@']) + sources = sources + [astagpriv] +endif +``` + +### Global CFLAGS + +`Makefile.am`: + +```make +AM_CPPFLAGS = \ + -DAS_COMPILATION \ + -DLOCALSTATEDIR=\""$(localstatedir)"\" \ + -DG_LOG_DOMAIN=\"As\" +``` + +`meson.build`: + +```meson +add_project_arguments('-DG_LOG_DOMAIN="As"', language : 'c') +add_project_arguments('-DAS_COMPILATION', language : 'c') +add_project_arguments('-DLOCALSTATEDIR="/var"', language : 'c') +``` + +### Tests + +`Makefile.am`: + +```make +check_PROGRAMS = \ + as-self-test +as_self_test_SOURCES = \ + as-self-test.c +as_self_test_LDADD = \ + $(GLIB_LIBS) \ + $(GDKPIXBUF_LIBS) \ + $(LIBARCHIVE_LIBS) \ + $(SOUP_LIBS) \ + $(YAML_LIBS) \ + $(lib_LTLIBRARIES) +as_self_test_CFLAGS = -DTESTDATADIR=\""$(top_srcdir)/data/tests"\" + +TESTS = as-self-test +``` + +`meson.build`: + +```meson +selftest = executable( + 'as-self-test', 'as-self-test.c', + include_directories : include_directories('@0@/..'.format(meson.current_build_dir())), + dependencies : deps, + c_args : '-DTESTDATADIR="@0@/../data/tests"'.format(meson.current_source_dir()), + link_with : asglib) +test('as-self-test', selftest) +``` + +### GObject Introspection + +`Makefile.am`: + +```make +introspection_sources = \ + as-app.c \ + as-app-validate.c \ + as-app.h \ + as-bundle.c \ + as-bundle.h \ + as-enums.c \ + as-enums.h \ + as-icon.c \ + as-icon.h \ + as-image.c \ + as-image.h \ + as-inf.c \ + as-inf.h \ + as-node.c \ + as-node.h \ + as-problem.c \ + as-problem.h \ + as-provide.c \ + as-provide.h \ + as-release.c \ + as-release.h \ + as-screenshot.c \ + as-screenshot.h \ + as-store.c \ + as-store.h \ + as-tag.c \ + as-tag.h \ + as-utils.c \ + as-utils.h \ + as-version.h + +AppStreamGlib-1.0.gir: libappstream-glib.la +AppStreamGlib_1_0_gir_INCLUDES = GObject-2.0 Gio-2.0 GdkPixbuf-2.0 +AppStreamGlib_1_0_gir_CFLAGS = $(AM_CPPFLAGS) +AppStreamGlib_1_0_gir_SCANNERFLAGS = --identifier-prefix=As \ + --symbol-prefix=as_ \ + --warn-all \ + --add-include-path=$(srcdir) +AppStreamGlib_1_0_gir_EXPORT_PACKAGES = appstream-glib +AppStreamGlib_1_0_gir_LIBS = libappstream-glib.la +AppStreamGlib_1_0_gir_FILES = $(introspection_sources) +INTROSPECTION_GIRS += AppStreamGlib-1.0.gir + +girdir = $(datadir)/gir-1.0 +gir_DATA = $(INTROSPECTION_GIRS) + +typelibdir = $(libdir)/girepository-1.0 +typelib_DATA = $(INTROSPECTION_GIRS:.gir=.typelib) + +CLEANFILES += $(gir_DATA) $(typelib_DATA) +``` + +`meson.build`: + +```meson +introspection_sources = [ + 'as-app.c', + 'as-app-validate.c', + 'as-app.h', + 'as-bundle.c', + 'as-bundle.h', + 'as-enums.c', + 'as-enums.h', + 'as-icon.c', + 'as-icon.h', + 'as-image.c', + 
'as-image.h', + 'as-inf.c', + 'as-inf.h', + 'as-node.c', + 'as-node.h', + 'as-problem.c', + 'as-problem.h', + 'as-provide.c', + 'as-provide.h', + 'as-release.c', + 'as-release.h', + 'as-screenshot.c', + 'as-screenshot.h', + 'as-store.c', + 'as-store.h', + 'as-tag.c', + 'as-tag.h', + 'as-utils.c', + 'as-utils.h', + 'as-version.h'] + +gnome.generate_gir(asglib, + sources : introspection_sources, + nsversion : '1.0', + namespace : 'AppStreamGlib', + symbol_prefix : 'as_', + identifier_prefix : 'As', + export_packages : 'appstream-glib', + includes : ['GObject-2.0', 'Gio-2.0', 'GdkPixbuf-2.0'], + install : true +) +``` + +### GSettings + +`configure.ac`: +```sh +GLIB_GSETTINGS +``` + +`Makefile.am`: +```make +gsettings_SCHEMAS = foo.gschema.xml +@GSETTINGS_RULES@ +``` + +`meson.build`: +```meson +install_data('foo.gschema.xml', install_dir: get_option('datadir') / 'glib-2.0' / 'schemas') +meson.add_install_script('meson_post_install.py') +``` + +`meson_post_install.py`: +```python +#!/usr/bin/env python3 + +import os +import subprocess + +schemadir = os.path.join(os.environ['MESON_INSTALL_PREFIX'], 'share', 'glib-2.0', 'schemas') + +if not os.environ.get('DESTDIR'): + print('Compiling gsettings schemas...') + subprocess.call(['glib-compile-schemas', schemadir]) +``` + +### gettext + +Note this example does not include `intltool` usage. + +`configure.ac`: +```m4 +AM_GNU_GETTEXT([external]) +AM_GNU_GETTEXT_VERSION([0.19.7]) + +GETTEXT_PACKAGE=foo +AC_SUBST(GETTEXT_PACKAGE) +AC_DEFINE_UNQUOTED(GETTEXT_PACKAGE, "$GETTEXT_PACKAGE", [The prefix for our gettext translation domains.]) +``` + +`po/Makevars`: +```make +XGETTEXT_OPTIONS = --from-code=UTF-8 --keyword=_ --keyword=N_ --keyword=C_:1c,2 --keyword=NC_:1c,2 --keyword=g_dngettext:2,3 --add-comments +``` + +`Makefile.am`: +```make +%.desktop: %.desktop.in + $(AM_V_GEN)$(MSGFMT) --desktop --template $< -d $(top_srcdir)/po -o $@ + +%.appdata.xml: %.appdata.xml.in + $(AM_V_GEN)$(MSGFMT) --xml --template $< -d $(top_srcdir)/po -o $@ +``` + +`meson.build`: +```meson +i18n = import('i18n') + +gettext_package = 'foo' +add_project_arguments('-DGETTEXT_PACKAGE=' + gettext_package, language: 'c') +subdir('po') + +i18n.merge_file( + input: 'foo.desktop.in', + output: 'foo.desktop', + type: 'desktop', + po_dir: 'po', + install: true, + install_dir: get_option('datadir') / 'applications' +) + +i18n.merge_file( + input: 'foo.appdata.xml.in', + output: 'foo.appdata.xml', + po_dir: 'po', + install: true, + install_dir: get_option('datadir') / 'appdata' +) +``` + +`po/meson.build`: +```meson +i18n.gettext(gettext_package, preset: 'glib') +``` diff --git a/meson/docs/markdown/Precompiled-headers.md b/meson/docs/markdown/Precompiled-headers.md new file mode 100644 index 000000000..e78dfe04e --- /dev/null +++ b/meson/docs/markdown/Precompiled-headers.md @@ -0,0 +1,119 @@ +--- +short-description: Using precompiled headers to reduce compilation time +... + +# Precompiled headers + +Parsing header files of system libraries is surprisingly expensive. A +typical source file has less than one thousand lines of code. In +contrast the headers of large libraries can be tens of thousands of +lines. This is especially problematic with C++, where header-only +libraries are common and they may contain extremely complex code. This +makes them slow to compile. + +Precompiled headers are a tool to mitigate this issue. Basically what +they do is parse the headers and then serialize the compiler's +internal state to disk. 
The downside of precompiled headers is that +they are tricky to set up. Meson has native support for precompiled +headers, but using them takes a little work. + +A precompiled header file is relatively simple. It is a header file +that contains `#include` directives for the system headers to +precompile. Here is a C++ example. + +```cpp + #include + #include + #include +``` + +In Meson, precompiled header files are always per-target. That is, the +given precompiled header is used when compiling every single file in +the target. Due to limitations of the underlying compilers, this +header file must not be in the same subdirectory as any of the source +files. It is strongly recommended that you create a subdirectory +called `pch` in the target directory and put the header files (and +nothing else) there. + +Toggling the usage of precompiled headers +-- + +If you wish to compile your project without precompiled headers, you +can change the value of the pch option by passing `-Db_pch=false` +argument to Meson at configure time or later with `meson configure`. +You can also toggle the use of pch in a configured build directory +with the GUI tool. You don't have to do any changes to the source +code. Typically this is done to test whether your project compiles +cleanly without pch (that is, checking that its #includes are in +order) and working around compiler bugs. + +Using precompiled headers with GCC and derivatives +-- + +Once you have a file to precompile, you can enable the use of pch for +a given target with a *pch* keyword argument. As an example, let's +assume you want to build a small C binary with precompiled headers. +Let's say the source files of the binary use the system headers +`stdio.h` and `string.h`. Then you create a header file +`pch/myexe_pch.h` with this content: + +```c +#include +#include +``` + +And add this to Meson: + +```meson +executable('myexe', sources : sourcelist, c_pch : 'pch/myexe_pch.h') +``` + +That's all. You should note that your source files must _not_ include +the file `myexe_pch.h` and you must _not_ add the pch subdirectory to +your search path. Any modification of the original program files is +not necessary. Meson will make the compiler include the pch with +compiler options. If you want to disable pch (because of, say, +compiler bugs), it can be done entirely on the build system side with +no changes to source code. + +You can use precompiled headers on any build target. If your target +has multiple languages, you can specify multiple pch files like this. + +```meson +executable('multilang', sources : srclist, + c_pch : 'pch/c_pch.h', cpp_pch : 'pch/cpp_pch.h') +``` + +Using precompiled headers with MSVC +-- +Since Meson version 0.50.0, precompiled headers with MSVC work just like +with GCC. Meson will automatically create the matching pch implementation +file for you. + +Before version 0.50.0, in addition to the header file, Meson +also requires a corresponding source file. If your header is called +`foo_pch.h`, the corresponding source file is usually called +`foo_pch.cpp` and it resides in the same `pch` subdirectory as the +header. Its contents are this: + +```cpp +#if !defined(_MSC_VER) +#error "This file is only for use with MSVC." 
+#endif + +#include "foo_pch.h" +``` + +To enable pch, simply list both files in the target definition: + +```meson +executable('myexe', sources : srclist, + cpp_pch : ['pch/foo_pch.h', 'pch/foo_pch.cpp']) +``` + +This form will work with both GCC and msvc, because Meson knows that +GCC does not need a `.cpp` file and thus just ignores it. + +It should be noted that due to implementation details of the MSVC +compiler, having precompiled headers for multiple languages in the +same target is not guaranteed to work. diff --git a/meson/docs/markdown/Project-templates.md b/meson/docs/markdown/Project-templates.md new file mode 100644 index 000000000..7ded318b7 --- /dev/null +++ b/meson/docs/markdown/Project-templates.md @@ -0,0 +1,49 @@ +--- +short-description: Project templates +... + +# Project templates + +To make it easier for new developers to start working, Meson ships a +tool to generate the basic setup of different kinds of projects. This +functionality can be accessed with the `meson init` command. A typical +project setup would go like this: + +```console +$ mkdir project_name +$ cd project_name +$ meson init --language=c --name=myproject --version=0.1 +``` + +This would create the build definitions for a helloworld type +project. The result can be compiled as usual. For example it +could be done like this: + +``` +$ meson setup builddir +$ meson compile -C builddir +``` + +The generator has many different projects and settings. They can all +be listed by invoking the command `meson init --help`. + +This feature is available since Meson version 0.45.0. + +# Generate a build script for an existing project + +With `meson init` you can generate a build script for an existing +project with existing project files by running the command in the +root directory of your project. Meson currently supports this +feature for `executable`, and `jar` projects. + +# Build after generation of template + +It is possible to have Meson generate a build directory from the +`meson init` command without running `meson setup`. This is done +by passing `-b` or `--build` switch. + +```console +$ mkdir project_name +$ cd project_name +$ meson init --language=c --name=myproject --version=0.1 --build +``` \ No newline at end of file diff --git a/meson/docs/markdown/Python-3-module.md b/meson/docs/markdown/Python-3-module.md new file mode 100644 index 000000000..b89ea3e2f --- /dev/null +++ b/meson/docs/markdown/Python-3-module.md @@ -0,0 +1,57 @@ +# Python 3 module + +This module provides support for dealing with Python 3. It has the +following methods. + +This module is deprecated and replaced by the +[python](Python-module.md) module. + +## find_python + +This is a cross platform way of finding the Python 3 executable, which +may have a different name on different operating systems. Returns an +[external program](Reference-manual.md#external-program-object) +object. + +*Added 0.38.0* + +Deprecated, replaced by +[`find_installation`](Python-module.md#find_installation) function +from `python` module. + +## extension_module + +Creates a `shared_module` target that is named according to the naming +conventions of the target platform. All positional and keyword +arguments are the same as for +[shared_module](Reference-manual.md#shared_module). + +`extension_module` does not add any dependencies to the library so user may +need to add `dependencies : dependency('python3')`, see +[Python3 dependency](Dependencies.md#python3). 
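+
+A minimal sketch of such a target (the module and source names are
+placeholders, not part of this module's API):
+
+```meson
+py3_mod = import('python3')
+py3_dep = dependency('python3')
+my_ext = py3_mod.extension_module('mymod', 'mymod.c',
+                                  dependencies : py3_dep)
+```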
+ +*Added 0.38.0* + +Deprecated, replaced by +[`extension_module`](Python-module.md#extension_module) method from +`python` module. + +## language_version + +Returns a string with the Python language version such as `3.5`. + +*Added 0.40.0* + +Deprecated, replaced by +[`language_version`](Python-module.md#language_version) method from +`python` module. + +## sysconfig_path + +Returns the Python sysconfig path without prefix, such as +`lib/python3.6/site-packages`. + +*Added 0.40.0* + +Deprecated, replaced by [`get_path`](Python-module.md#get_path) +method from `python` module. diff --git a/meson/docs/markdown/Python-module.md b/meson/docs/markdown/Python-module.md new file mode 100644 index 000000000..3b7b4f59c --- /dev/null +++ b/meson/docs/markdown/Python-module.md @@ -0,0 +1,248 @@ +--- +short-description: Generic python module +authors: + - name: Mathieu Duponchelle + email: mathieu@centricular.com + years: [2018] + has-copyright: false +... + +# Python module + +This module provides support for finding and building extensions against +python installations, be they python 2 or 3. + +*Added 0.46.0* + +## Functions + +### `find_installation()` + +``` meson +pymod.find_installation(name_or_path, ...) +``` + +Find a python installation matching `name_or_path`. + +That argument is optional, if not provided then the returned python +installation will be the one used to run Meson. + +If provided, it can be: + +- A simple name, eg `python-2.7`, Meson will look for an external program + named that way, using [find_program] + +- A path, eg `/usr/local/bin/python3.4m` + +- One of `python2` or `python3`: in either case, the module will try + some alternative names: `py -2` or `py -3` on Windows, and `python` + everywhere. In the latter case, it will check whether the version + provided by the sysconfig module matches the required major version + +Keyword arguments are the following: + +- `required`: by default, `required` is set to `true` and Meson will + abort if no python installation can be found. If `required` is set to `false`, + Meson will continue even if no python installation was found. You can + then use the `.found()` method on the returned object to check + whether it was found or not. Since *0.48.0* the value of a + [`feature`](Build-options.md#features) option can also be passed to the + `required` keyword argument. +- `disabler`: if `true` and no python installation can be found, return a + [disabler object](Reference-manual.md#disabler-object) instead of a not-found object. + *Since 0.49.0* +- `modules`: a list of module names that this python installation must have. + *Since 0.51.0* + +**Returns**: a [python installation][`python_installation` object] + +## `python_installation` object + +The `python_installation` object is an [external program], with several +added methods. + +### Methods + +#### `path()` + +```meson +str py_installation.path() +``` + +*Added 0.50.0* + +Works like the path method of other `ExternalProgram` objects. Was not +provided prior to 0.50.0 due to a bug. + +#### `extension_module()` + +``` meson +shared_module py_installation.extension_module(module_name, list_of_sources, ...) +``` + +Create a `shared_module` target that is named according to the naming +conventions of the target platform. 
+ +All positional and keyword arguments are the same as for +[shared_module], excluding `name_suffix` and `name_prefix`, and with +the addition of the following: + +- `subdir`: By default, Meson will install the extension module in + the relevant top-level location for the python installation, eg + `/usr/lib/site-packages`. When subdir is passed to this method, + it will be appended to that location. This keyword argument is + mutually exclusive with `install_dir` + +`extension_module` does not add any dependencies to the library so +user may need to add `dependencies : py_installation.dependency()`, +see [][`dependency()`]. + +**Returns**: a [buildtarget object] + +#### `dependency()` + +``` meson +python_dependency py_installation.dependency(...) +``` + +This method accepts no positional arguments, and the same keyword +arguments as the standard [dependency] function. It also supports the +following keyword argument: + +- `embed`: *(since 0.53.0)* If true, Meson will try to find a python + dependency that can be used for embedding python into an + application. + +**Returns**: a [python dependency][`python_dependency` object] + +#### `install_sources()` + +``` meson +void py_installation.install_sources(list_of_files, ...) +``` + +Install actual python sources (`.py`). + +All positional and keyword arguments are the same as for +[install_data], with the addition of the following: + +- `pure`: On some platforms, architecture independent files are + expected to be placed in a separate directory. However, if the + python sources should be installed alongside an extension module + built with this module, this keyword argument can be used to + override that behaviour. Defaults to `true` + +- `subdir`: See documentation for the argument of the same name to + [][`extension_module()`] + +#### `get_install_dir()` + +``` meson +string py_installation.get_install_dir(...) +``` + +Retrieve the directory [][`install_sources()`] will install to. + +It can be useful in cases where `install_sources` cannot be used +directly, for example when using [configure_file]. + +This function accepts no arguments, its keyword arguments are the same +as [][`install_sources()`]. + +**Returns**: A string + +#### `language_version()` + +``` meson +string py_installation.language_version() +``` + +Get the major.minor python version, eg `2.7`. + +The version is obtained through the `sysconfig` module. + +This function expects no arguments or keyword arguments. + +**Returns**: A string + +#### `get_path()` + +``` meson +string py_installation.get_path(path_name, fallback) +``` + +Get a path as defined by the `sysconfig` module. + +For example: + +``` meson +purelib = py_installation.get_path('purelib') +``` + +This function requires at least one argument, `path_name`, +which is expected to be a non-empty string. + +If `fallback` is specified, it will be returned if no path +with the given name exists. Otherwise, attempting to read +a non-existing path will cause a fatal error. + +**Returns**: A string + +#### `has_path()` + +``` meson + bool py_installation.has_path(path_name) +``` + +**Returns**: true if a path named `path_name` can be retrieved with +[][`get_path()`], false otherwise. + +#### `get_variable()` + +``` meson +string py_installation.get_variable(variable_name, fallback) +``` + +Get a variable as defined by the `sysconfig` module. 
+ +For example: + +``` meson +py_bindir = py_installation.get_variable('BINDIR', '') +``` + +This function requires at least one argument, `variable_name`, +which is expected to be a non-empty string. + +If `fallback` is specified, it will be returned if no variable +with the given name exists. Otherwise, attempting to read +a non-existing variable will cause a fatal error. + +**Returns**: A string + +#### `has_variable()` + +``` meson + bool py_installation.has_variable(variable_name) +``` + +**Returns**: true if a variable named `variable_name` can be retrieved +with [][`get_variable()`], false otherwise. + +## `python_dependency` object + +This [dependency object] subclass will try various methods to obtain +the compiler and linker arguments, starting with pkg-config then +potentially using information obtained from python's `sysconfig` +module. + +It exposes the same methods as its parent class. + +[find_program]: Reference-manual.md#find_program +[shared_module]: Reference-manual.md#shared_module +[external program]: Reference-manual.md#external-program-object +[dependency]: Reference-manual.md#dependency +[install_data]: Reference-manual.md#install_data +[configure_file]: Reference-manual.md#configure_file +[dependency object]: Reference-manual.md#dependency-object +[buildtarget object]: Reference-manual.md#build-target-object diff --git a/meson/docs/markdown/Qt4-module.md b/meson/docs/markdown/Qt4-module.md new file mode 100644 index 000000000..6b6241569 --- /dev/null +++ b/meson/docs/markdown/Qt4-module.md @@ -0,0 +1,6 @@ +# Qt4 module + +This module provides support for Qt4's `moc`, `uic` and `rcc` +tools. It is used identically to the [Qt 5 module](Qt5-module.md). + +{{ _include_qt_base.md }} diff --git a/meson/docs/markdown/Qt5-module.md b/meson/docs/markdown/Qt5-module.md new file mode 100644 index 000000000..8a5eb00a0 --- /dev/null +++ b/meson/docs/markdown/Qt5-module.md @@ -0,0 +1,6 @@ +# Qt5 module + +The Qt5 module provides tools to automatically deal with the various +tools and steps required for Qt. + +{{ _include_qt_base.md }} diff --git a/meson/docs/markdown/Qt6-module.md b/meson/docs/markdown/Qt6-module.md new file mode 100644 index 000000000..4d40423f9 --- /dev/null +++ b/meson/docs/markdown/Qt6-module.md @@ -0,0 +1,8 @@ +# Qt6 module + +*New in Meson 0.57.0* + +The Qt5 module provides tools to automatically deal with the various +tools and steps required for Qt. + +{{ _include_qt_base.md }} diff --git a/meson/docs/markdown/Quick-guide.md b/meson/docs/markdown/Quick-guide.md new file mode 100644 index 000000000..c1de82048 --- /dev/null +++ b/meson/docs/markdown/Quick-guide.md @@ -0,0 +1,158 @@ +--- +title: Quickstart Guide +short-description: Getting Started using Mesonbuild +... + +# Using Meson + +Meson has been designed to be as simple to use as possible. This page +outlines the initial steps needed for installation, troubleshooting, +and standard use. + +For more advanced configuration please refer to the command line help +`meson --help` or the Meson documentation located at the +[Mesonbuild](https://mesonbuild.com) website. 
+ +Table of Contents: +* [Requirements](#requirements) +* [Installation using package manager](#installation-using-package-manager) +* [Installation using Python](#installation-using-python) +* [Installation from source](#installation-from-source) +* [Troubleshooting](#troubleshooting) +* [Compiling a Meson project](#compiling-a-meson-project) +* [Using Meson as a distro packager](#using-meson-as-a-distro-packager) + +Requirements +-- + +* [Python 3](https://python.org) +* [Ninja](https://github.com/ninja-build/ninja/) + +*Ninja is only needed if you use the Ninja backend. Meson can also +generate native VS and Xcode project files.* + + +Installation using package manager +-- + +Ubuntu: + +```console +$ sudo apt-get install python3 python3-pip python3-setuptools \ + python3-wheel ninja-build +``` +*Due to our frequent release cycle and development speed, distro packaged software may quickly become outdated.* + +Installation using Python +-- +Requirements: **pip3** + +The best way to receive the most up-to-date version of Mesonbuild. + +Install as a local user (recommended): +```console +$ pip3 install --user meson +``` +Install as root: +```console +$ pip3 install meson +``` + +*If you are unsure whether to install as root or a local user, install + as a local user.* + + +Installation from source +-- +Requirements: **git** + +Meson can be run directly from the cloned git repository. + +```console +$ git clone https://github.com/mesonbuild/meson.git /path/to/sourcedir +``` +Troubleshooting: +-- +Common Issues: +```console +$ meson builddir +$ bash: /usr/bin/meson: No such file or directory +``` + +Description: The default installation prefix for the python pip module +installation is not included in your shell environment PATH. The +default prefix for python pip installation modules is located under +``/usr/local``. + +**Resolution: +This issue can be resolved by altering the default shell environment +PATH to include ``/usr/local/bin``. ** + +*Note: There are other ways of fixing this issue such as using + symlinks or copying the binaries to a default path and these methods + are not recommended or supported as they may break package management + interoperability.* + + +Compiling a Meson project +-- + +The most common use case of Meson is compiling code on a code base you +are working on. The steps to take are very simple. + +```console +$ cd /path/to/source/root +$ meson builddir && cd builddir +$ meson compile +$ meson test +``` + +The only thing to note is that you need to create a separate build +directory. Meson will not allow you to build source code inside your +source tree. All build artifacts are stored in the build directory. +This allows you to have multiple build trees with different +configurations at the same time. This way generated files are not +added into revision control by accident. + +To recompile after code changes, just type `meson compile`. The build +command is always the same. You can do arbitrary changes to source +code and build system files and Meson will detect those and will do +the right thing. If you want to build optimized binaries, just use the +argument `--buildtype=debugoptimized` when running Meson. It is +recommended that you keep one build directory for unoptimized builds +and one for optimized ones. To compile any given configuration, just +go into the corresponding build directory and run `meson compile`. + +Meson will automatically add compiler flags to enable debug +information and compiler warnings (i.e. `-g` and `-Wall`). 
This means +the user does not have to deal with them and can instead focus on +coding. + +Using Meson as a distro packager +-- + +Distro packagers usually want total control on the build flags +used. Meson supports this use case natively. The commands needed to +build and install Meson projects are the following. + +```console +$ cd /path/to/source/root +$ meson --prefix /usr --buildtype=plain builddir -Dc_args=... -Dcpp_args=... -Dc_link_args=... -Dcpp_link_args=... +$ meson compile -C builddir +$ meson test -C builddir +$ DESTDIR=/path/to/staging/root meson install -C builddir +``` + +The command line switch `--buildtype=plain` tells Meson not to add its +own flags to the command line. This gives the packager total control +on used flags. + +This is very similar to other build systems. The only difference is +that the `DESTDIR` variable is passed as an environment variable +rather than as an argument to `meson install`. + +As distro builds happen always from scratch, you might consider +enabling [unity builds](Unity-builds.md) on your packages because they +are faster and produce better code. However there are many projects +that do not build with unity builds enabled so the decision to use +unity builds must be done by the packager on a case by case basis. diff --git a/meson/docs/markdown/RPM-module.md b/meson/docs/markdown/RPM-module.md new file mode 100644 index 000000000..cab6d968a --- /dev/null +++ b/meson/docs/markdown/RPM-module.md @@ -0,0 +1,16 @@ +# RPM module + +The RPM module can be used to create a sample rpm spec file for a +Meson project. It autodetects installed files, dependencies and so +on. Using it is very simple. At the very end of your Meson project +(that is, the end of your top level `meson.build` file) add these two +lines. + +```meson +rpm = import('rpm') +rpm.generate_spec_template() +``` + +Run Meson once on your code and the template will be written in your +build directory. Then remove the two lines above and manually edit the +template to add missing information. After this it is ready for use. diff --git a/meson/docs/markdown/Reference-manual.md b/meson/docs/markdown/Reference-manual.md new file mode 100644 index 000000000..e96a6fc41 --- /dev/null +++ b/meson/docs/markdown/Reference-manual.md @@ -0,0 +1,2923 @@ +# Reference manual + +## Functions + +The following functions are available in build files. Click on each to +see the description and usage. The objects returned by them are [list +afterwards](#returned-objects). + +### add_global_arguments() + +``` meson + void add_global_arguments(arg1, arg2, ...) +``` + +Adds the positional arguments to the compiler command line. This +function has two keyword arguments: + +- `language`: specifies the language(s) that the arguments should be +applied to. If a list of languages is given, the arguments are added +to each of the corresponding compiler command lines. Note that there +is no way to remove an argument set in this way. If you have an +argument that is only used in a subset of targets, you have to specify +it in per-target flags. + +- `native` *(since 0.48.0)*: a boolean specifying whether the arguments should be + applied to the native or cross compilation. If `true` the arguments + will only be used for native compilations. If `false` the arguments + will only be used in cross compilations. If omitted, the flags are + added to native compilations if compiling natively and cross + compilations (only) when cross compiling. 
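+
+For illustration, a typical call looks like this (the define is
+arbitrary):
+
+```meson
+add_global_arguments('-DFOO_ENABLED', language : ['c', 'cpp'])
+```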
+ +The arguments are used in all compiler invocations with the exception +of compile tests, because you might need to run a compile test with +and without the argument in question. For this reason only the +arguments explicitly specified are used during compile tests. + +**Note:** Usually you should use `add_project_arguments` instead, + because that works even when you project is used as a subproject. + +**Note:** You must pass always arguments individually `arg1, arg2, + ...` rather than as a string `'arg1 arg2', ...` + +### add_global_link_arguments() + +``` meson + void add_global_link_arguments(*arg1*, *arg2*, ...) +``` + +Like `add_global_arguments` but the arguments are passed to the linker. + +### add_languages() + +``` meson + bool add_languages(*langs*) +``` + +Add programming languages used by the project. Equivalent to having +them in the `project` declaration. This function is usually used to +add languages that are only used under some conditions, like this: + +```meson +project('foobar', 'c') +if compiling_for_osx + add_languages('objc') +endif +if add_languages('cpp', required : false) + executable('cpp-app', 'main.cpp') +endif +``` + +Takes the following keyword arguments: + +- `required`: defaults to `true`, which means that if any of the languages +specified is not found, Meson will halt. *(since 0.47.0)* The value of a +[`feature`](Build-options.md#features) option can also be passed. + +- `native` *(since 0.54.0)*: if set to `true`, the language will be used to compile for the build + machine, if `false`, for the host machine. + +Returns `true` if all languages specified were found and `false` otherwise. + +If `native` is omitted, the languages may be used for either build or host +machine, but are never required for the build machine. (i.e. it is equivalent +to `add_languages(*langs*, native: false, required: *required*) and +add_languages(*langs*, native: true, required: false)`. This default behaviour +may change to `native: false` in a future Meson version. + +### add_project_arguments() + +``` meson + void add_project_arguments(arg1, arg2, ...) +``` + +This function behaves in the same way as `add_global_arguments` except +that the arguments are only used for the current project, they won't +be used in any other subproject. + +### add_project_link_arguments() + +``` meson + void add_project_link_arguments(*arg1*, *arg2*, ...) +``` + +Like `add_project_arguments` but the arguments are passed to the linker. + +### add_test_setup() + +``` meson + void add_test_setup(*name*, ...) +``` + +Add a custom test setup that can be used to run the tests with a +custom setup, for example under Valgrind. The keyword arguments are +the following: + +- `env`: environment variables to set, such as `['NAME1=value1', + 'NAME2=value2']`, or an [`environment()` + object](#environment-object) which allows more sophisticated + environment juggling. *(since 0.52.0)* A dictionary is also accepted. +- `exe_wrapper`: a list containing the wrapper command or script followed by the arguments to it +- `gdb`: if `true`, the tests are also run under `gdb` +- `timeout_multiplier`: a number to multiply the test timeout with. + *Since 0.57* if timeout_multiplier is `<= 0` the test has infinite duration, + in previous versions of Meson the test would fail with a timeout immediately. +- `is_default` *(since 0.49.0)*: a bool to set whether this is the default test setup. + If `true`, the setup will be used whenever `meson test` is run + without the `--setup` option. 
+- `exclude_suites` *(since 0.57.0)*: a list of test suites that should be + excluded when using this setup. Suites specified in the `--suite` option + to `meson test` will always run, overriding `add_test_setup` if necessary. + +To use the test setup, run `meson test --setup=*name*` inside the +build dir. + +Note that all these options are also available while running the +`meson test` script for running tests instead of `ninja test` or +`msbuild RUN_TESTS.vcxproj`, etc depending on the backend. + +### alias_target + +``` meson +runtarget alias_target(target_name, dep1, ...) +``` + +*(since 0.52.0)* + +This function creates a new top-level target. Like all top-level +targets, this integrates with the selected backend. For instance, with +you can run it as `meson compile target_name`. This is a dummy target +that does not execute any command, but ensures that all dependencies +are built. Dependencies can be any build target (e.g. return value of +[executable()](#executable), custom_target(), etc) + +### assert() + +``` meson + void assert(*condition*, *message*) +``` + +Abort with an error message if `condition` evaluates to `false`. + +*(since 0.53.0)* `message` argument is optional and defaults to print the condition +statement instead. + +### benchmark() + +``` meson + void benchmark(name, executable, ...) +``` + +Creates a benchmark item that will be run when the benchmark target is +run. The behavior of this function is identical to [`test()`](#test) +except for: + +* benchmark() has no `is_parallel` keyword because benchmarks are not run in parallel +* benchmark() does not automatically add the `MALLOC_PERTURB_` environment variable + +*Note:* Prior to 0.52.0 benchmark would warn that `depends` and +`priority` were unsupported, this is incorrect. + +### both_libraries() + +``` meson + buildtarget = both_libraries(library_name, list_of_sources, ...) +``` + +*(since 0.46.0)* + +Builds both a static and shared library with the given sources. +Positional and keyword arguments are otherwise the same as for +[`library`](#library). Source files will be compiled only once and +object files will be reused to build both shared and static libraries, +unless `b_staticpic` user option or `pic` argument are set to false in +which case sources will be compiled twice. + +The returned [buildtarget](#build-target-object) always represents the +shared library. In addition it supports the following extra methods: + +- `get_shared_lib()` returns the shared library build target +- `get_static_lib()` returns the static library build target + +### build_target() + +Creates a build target whose type can be set dynamically with the +`target_type` keyword argument. + +`target_type` may be set to one of: + +- `executable` +- `shared_library` +- `shared_module` +- `static_library` +- `both_libraries` +- `library` +- `jar` + +This declaration: + +```meson +executable() +``` + +is equivalent to this: + +```meson +build_target(, target_type : 'executable') +``` + +The object returned by `build_target` and all convenience wrappers for +`build_target` such as [`executable`](#executable) and +[`library`](#library) has methods that are documented in the [object +methods section](#build-target-object) below. + +### configuration_data() + +``` meson + configuration_data_object = configuration_data(...) +``` + +Creates an empty configuration object. You should add your +configuration with [its method calls](#configuration-data-object) and +finally use it in a call to `configure_file`. 
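+
+A minimal sketch of that flow (the entry names are arbitrary):
+
+```meson
+conf = configuration_data()
+conf.set('ENABLE_FOO', 1)
+conf.set_quoted('PROJECT_NAME', 'demo')
+configure_file(output : 'config.h', configuration : conf)
+```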
+ +*(since 0.49.0)* Takes an optional dictionary as first argument. If +provided, each key/value pair is added into the `configuration_data` +as if `set()` method was called for each of them. + +### configure_file() + +``` meson + generated_file = configure_file(...) +``` + +This function can run in three modes depending on the keyword arguments +passed to it. + +When a [`configuration_data()`](#configuration_data) object is passed +to the `configuration:` keyword argument, it takes a template file as +the `input:` (optional) and produces the `output:` (required) by +substituting values from the configuration data as detailed in [the +configuration file documentation](Configuration.md). *(since 0.49.0)* +A dictionary can be passed instead of a +[`configuration_data()`](#configuration_data) object. + +When a list of strings is passed to the `command:` keyword argument, +it takes any source or configured file as the `input:` and assumes +that the `output:` is produced when the specified command is run. + +*(since 0.47.0)* When the `copy:` keyword argument is set to `true`, +this function will copy the file provided in `input:` to a file in the +build directory with the name `output:` in the current directory. + +These are all the supported keyword arguments: + +- `capture` *(since 0.41.0)*: when this argument is set to true, + Meson captures `stdout` of the `command` and writes it to the target + file specified as `output`. +- `command`: as explained above, if specified, Meson does not create + the file itself but rather runs the specified command, which allows + you to do fully custom file generation. *(since 0.52.0)* The command can contain + file objects and more than one file can be passed to the `input` keyword + argument, see [`custom_target()`](#custom_target) for details about string + substitutions. +- `copy` *(since 0.47.0)*: as explained above, if specified Meson only + copies the file from input to output. +- `depfile` *(since 0.52.0)*: a dependency file that the command can write listing + all the additional files this target depends on. A change + in any one of these files triggers a reconfiguration. +- `format` *(since 0.46.0)*: the format of defines. It defaults to `meson`, and so substitutes +`#mesondefine` statements and variables surrounded by `@` characters, you can also use `cmake` +to replace `#cmakedefine` statements and variables with the `${variable}` syntax. Finally you can use +`cmake@` in which case substitutions will apply on `#cmakedefine` statements and variables with +the `@variable@` syntax. +- `input`: the input file name. If it's not specified in configuration + mode, all the variables in the `configuration:` object (see above) + are written to the `output:` file. +- `install` *(since 0.50.0)*: when true, this generated file is installed during +the install step, and `install_dir` must be set and not empty. When false, this +generated file is not installed regardless of the value of `install_dir`. +When omitted it defaults to true when `install_dir` is set and not empty, +false otherwise. +- `install_dir`: the subdirectory to install the generated file to + (e.g. `share/myproject`), if omitted or given the value of empty + string, the file is not installed. +- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format + and optionally the owner/uid and group/gid for the installed files. +- `output`: the output file name. *(since 0.41.0)* may contain + `@PLAINNAME@` or `@BASENAME@` substitutions. 
In configuration mode, + the permissions of the input file (if it is specified) are copied to + the output file. +- `output_format` *(since 0.47.0)*: the format of the output to generate when no input + was specified. It defaults to `c`, in which case preprocessor directives + will be prefixed with `#`, you can also use `nasm`, in which case the + prefix will be `%`. +- `encoding` *(since 0.47.0)*: set the file encoding for the input and output file, + defaults to utf-8. The supported encodings are those of python3, see + [standard-encodings](https://docs.python.org/3/library/codecs.html#standard-encodings). + +### custom_target() + +``` meson + customtarget custom_target(*name*, ...) +``` + +Create a custom top level build target. The only positional argument +is the name of this target and the keyword arguments are the +following. + +- `build_by_default` *(since 0.38.0)*: causes, when set to true, to + have this target be built by default. This means it will be built when + `meson compile` is called without any arguments. The default value is `false`. + *(since 0.50.0)* If `build_by_default` is explicitly set to false, `install` + will no longer override it. If `build_by_default` is not set, `install` will + still determine its default. +- `build_always` **(deprecated)**: if `true` this target is always considered out of + date and is rebuilt every time. Equivalent to setting both + `build_always_stale` and `build_by_default` to true. +- `build_always_stale` *(since 0.47.0)*: if `true` the target is always considered out of date. + Useful for things such as build timestamps or revision control tags. + The associated command is run even if the outputs are up to date. +- `capture`: there are some compilers that can't be told to write + their output to a file but instead write it to standard output. When + this argument is set to true, Meson captures `stdout` and writes it + to the target file. Note that your command argument list may not + contain `@OUTPUT@` when capture mode is active. +- `console` *(since 0.48.0)*: keyword argument conflicts with `capture`, and is meant + for commands that are resource-intensive and take a long time to + finish. With the Ninja backend, setting this will add this target + to [Ninja's `console` pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool), + which has special properties such as not buffering stdout and + serializing all targets in this pool. +- `command`: command to run to create outputs from inputs. The command + may be strings or the return value of functions that return file-like + objects such as [`find_program()`](#find_program), + [`executable()`](#executable), [`configure_file()`](#configure_file), + [`files()`](#files), [`custom_target()`](#custom_target), etc. + Meson will automatically insert the appropriate dependencies on + targets and files listed in this keyword argument. + Note: always specify commands in array form `['commandname', + '-arg1', '-arg2']` rather than as a string `'commandname -arg1 + -arg2'` as the latter will *not* work. +- `depend_files`: files ([`string`](#string-object), + [`files()`](#files), or [`configure_file()`](#configure_file)) that + this target depends on but are not listed in the `command` keyword + argument. Useful for adding regen dependencies. +- `depends`: specifies that this target depends on the specified + target(s), even though it does not take any of them as a command + line argument. This is meant for cases where you have a tool that + e.g. does globbing internally. 
Usually you should just put the + generated sources as inputs and Meson will set up all dependencies + automatically. +- `depfile`: a dependency file that the command can write listing + all the additional files this target depends on, for example a C + compiler would list all the header files it included, and a change + in any one of these files triggers a recompilation +- `input`: list of source files. *(since 0.41.0)* the list is flattened. +- `install`: when true, this target is installed during the install step +- `install_dir`: directory to install to +- `install_mode` *(since 0.47.0)*: the file mode and optionally the + owner/uid and group/gid +- `output`: list of output files +- `env` *(since 0.57.0)*: environment variables to set, such as + `{'NAME1': 'value1', 'NAME2': 'value2'}` or `['NAME1=value1', 'NAME2=value2']`, + or an [`environment()` object](#environment-object) which allows more + sophisticated environment juggling. +- `feed` *(since 0.59.0)*: there are some compilers that can't be told to read + their input from a file and instead read it from standard input. When this + argument is set to true, Meson feeds the input file to `stdin`. Note that + your argument list may not contain `@INPUT@` when feed mode is active. + +The list of strings passed to the `command` keyword argument accept +the following special string substitutions: + +- `@INPUT@`: the full path to the input passed to `input`. If more than + one input is specified, all of them will be substituted as separate + arguments only if the command uses `'@INPUT@'` as a + standalone-argument. For instance, this would not work: `command : + ['cp', './@INPUT@']`, but this would: `command : ['cp', '@INPUT@']`. +- `@OUTPUT@`: the full path to the output passed to `output`. If more + than one outputs are specified, the behavior is the same as + `@INPUT@`. +- `@INPUT0@` `@INPUT1@` `...`: the full path to the input with the specified array index in `input` +- `@OUTPUT0@` `@OUTPUT1@` `...`: the full path to the output with the specified array index in `output` +- `@OUTDIR@`: the full path to the directory where the output(s) must be written +- `@DEPFILE@`: the full path to the dependency file passed to `depfile` +- `@PLAINNAME@`: the input filename, without a path +- `@BASENAME@`: the input filename, with extension removed +- `@PRIVATE_DIR@` *(since 0.50.1)*: path to a directory where the custom target must store all its intermediate files. +- `@SOURCE_ROOT@`: the path to the root of the source tree. Depending on the backend, + this may be an absolute or a relative to current workdir path. +- `@BUILD_ROOT@`: the path to the root of the build tree. Depending on the backend, + this may be an absolute or a relative to current workdir path. +- `@CURRENT_SOURCE_DIR@`: this is the directory where the currently + processed meson.build is located in. Depending on the backend, + this may be an absolute or a relative to current workdir path. + +*(since 0.47.0)* The `depfile` keyword argument also accepts the + `@BASENAME@` and `@PLAINNAME@` substitutions. + +The returned object also has methods that are documented in the +[object methods section](#custom-target-object) below. + +**Note:** Assuming that `command:` is executed by a POSIX `sh` shell +is not portable, notably to Windows. Instead, consider using a +`native: true` [executable()](#executable), or a python script. + +### declare_dependency() + +``` meson + dependency_object declare_dependency(...) 
+``` + +This function returns a [dependency object](#dependency-object) that +behaves like the return value of [`dependency`](#dependency) but is +internal to the current build. The main use case for this is in +subprojects. This allows a subproject to easily specify how it should +be used. This makes it interchangeable with the same dependency that +is provided externally by the system. This function has the following +keyword arguments: + +- `compile_args`: compile arguments to use. +- `dependencies`: other dependencies needed to use this dependency. +- `include_directories`: the directories to add to header search path, + must be include_directories objects or *(since 0.50.0)* plain strings +- `link_args`: link arguments to use. +- `link_with`: libraries to link against. +- `link_whole` *(since 0.46.0)*: libraries to link fully, same as [`executable`](#executable). +- `sources`: sources to add to targets (or generated header files + that should be built before sources including them are built) +- `version`: the version of this dependency, such as `1.2.3`. Defaults to the + project version. +- `variables` *(since 0.54.0)*: a dictionary of arbitrary strings, this is meant to be used + in subprojects where special variables would be provided via cmake or + pkg-config. *since 0.56.0* it can also be a list of `'key=value'` strings. + +### dependency() + +``` meson + dependency_object dependency(*dependency_name*, ...) +``` + +Finds an external dependency (usually a library installed on your +system) with the given name with `pkg-config` and [with +CMake](Dependencies.md#cmake) if `pkg-config` fails. Additionally, +frameworks (OSX only) and [library-specific fallback detection +logic](Dependencies.md#dependencies-with-custom-lookup-functionality) +are also supported. + +Dependencies can also be resolved in two other ways: + +* if the same name was used in a `meson.override_dependency` prior to + the call to `dependency`, the overriding dependency will be returned + unconditionally; that is, the overriding dependency will be used + independent of whether an external dependency is installed in the system. + Typically, `meson.override_dependency` will have been used by a + subproject. + +* by a fallback subproject which, if needed, will be brought into the current + build specification as if [`subproject()`](#subproject) had been called. + The subproject can be specified with the `fallback` argument. Alternatively, + if the `fallback` argument is absent, *since 0.55.0* Meson can + automatically identify a subproject as a fallback if a wrap file + [provides](Wrap-dependency-system-manual.md#provide-section) the + dependency, or if a subproject has the same name as the dependency. + In the latter case, the subproject must use `meson.override_dependency` to + specify the replacement, or Meson will report a hard error. See the + [Wrap documentation](Wrap-dependency-system-manual.md#provide-section) + for more details. This automatic search can be controlled using the + `allow_fallback` keyword argument. 
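+
+For instance, a minimal sketch of such a lookup with an explicit
+subproject fallback (the `zlib` and `zlib_dep` names here are purely
+illustrative) could look like this, using the `fallback` keyword
+argument described below:
+
+```meson
+# Use the system zlib if it is new enough, otherwise build the bundled
+# 'zlib' subproject, which exposes a 'zlib_dep' variable.
+zlib_dep = dependency('zlib',
+  version : '>=1.2.8',
+  fallback : ['zlib', 'zlib_dep'])
+```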
+ +This function supports the following keyword arguments: + +- `default_options` *(since 0.37.0)*: an array of default option values + that override those set in the subproject's `meson_options.txt` + (like `default_options` in [`project()`](#project), they only have + effect when Meson is run for the first time, and command line + arguments override any default options in build files) +- `allow_fallback` (boolean argument, *since 0.56.0*): specifies whether Meson + should automatically pick a fallback subproject in case the dependency + is not found in the system. If `true` and the dependency is not found + on the system, Meson will fallback to a subproject that provides this + dependency. If `false`, Meson will not fallback even if a subproject + provides this dependency. By default, Meson will do so if `required` + is `true` or [`enabled`](Build-options.md#features); see the [Wrap + documentation](Wrap-dependency-system-manual.md#provide-section) + for more details. +- `fallback` (string or array argument): manually specifies a subproject + fallback to use in case the dependency is not found in the system. + This is useful if the automatic search is not applicable or if you + want to support versions of Meson older than 0.55.0. If the value is an + array `['subproj_name', 'subproj_dep']`, the first value is the name + of the subproject and the second is the variable name in that + subproject that contains a dependency object such as the return + value of [`declare_dependency`](#declare_dependency) or + [`dependency()`](#dependency), etc. Note that this means the + fallback dependency may be a not-found dependency, in which + case the value of the `required:` kwarg will be obeyed. + *Since 0.54.0* the value can be a single string, the subproject name; + in this case the subproject must use + `meson.override_dependency('dependency_name', subproj_dep)` + to specify the dependency object used in the superproject. + If the value is an empty list, it has the same effect as + `allow_fallback: false`. +- `language` *(since 0.42.0)*: defines what language-specific + dependency to find if it's available for multiple languages. +- `method`: defines the way the dependency is detected, the default is + `auto` but can be overridden to be e.g. `qmake` for Qt development, + and [different dependencies support different values]( + Dependencies.md#dependencies-with-custom-lookup-functionality) + for this (though `auto` will work on all of them) +- `native`: if set to `true`, causes Meson to find the dependency on + the build machine system rather than the host system (i.e. where the + cross compiled binary will run on), usually only needed if you build + a tool to be used during compilation. +- `not_found_message` *(since 0.50.0)*: an optional string that will + be printed as a `message()` if the dependency was not found. +- `required`: when set to false, Meson will proceed with the build + even if the dependency is not found. *(since 0.47.0)* The value of a + [`feature`](Build-options.md#features) option can also be passed. +- `static`: tells the dependency provider to try to get static + libraries instead of dynamic ones (note that this is not supported + by all dependency backends) +- `version` *(since 0.37.0)*: specifies the required version, a string containing a + comparison operator followed by the version string, examples include + `>1.0.0`, `<=2.3.5` or `3.1.4` for exact matching. 
+ You can also specify multiple restrictions by passing a list to this + keyword argument, such as: `['>=3.14.0', '<=4.1.0']`. + These requirements are never met if the version is unknown. +- `include_type` *(since 0.52.0)*: an enum flag, marking how the dependency + flags should be converted. Supported values are `'preserve'`, `'system'` and + `'non-system'`. System dependencies may be handled differently on some + platforms, for instance, using `-isystem` instead of `-I`, where possible. + If `include_type` is set to `'preserve'`, no additional conversion will be + performed. The default value is `'preserve'`. +- other +[library-specific](Dependencies.md#dependencies-with-custom-lookup-functionality) +keywords may also be accepted (e.g. `modules` specifies submodules to use for +dependencies such as Qt5 or Boost. `components` allows the user to manually +add CMake `COMPONENTS` for the `find_package` lookup) +- `disabler` *(since 0.49.0)*: if `true` and the dependency couldn't be found, + returns a [disabler object](#disabler-object) instead of a not-found dependency. + +If dependency_name is `''`, the dependency is always not found. So +with `required: false`, this always returns a dependency object for +which the `found()` method returns `false`, and which can be passed +like any other dependency to the `dependencies:` keyword argument of a +`build_target`. This can be used to implement a dependency which is +sometimes not required e.g. in some branches of a conditional, or with +a `fallback:` kwarg, can be used to declare an optional dependency +that only looks in the specified subproject, and only if that's +allowed by `--wrap-mode`. + +The returned object also has methods that are documented in the +[object methods section](#dependency-object) below. + +### disabler() + +*(since 0.44.0)* + +Returns a [disabler object](#disabler-object). + +### error() + +``` meson + void error(message) +``` + +Print the argument string and halts the build process. + +*(since 0.58.0)* Can take more than one argument that will be separated by +space. + +### environment() + +``` meson + environment_object environment(...) +``` + +*(since 0.35.0)* + +Returns an empty [environment variable object](#environment-object). + +*(since 0.52.0)* Takes an optional dictionary as first argument. If +provided, each key/value pair is added into the `environment_object` +as if `set()` method was called for each of them. + +### executable() + +``` meson + buildtarget executable(*exe_name*, *sources*, ...) +``` + +Creates a new executable. The first argument specifies its name and +the remaining positional arguments define the input files to use. They +can be of the following types: + +- Strings relative to the current source directory +- [`files()`](#files) objects defined in any preceding build file +- The return value of configure-time generators such as [`configure_file()`](#configure_file) +- The return value of build-time generators such as + [`custom_target()`](#custom_target) or + [`generator.process()`](#generator-object) + +These input files can be sources, objects, libraries, or any other +file. Meson will automatically categorize them based on the extension +and use them accordingly. For instance, sources (`.c`, `.cpp`, +`.vala`, `.rs`, etc) will be compiled and objects (`.o`, `.obj`) and +libraries (`.so`, `.dll`, etc) will be linked. 
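+
+As an illustrative sketch (all file and target names here are
+hypothetical), plain source strings, `files()` objects and dependency
+objects can be mixed freely and Meson will categorize each input by its
+type:
+
+```meson
+util_sources = files('util.c')       # files() object, usable from any subdir
+frob_dep = dependency('zlib')        # external dependency object
+
+exe = executable('frobnicator',
+  'main.c',                          # compiled as a C source
+  util_sources,
+  dependencies : frob_dep)
+```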
+
+With the Ninja backend, Meson will create a build-time [order-only
+dependency](https://ninja-build.org/manual.html#ref_dependencies) on
+all generated input files, including unknown files. This is needed to
+bootstrap the generation of the real dependencies in the
+[depfile](https://ninja-build.org/manual.html#ref_headers) generated
+by your compiler to determine when to rebuild sources. Ninja relies on
+this dependency file for all input files, generated and non-generated.
+The behavior is similar for other backends.
+
+Executable supports the following keyword arguments. Note that just
+like the positional arguments above, these keyword arguments can also
+be passed to [shared and static libraries](#library).
+
+- `<languagename>_pch`: precompiled header file to use for the given language
+- `<languagename>_args`: compiler flags to use for the given language;
+  e.g. `cpp_args` for C++
+- `build_by_default` *(since 0.38.0)*: causes, when set to true, to
+  have this target be built by default. This means it will be built when
+  `meson compile` is called without any arguments. The default value is
+  `true` for all built target types.
+- `build_rpath`: a string to add to the target's rpath definition in the
+  build dir, but which will be removed on install
+- `dependencies`: one or more objects created with
+  [`dependency`](#dependency) or [`find_library`](#compiler-object)
+  (for external deps) or [`declare_dependency`](#declare_dependency)
+  (for deps built by the project)
+- `extra_files`: not used for the build itself but are shown as
+  source files in IDEs that group files by targets (such as Visual
+  Studio)
+- `gui_app`: when set to true, flags this target as a GUI application
+  on platforms where this makes a difference. **Deprecated** since
+  0.56.0, use `win_subsystem` instead.
+- `link_args`: flags to use during linking. You can use UNIX-style
+  flags here for all platforms.
+- `link_depends`: strings, files, or custom targets the link step
+  depends on, such as a symbol visibility map. The purpose is to
+  automatically trigger a re-link (but not a re-compile) of the target
+  when this file changes.
+- `link_language` *(since 0.51.0)* *(broken until 0.55.0)*: makes the linker for this
+  target be for the specified language. It is generally unnecessary to set
+  this, as Meson will detect the right linker to use in most cases. There are
+  only two cases where this is needed: one, your main function in an
+  executable is not in the language Meson picked, or two, you want to force
+  a library to use only one ABI.
+- `link_whole` *(since 0.40.0)*: links all contents of the given static libraries
+  whether they are used or not, equivalent to the `-Wl,--whole-archive` argument flag of GCC.
+  *(since 0.41.0)* If passed a list, that list will be flattened.
+  *(since 0.51.0)* This argument also accepts outputs produced by
+  custom targets. The user must ensure that the output is a library in
+  the correct format.
+- `link_with`: one or more shared or static libraries (built by this
+  project) that this target should be linked with. *(since 0.41.0)* If passed a
+  list, this list will be flattened. *(since 0.51.0)* The arguments can also be custom targets.
+  In this case Meson will assume that merely adding the output file in the linker command
+  line is sufficient to make linking work. If this is not sufficient,
+  then the build system writer must write all other steps manually.
+- `export_dynamic` *(since 0.45.0)*: when set to true causes the target's symbols to be + dynamically exported, allowing modules built using the + [`shared_module`](#shared_module) function to refer to functions, + variables and other symbols defined in the executable itself. Implies + the `implib` argument. +- `implib` *(since 0.42.0)*: when set to true, an import library is generated for the + executable (the name of the import library is based on *exe_name*). + Alternatively, when set to a string, that gives the base name for + the import library. The import library is used when the returned + build target object appears in `link_with:` elsewhere. Only has any + effect on platforms where that is meaningful (e.g. Windows). Implies + the `export_dynamic` argument. +- `implicit_include_directories` *(since 0.42.0)*: a boolean telling whether Meson + adds the current source and build directories to the include path, + defaults to `true`. +- `include_directories`: one or more objects created with the + `include_directories` function, or *(since 0.50.0)* strings, which + will be transparently expanded to include directory objects +- `install`: when set to true, this executable should be installed, defaults to `false` +- `install_dir`: override install directory for this file. The value is + relative to the `prefix` specified. F.ex, if you want to install + plugins into a subdir, you'd use something like this: `install_dir : + get_option('libdir') / 'projectname-1.0'`. +- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format + and optionally the owner/uid and group/gid for the installed files. +- `install_rpath`: a string to set the target's rpath to after install + (but *not* before that). On Windows, this argument has no effect. +- `objects`: list of prebuilt object files (usually for third party + products you don't have source to) that should be linked in this + target, **never** use this for object files that you build yourself. +- `name_suffix`: the string that will be used as the extension for the + target by overriding the default. By default on Windows this is + `exe` and on other platforms it is omitted. Set this to `[]`, or omit + the keyword argument for the default behaviour. +- `override_options` *(since 0.40.0)*: takes an array of strings in the same format as + `project`'s `default_options` overriding the values of these options + for this target only. +- `gnu_symbol_visibility` *(since 0.48.0)*: specifies how symbols should be exported, see + e.g [the GCC Wiki](https://gcc.gnu.org/wiki/Visibility) for more + information. This value can either be an empty string or one of + `default`, `internal`, `hidden`, `protected` or `inlineshidden`, which + is the same as `hidden` but also includes things like C++ implicit + constructors as specified in the GCC manual. Ignored on compilers that + do not support GNU visibility arguments. +- `d_import_dirs`: list of directories to look in for string imports used + in the D programming language +- `d_unittest`: when set to true, the D modules are compiled in debug mode +- `d_module_versions`: list of module version identifiers set when compiling D sources +- `d_debug`: list of module debug identifiers set when compiling D sources +- `pie` *(since 0.49.0)*: build a position-independent executable +- `native`: is a boolean controlling whether the target is compiled for the + build or host machines. Defaults to false, building for the host machine. 
+- `win_subsystem` *(since 0.56.0)* specifies the subsystem type to use + on the Windows platform. Typical values include `console` for text + mode programs and `windows` for gui apps. The value can also contain + version specification such as `windows,6.0`. See [MSDN + documentation](https://docs.microsoft.com/en-us/cpp/build/reference/subsystem-specify-subsystem) + for the full list. The default value is `console`. + +The list of `sources`, `objects`, and `dependencies` is always +flattened, which means you can freely nest and add lists while +creating the final list. + +The returned object also has methods that are documented in the +[object methods section](#build-target-object) below. + +### find_library() + +*(since 0.31.0)* **(deprecated)** Use `find_library()` method of +[the compiler object](#compiler-object) as obtained from +`meson.get_compiler(lang)`. + +### find_program() + +``` meson + program find_program(program_name1, program_name2, ...) +``` + +`program_name1` here is a string that can be an executable or script +to be searched for in `PATH` or other places inside the project. +The search order is: + +1. Program overrides set via [`meson.override_find_program()`](Reference-manual.md#meson-object) +1. [`[provide]` sections](Wrap-dependency-system-manual.md#provide-section) + in subproject wrap files, if [`wrap_mode`](Builtin-options.md#core-options) is + set to `forcefallback` +1. [`[binaries]` section](Machine-files.md#binaries) in your machine files +1. Directories provided using the `dirs:` kwarg (see below) +1. Project's source tree relative to the current subdir + - If you use the return value of [`configure_file()`](#configure_file), the + current subdir inside the build tree is used instead +1. `PATH` environment variable +1. [`[provide]` sections](Wrap-dependency-system-manual.md#provide-section) in + subproject wrap files, if [`wrap_mode`](Builtin-options.md#core-options) is + set to anything other than `nofallback` + +*(since 0.37.0)* `program_name2` and later positional arguments are used as fallback +strings to search for. This is meant to be used for cases where the +program may have many alternative names, such as `foo` and +`foo.py`. The function will check for the arguments one by one and the +first one that is found is returned. + +Keyword arguments are the following: + +- `required` By default, `required` is set to `true` and Meson will + abort if no program can be found. If `required` is set to `false`, + Meson continue even if none of the programs can be found. You can + then use the `.found()` method on the [returned object](#external-program-object) to check + whether it was found or not. *(since 0.47.0)* The value of a + [`feature`](Build-options.md#features) option can also be passed to the + `required` keyword argument. + +- `native` *(since 0.43.0)*: defines how this executable should be searched. By default + it is set to `false`, which causes Meson to first look for the + executable in the cross file (when cross building) and if it is not + defined there, then from the system. If set to `true`, the cross + file is ignored and the program is only searched from the system. + +- `disabler` *(since 0.49.0)*: if `true` and the program couldn't be found, return a + [disabler object](#disabler-object) instead of a not-found object. + + +- `version` *(since 0.52.0)*: specifies the required version, see + [`dependency()`](#dependency) for argument format. The version of the program + is determined by running `program_name --version` command. 
If stdout is empty + it fallbacks to stderr. If the output contains more text than simply a version + number, only the first occurrence of numbers separated by dots is kept. + If the output is more complicated than that, the version checking will have to + be done manually using [`run_command()`](#run_command). + +- `dirs` *(since 0.53.0)*: extra list of absolute paths where to look for program + names. + +Meson will also autodetect scripts with a shebang line and run them +with the executable/interpreter specified in it both on Windows +(because the command invocator will reject the command otherwise) and +Unixes (if the script file does not have the executable bit set). +Hence, you *must not* manually add the interpreter while using this +script as part of a list of commands. + +If you need to check for a program in a non-standard location, you can +just pass an absolute path to `find_program`, e.g. + +```meson +setcap = find_program('setcap', '/usr/sbin/setcap', '/sbin/setcap', required : false) +``` + +It is also possible to pass an array to `find_program` in case you +need to construct the set of paths to search on the fly: + +```meson +setcap = find_program(['setcap', '/usr/sbin/setcap', '/sbin/setcap'], required : false) +``` + +The returned object also has methods that are documented in the +[object methods section](#external-program-object) below. + +### files() + +``` meson + file_array files(list_of_filenames) +``` + +This command takes the strings given to it in arguments and returns +corresponding File objects that you can use as sources for build +targets. The difference is that file objects remember the subdirectory +they were defined in and can be used anywhere in the source tree. As +an example suppose you have source file `foo.cpp` in subdirectory +`bar1` and you would like to use it in a build target that is defined +in `bar2`. To make this happen you first create the object in `bar1` +like this: + +```meson + foofile = files('foo.cpp') +``` + +Then you can use it in `bar2` like this: + +```meson + executable('myprog', 'myprog.cpp', foofile, ...) +``` + +Meson will then do the right thing. + +### generator() + +``` meson + generator_object generator(*executable*, ...) +``` + +See also: [`custom_target`](#custom_target) + +This function creates a [generator object](#generator-object) that can +be used to run custom compilation commands. The only positional +argument is the executable to use. It can either be a self-built +executable or one returned by find_program. Keyword arguments are the +following: + +- `arguments`: a list of template strings that will be the command line + arguments passed to the executable +- `depends` *(since 0.51.0)*: is an array of build targets that must be built before this + generator can be run. This is used if you have a generator that calls + a second executable that is built in this project. +- `depfile`: is a template string pointing to a dependency file that a + generator can write listing all the additional files this target + depends on, for example a C compiler would list all the header files + it included, and a change in any one of these files triggers a + recompilation +- `output`: a template string (or list of template strings) defining + how an output file name is (or multiple output names are) generated + from a single source file name +- `capture` *(since 0.43.0)*: when this argument is set to true, Meson + captures `stdout` of the `executable` and writes it to the target file + specified as `output`. 
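+
+As a brief sketch, a generator wrapping a hypothetical `mygen` code
+generating tool could be declared as follows (the `@INPUT@`, `@OUTPUT@`
+and `@BASENAME@` substitutions used here are documented below):
+
+```meson
+mygen = find_program('mygen')   # hypothetical code generator
+gen = generator(mygen,
+  output : '@BASENAME@.c',
+  arguments : ['@INPUT@', '--output', '@OUTPUT@'])
+
+# Produces data1.c and data2.c in a target-private directory.
+generated = gen.process('data1.def', 'data2.def')
+```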
+ +The returned object also has methods that are documented in the +[object methods section](#generator-object) below. + +The template strings passed to all the above keyword arguments accept +the following special substitutions: + +- `@PLAINNAME@`: the complete input file name, e.g: `foo.c` becomes `foo.c` (unchanged) +- `@BASENAME@`: the base of the input filename, e.g.: `foo.c.y` becomes `foo.c` (extension is removed) + +Each string passed to the `output` keyword argument *must* be +constructed using one or both of these two substitutions. + +In addition to the above substitutions, the `arguments` keyword +argument also accepts the following: + +- `@OUTPUT@`: the full path to the output file +- `@INPUT@`: the full path to the input file +- `@DEPFILE@`: the full path to the depfile +- `@SOURCE_DIR@`: the full path to the root of the source tree +- `@CURRENT_SOURCE_DIR@`: this is the directory where the currently processed meson.build is located in +- `@BUILD_DIR@`: the full path to the root of the build dir where the output will be placed + +NOTE: Generators should only be used for outputs that will ***only*** +be used as inputs for a [build target](#build_target) or a [custom +target](#custom_target). When you use the processed output of a +generator in multiple targets, the generator will be run multiple +times to create outputs for each target. Each output will be created +in a target-private directory `@BUILD_DIR@`. + +If you want to generate files for general purposes such as for +generating headers to be used by several sources, or data that will be +installed, and so on, use a [`custom_target`](#custom_target) instead. + +### get_option() + +``` meson + value get_option(option_name) +``` + +Obtains the value of the [project build option](Build-options.md) +specified in the positional argument. + +Note that the value returned for built-in options that end in `dir` +such as `bindir` and `libdir` is always a path relative to (and +inside) the `prefix`. + +The only exceptions are: `sysconfdir`, `localstatedir`, and +`sharedstatedir` which will return the value passed during +configuration as-is, which may be absolute, or relative to `prefix`. +[`install_dir` arguments](Installing.md) handles that as expected, but +if you need the absolute path to one of these e.g. to use in a define +etc., you should use `get_option('prefix') / +get_option('localstatedir')` + +For options of type `feature` a +[feature option object](#feature-option-object) +is returned instead of a string. +See [`feature` options](Build-options.md#features) +documentation for more details. + +### get_variable() + +``` meson + value get_variable(variable_name, fallback) +``` + +This function can be used to dynamically obtain a variable. `res = +get_variable(varname, fallback)` takes the value of `varname` (which +must be a string) and stores the variable of that name into `res`. If +the variable does not exist, the variable `fallback` is stored to +`res`instead. If a fallback is not specified, then attempting to read +a non-existing variable will cause a fatal error. + +### import() + +``` + module_object import(string, required : bool | feature, disabler : bool) +``` + +Imports the given extension module. Returns an object that can be used to call +the methods of the module. Here's an example for a hypothetical `testmod` +module. 
+ +```meson + tmod = import('testmod') + tmod.do_something() +``` + +*Since 0.59.0* the required and disabler keyword arguments + +### include_directories() + +``` meson + include_object include_directories(directory_names, ...) +``` + +Returns an opaque object which contains the directories (relative to +the current directory) given in the positional arguments. The result +can then be passed to the `include_directories:` keyword argument when +building executables or libraries. You can use the returned object in +any subdirectory you want, Meson will make the paths work +automatically. + +Note that this function call itself does not add the directories into +the search path, since there is no global search path. For something +like that, see [`add_project_arguments()`](#add_project_arguments). + +See also `implicit_include_directories` parameter of +[executable()](#executable), which adds current source and build +directories to include path. + +Each directory given is converted to two include paths: one that is +relative to the source root and one relative to the build root. + +For example, with the following source tree layout in +`/home/user/project.git`: + +`meson.build`: +```meson +project(...) + +subdir('include') +subdir('src') + +... +``` + +`include/meson.build`: +```meson +inc = include_directories('.') + +... +``` + +`src/meson.build`: +```meson +sources = [...] + +executable('some-tool', sources, + include_directories : inc, + ...) + +... +``` + +If the build tree is `/tmp/build-tree`, the following include paths +will be added to the `executable()` call: `-I/tmp/build-tree/include +-I/home/user/project.git/include`. + +This function has one keyword argument `is_system` which, if set, +flags the specified directories as system directories. This means that +they will be used with the `-isystem` compiler argument rather than +`-I` on compilers that support this flag (in practice everything +except Visual Studio). + +### install_data() + +``` meson + void install_data(list_of_files, ...) +``` + +Installs files from the source tree that are listed as positional +arguments. The following keyword arguments are supported: + +- `install_dir`: the absolute or relative path to the installation + directory. If this is a relative path, it is assumed to be relative + to the prefix. + + If omitted, the directory defaults to `{datadir}/{projectname}` *(since 0.45.0)*. + +- `install_mode`: specify the file mode in symbolic format and + optionally the owner/uid and group/gid for the installed files. For + example: + + `install_mode: 'rw-r--r--'` for just the file mode + + `install_mode: ['rw-r--r--', 'nobody', 'nogroup']` for the file mode and the user/group + + `install_mode: ['rw-r-----', 0, 0]` for the file mode and uid/gid + + To leave any of these three as the default, specify `false`. + +- `rename` *(since 0.46.0)*: if specified renames each source file into corresponding + file from `rename` list. Nested paths are allowed and they are + joined with `install_dir`. Length of `rename` list must be equal to + the number of sources. + +See [Installing](Installing.md) for more examples. + +### install_headers() + +``` meson + void install_headers(list_of_headers, ...) +``` + +Installs the specified header files from the source tree into the +system header directory (usually `/{prefix}/include`) during the +install step. This directory can be overridden by specifying it with +the `install_dir` keyword argument. 
If you just want to install into a +subdirectory of the system header directory, then use the `subdir` +argument. As an example if this has the value `myproj` then the +headers would be installed to `/{prefix}/include/myproj`. + +For example, this will install `common.h` and `kola.h` into +`/{prefix}/include`: + +```meson +install_headers('common.h', 'proj/kola.h') +``` + +This will install `common.h` and `kola.h` into `/{prefix}/include/myproj`: + +```meson +install_headers('common.h', 'proj/kola.h', subdir : 'myproj') +``` + +This will install `common.h` and `kola.h` into `/{prefix}/cust/myproj`: + +```meson +install_headers('common.h', 'proj/kola.h', install_dir : 'cust', subdir : 'myproj') +``` + +Accepts the following keywords: + +- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic + format and optionally the owner/uid and group/gid for the installed files. + An example value could be `['rwxr-sr-x', 'root', 'root']`. + +### install_man() + +``` meson + void install_man(list_of_manpages, ...) +``` + +Installs the specified man files from the source tree into system's +man directory during the install step. This directory can be +overridden by specifying it with the `install_dir` keyword argument. + +Accepts the following keywords: + +- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic + format and optionally the owner/uid and group/gid for the installed files. + An example value could be `['rwxr-sr-x', 'root', 'root']`. + +- `locale` *(since 0.58.0)*: can be used to specify the locale into which the + man page will be installed within the manual page directory tree. + An example manual might be `foo.fr.1` with a locale of `fr`, such + that `{mandir}/{locale}/man{num}/foo.1` becomes the installed file. + +*(since 0.49.0)* [manpages are no longer compressed + implicitly][install_man_49]. + +[install_man_49]: +https://mesonbuild.com/Release-notes-for-0-49-0.html#manpages-are-no-longer-compressed-implicitly + +### install_subdir() + +``` meson + void install_subdir(subdir_name, + install_dir : ..., + exclude_files : ..., + exclude_directories : ..., + strip_directory : ...) +``` + +Installs the entire given subdirectory and its contents from the +source tree to the location specified by the keyword argument +`install_dir`. + +If the subdirectory does not exist in the source tree, an empty directory is +created in the specified location. *(since 0.45.0)* A newly created +subdirectory may only be created in the keyword argument `install_dir`. + +The following keyword arguments are supported: + +- `exclude_files`: a list of file names that should not be installed. + Names are interpreted as paths relative to the `subdir_name` location. +- `exclude_directories`: a list of directory names that should not be installed. + Names are interpreted as paths relative to the `subdir_name` location. +- `install_dir`: the location to place the installed subdirectory. +- `install_mode` *(since 0.47.0)*: the file mode in symbolic format and optionally + the owner/uid and group/gid for the installed files. +- `strip_directory` *(since 0.45.0)*: install directory contents. `strip_directory=false` by default. + If `strip_directory=true` only the last component of the source path is used. 
+ +For a given directory `foo`: +```text +foo/ + bar/ + file1 + file2 +``` +`install_subdir('foo', install_dir : 'share', strip_directory : false)` creates +```text +share/ + foo/ + bar/ + file1 + file2 +``` + +`install_subdir('foo', install_dir : 'share', strip_directory : true)` creates +```text +share/ + bar/ + file1 + file2 +``` + +`install_subdir('foo/bar', install_dir : 'share', strip_directory : false)` creates +```text +share/ + bar/ + file1 +``` + +`install_subdir('foo/bar', install_dir : 'share', strip_directory : true)` creates +```text +share/ + file1 +``` + +`install_subdir('new_directory', install_dir : 'share')` creates +```text +share/ + new_directory/ +``` + +### is_disabler() + +``` meson + bool is_disabler(var) +``` + +*(since 0.52.0)* + +Returns true if a variable is a disabler and false otherwise. + +### is_variable() + +``` meson + bool is_variable(varname) +``` + +Returns true if a variable of the given name exists and false otherwise. + +### jar() + +```meson + jar_object jar(name, list_of_sources, ...) +``` + +Build a jar from the specified Java source files. Keyword arguments +are the same as [`executable`](#executable)'s, with the addition of +`main_class` which specifies the main class to execute when running +the jar with `java -jar file.jar`. + +### join_paths() + +``` meson +string join_paths(string1, string2, ...) +``` + +*(since 0.36.0)* + +Joins the given strings into a file system path segment. For example +`join_paths('foo', 'bar')` results in `foo/bar`. If any one of the +individual segments is an absolute path, all segments before it are +dropped. That means that `join_paths('foo', '/bar')` returns `/bar`. + +**Warning** Don't use `join_paths()` for sources in [`library`](#library) and +[`executable`](#executable), you should use [`files`](#files) instead. + +*(since 0.49.0)* Using the`/` operator on strings is equivalent to calling +`join_paths`. + +```meson +# res1 and res2 will have identical values +res1 = join_paths(foo, bar) +res2 = foo / bar +``` + +### library() + +``` meson + buildtarget library(library_name, list_of_sources, ...) +``` + +Builds a library that is either static, shared or both depending on +the value of `default_library` +user [option](https://mesonbuild.com/Builtin-options.html). +You should use this instead of [`shared_library`](#shared_library), +[`static_library`](#static_library) or +[`both_libraries`](#both_libraries) most of the time. This allows you +to toggle your entire project (including subprojects) from shared to +static with only one option. This option applies to libraries being +built internal to the entire project. For external dependencies, the +default library type preferred is shared. This can be adapted on a per +library basis using the [dependency()](#dependency)) `static` keyword. + +The keyword arguments for this are the same as for +[`executable`](#executable) with the following additions: + +- `name_prefix`: the string that will be used as the prefix for the + target output filename by overriding the default (only used for + libraries). By default this is `lib` on all platforms and compilers, + except for MSVC shared libraries where it is omitted to follow + convention, and Cygwin shared libraries where it is `cyg`. +- `name_suffix`: the string that will be used as the suffix for the + target output filename by overriding the default (see also: + [executable()](#executable)). By default, for shared libraries this + is `dylib` on macOS, `dll` on Windows, and `so` everywhere else. 
+ For static libraries, it is `a` everywhere. By convention MSVC + static libraries use the `lib` suffix, but we use `a` to avoid a + potential name clash with shared libraries which also generate + import libraries with a `lib` suffix. +- `rust_crate_type`: specifies the crate type for Rust + libraries. Defaults to `dylib` for shared libraries and `rlib` for + static libraries. + +`static_library`, `shared_library` and `both_libraries` also accept +these keyword arguments. + +Note: You can set `name_prefix` and `name_suffix` to `[]`, or omit +them for the default behaviour for each platform. + +### message() + +``` meson + void message(text) +``` + +This function prints its argument to stdout. + +*(since 0.54.0)* Can take more than one argument that will be +separated by space. + +### warning() + +``` meson + void warning(text) +``` + +*(since 0.44.0)* + +This function prints its argument to stdout prefixed with WARNING:. + +*(since 0.54.0)* Can take more than one argument that will be separated by +space. + +### summary() + +``` meson + void summary(key, value) + void summary(dictionary) +``` + +*(since 0.53.0)* + +This function is used to summarize build configuration at the end of the build +process. This function provides a way for projects (and subprojects) to report +this information in a clear way. + +The content is a series of key/value pairs grouped into sections. If +the section keyword argument is omitted, those key/value pairs are +implicitly grouped into a section with no title. key/value pairs can +optionally be grouped into a dictionary, but keep in mind that +dictionaries does not guarantee ordering. `key` must be string, +`value` can be: + +- an integer, boolean or string +- *since 0.57.0* an external program or a dependency +- *since 0.58.0* a feature option +- a list of those. + +`summary()` can be called multiple times as long as the same +section/key pair doesn't appear twice. All sections will be collected +and printed at the end of the configuration in the same order as they +have been called. + +Keyword arguments: +- `section`: title to group a set of key/value pairs. +- `bool_yn`: if set to true, all boolean values will be replaced by green YES + or red NO. +- `list_sep` *(since 0.54.0)*: string used to separate list values (e.g. `', '`). + +Example: +```meson +project('My Project', version : '1.0') +summary({'bindir': get_option('bindir'), + 'libdir': get_option('libdir'), + 'datadir': get_option('datadir'), + }, section: 'Directories') +summary({'Some boolean': false, + 'Another boolean': true, + 'Some string': 'Hello World', + 'A list': ['string', 1, true], + }, section: 'Configuration') +``` + +Output: +``` +My Project 1.0 + + Directories + prefix : /opt/gnome + bindir : bin + libdir : lib/x86_64-linux-gnu + datadir : share + + Configuration + Some boolean : False + Another boolean: True + Some string : Hello World + A list : string + 1 + True +``` + +### project() + +``` meson + void project(project_name, list_of_languages, ...) +``` + +The first argument to this function must be a string defining the name +of this project. + +The project name can be any string you want, it's not used for +anything except descriptive purposes. However since it is written to +e.g. the dependency manifest is usually makes sense to have it be the +same as the project tarball or pkg-config name. So for example you +would probably want to use the name _libfoobar_ instead of _The Foobar +Library_. + +It may be followed by the list of programming languages that the project uses. 
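+
+For example, a typical declaration (the name, version and options shown
+here are only illustrative; the keyword arguments are described below)
+might look like this:
+
+```meson
+project('frobnicator', 'c', 'cpp',
+  version : '1.0.0',
+  license : 'MIT',
+  meson_version : '>=0.56.0',
+  default_options : ['warning_level=3', 'buildtype=debugoptimized'])
+```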
+ +*(since 0.40.0)* The list of languages is optional. + +These languages may be used both for `native: false` (the default) +(host machine) targets and for `native: true` (build machine) targets. +*(since 0.56.0)* The build machine compilers for the specified +languages are not required. + +Supported values for languages are `c`, `cpp` (for `C++`), `cuda`, `d`, +`objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`), `vala` and `rust`. + +Project supports the following keyword arguments. + +- `default_options`: takes an array of strings. The strings are in the + form `key=value` and have the same format as options to + `meson configure`. For example to set the default project type you would + set this: `default_options : ['buildtype=debugoptimized']`. Note + that these settings are only used when running Meson for the first + time. Global options such as `buildtype` can only be specified in + the master project, settings in subprojects are ignored. Project + specific options are used normally even in subprojects. + + +- `license`: takes a string or array of strings describing the license(s) the + code is under. To avoid ambiguity it is recommended to use a standardized + license identifier from the [SPDX license list](https://spdx.org/licenses/). + Usually this would be something like `license : 'GPL-2.0-or-later'`, but if + the code has multiple licenses you can specify them as an array like this: + `license : ['proprietary', 'GPL-3.0-only']`. Note that the text is informal + and is only written to the dependency manifest. Meson does not do any license + validation, you are responsible for verifying that you abide by all licensing + terms. You can access the value in your Meson build files with + `meson.project_license()`. + +- `meson_version`: takes a string describing which Meson version the + project requires. Usually something like `>=0.28.0`. + +- `subproject_dir`: specifies the top level directory name that holds + Meson subprojects. This is only meant as a compatibility option + for existing code bases that house their embedded source code in a + custom directory. All new projects should not set this but instead + use the default value. It should be noted that this keyword + argument is ignored inside subprojects. There can be only one + subproject dir and it is set in the top level Meson file. + +- `version`: which is a free form string describing the version of + this project. You can access the value in your Meson build files + with `meson.project_version()`. Since 0.57.0 this can also be a + `File` object pointing to a file that contains exactly one line of + text. + +### run_command() + +``` meson + runresult run_command(command, list_of_args, ...) +``` + +Runs the command specified in positional arguments. `command` can be a +string, or the output of [`find_program()`](#find_program), +[`files()`](#files) or [`configure_file()`](#configure_file), or [a +compiler object](#compiler-object). + +Returns [an opaque object](#run-result-object) containing the result +of the invocation. The command is run from an *unspecified* directory, +and Meson will set three environment variables `MESON_SOURCE_ROOT`, +`MESON_BUILD_ROOT` and `MESON_SUBDIR` that specify the source +directory, build directory and subdirectory the target was defined in, +respectively. + +This function supports the following keyword arguments: + + - `check` *(since 0.47.0)*: takes a boolean. If `true`, the exit status code of the command will + be checked, and the configuration will fail if it is non-zero. 
The default is + `false`. + - `env` *(since 0.50.0)*: environment variables to set, such as `['NAME1=value1', + 'NAME2=value2']`, or an [`environment()` + object](#environment-object) which allows more sophisticated + environment juggling. *(since 0.52.0)* A dictionary is also accepted. + +See also [External commands](External-commands.md). + +### run_target + +``` meson +runtarget run_target(target_name, ...) +``` + +This function creates a new top-level target that runs a specified +command with the specified arguments. Like all top-level targets, this +integrates with the selected backend. For instance, you can run it as +`meson compile target_name`. Note that a run target produces no output +as far as Meson is concerned. It is only meant for tasks such as +running a code formatter or flashing an external device's firmware +with a built file. + +The command is run from an *unspecified* directory, and Meson will set +three environment variables `MESON_SOURCE_ROOT`, `MESON_BUILD_ROOT` +and `MESON_SUBDIR` that specify the source directory, build directory +and subdirectory the target was defined in, respectively. + + - `command` is a list containing the command to run and the arguments + to pass to it. Each list item may be a string or a target. For + instance, passing the return value of [`executable()`](#executable) + as the first item will run that executable, or passing a string as + the first item will find that command in `PATH` and run it. +- `depends` is a list of targets that this target depends on but which + are not listed in the command array (because, for example, the + script does file globbing internally) +- `env` *(since 0.57.0)*: environment variables to set, such as + `{'NAME1': 'value1', 'NAME2': 'value2'}` or `['NAME1=value1', 'NAME2=value2']`, + or an [`environment()` object](#environment-object) which allows more + sophisticated environment juggling. + +*Since 0.57.0* The template strings passed to `command` keyword arguments accept +the following special substitutions: +- `@SOURCE_ROOT@`: the path to the root of the source tree. Depending on the backend, + this may be an absolute or a relative to current workdir path. +- `@BUILD_ROOT@`: the path to the root of the build tree. Depending on the backend, + this may be an absolute or a relative to current workdir path. +- `@CURRENT_SOURCE_DIR@` *Since 0.57.1*: this is the directory where the currently + processed meson.build is located in. Depending on the backend, + this may be an absolute or a relative to current workdir path. + +### set_variable() + +``` meson + void set_variable(variable_name, value) +``` + +Assigns a value to the given variable name. Calling +`set_variable('foo', bar)` is equivalent to `foo = bar`. + +*(since 0.46.1)* The `value` parameter can be an array type. + +### shared_library() + +``` meson + buildtarget shared_library(library_name, list_of_sources, ...) +``` + +Builds a shared library with the given sources. Positional and keyword +arguments are the same as for [`library`](#library) with the following +extra keyword arguments. + +- `soversion`: a string specifying the soversion of this shared + library, such as `0`. On Linux and Windows this is used to set the + soversion (or equivalent) in the filename. For example, if + `soversion` is `4`, a Windows DLL will be called `foo-4.dll` and one + of the aliases of the Linux shared library would be + `libfoo.so.4`. If this is not specified, the first part of `version` + is used instead (see below). 
For example, if `version` is `3.6.0` and + `soversion` is not defined, it is set to `3`. +- `version`: a string specifying the version of this shared library, + such as `1.1.0`. On Linux and OS X, this is used to set the shared + library version in the filename, such as `libfoo.so.1.1.0` and + `libfoo.1.1.0.dylib`. If this is not specified, `soversion` is used + instead (see above). +- `darwin_versions` *(since 0.48.0)*: an integer, string, or a list of + versions to use for setting dylib `compatibility version` and + `current version` on macOS. If a list is specified, it must be + either zero, one, or two elements. If only one element is specified + or if it's not a list, the specified value will be used for setting + both compatibility version and current version. If unspecified, the + `soversion` will be used as per the aforementioned rules. +- `vs_module_defs`: a string, a File object, or Custom Target for a + Microsoft module definition file for controlling symbol exports, + etc., on platforms where that is possible (e.g. Windows). + +### shared_module() + +``` meson + buildtarget shared_module(module_name, list_of_sources, ...) +``` + +*(since 0.37.0)* + +Builds a shared module with the given sources. Positional and keyword +arguments are the same as for [`library`](#library). + +This is useful for building modules that will be `dlopen()`ed and +hence may contain undefined symbols that will be provided by the +library that is loading it. + +If you want the shared module to be able to refer to functions and +variables defined in the [`executable`](#executable) it is loaded by, +you will need to set the `export_dynamic` argument of the executable to +`true`. + +Supports the following extra keyword arguments: + +- `vs_module_defs` *(since 0.52.0)*: a string, a File object, or + Custom Target for a Microsoft module definition file for controlling + symbol exports, etc., on platforms where that is possible + (e.g. Windows). + +**Note:** Linking to a shared module is not supported on some +platforms, notably OSX. Consider using a +[`shared_library`](#shared_library) instead, if you need to both +`dlopen()` and link with a library. + +### static_library() + +``` meson + buildtarget static_library(library_name, list_of_sources, ...) +``` + +Builds a static library with the given sources. Positional and keyword +arguments are as for [`library`](#library), as well as: + + - `pic` *(since 0.36.0)*: builds the library as positional + independent code (so it can be linked into a shared library). This + option has no effect on Windows and OS X since it doesn't make + sense on Windows and PIC cannot be disabled on OS X. + +- `prelink` *since0.57.0*: if `true` the object files in the target + will be prelinked, meaning that it will contain only one prelinked + object file rather than the individual object files. + +### subdir() + +``` meson + void subdir(dir_name, ...) +``` + +Enters the specified subdirectory and executes the `meson.build` file +in it. Once that is done, it returns and execution continues on the +line following this `subdir()` command. Variables defined in that +`meson.build` file are then available for use in later parts of the +current build file and in all subsequent build files executed with +`subdir()`. + +Note that this means that each `meson.build` file in a source tree can +and must only be executed once. + +This function has one keyword argument. 
+
+ - `if_found`: takes one or several dependency objects and will only
+   recurse in the subdir if they all return `true` when queried with
+   `.found()`
+
+### subdir_done()
+
+``` meson
+  subdir_done()
+```
+
+Stops further interpretation of the Meson script file from the point
+of the invocation. All steps executed up to this point are valid and
+will be executed by Meson. This means that all targets defined before
+the call of `subdir_done` will be built.
+
+If the current script was called by `subdir`, the execution returns to
+the calling directory and continues as if the script had reached the
+end. If the current script is the top level script, Meson configures
+the project as defined up to this point.
+
+Example:
+```meson
+project('example exit', 'cpp')
+executable('exe1', 'exe1.cpp')
+subdir_done()
+executable('exe2', 'exe2.cpp')
+```
+
+The executable `exe1` will be built, while the executable `exe2` will
+not be built.
+
+### subproject()
+
+``` meson
+  subproject_object subproject(subproject_name, ...)
+```
+
+Takes the project specified in the positional argument and brings it
+into the current build specification by returning a [subproject
+object](#subproject-object). Subprojects must always be placed inside
+the `subprojects` directory at the top source directory. So for
+example a subproject called `foo` must be located in
+`${MESON_SOURCE_ROOT}/subprojects/foo`. Supports the following keyword
+arguments:
+
+ - `default_options` *(since 0.37.0)*: an array of default option values
+   that override those set in the subproject's `meson_options.txt`
+   (like `default_options` in `project`, they only have effect when
+   Meson is run for the first time, and command line arguments override
+   any default options in build files). *(since 0.54.0)* The `default_library`
+   built-in option can also be overridden.
+ - `version`: works the same as in `dependency`.
+   It specifies what version the subproject should be, for example `>=1.0.1`.
+ - `required` *(since 0.48.0)*: By default, `required` is `true` and
+   Meson will abort if the subproject could not be set up. You can set
+   this to `false` and then use the `.found()` method on the [returned
+   object](#subproject-object). You may also pass the value of a
+   [`feature`](Build-options.md#features) option, same as
+   [`dependency()`](#dependency).
+
+Note that you can use the returned [subproject
+object](#subproject-object) to access any variable in the
+subproject. However, if you want to use a dependency object from
+inside a subproject, an easier way is to use the `fallback:` keyword
+argument to [`dependency()`](#dependency).
+
+[See additional documentation](Subprojects.md).
+
+### test()
+
+``` meson
+  void test(name, executable, ...)
+```
+
+Defines a test to run with the test harness. Takes two positional
+arguments: the first is the name of the test and the second is the
+executable to run. The executable can be an [executable build target
+object](#build-target-object) returned by
+[`executable()`](#executable) or an [external program
+object](#external-program-object) returned by
+[`find_program()`](#find_program).
+
+*(since 0.55.0)* When cross compiling, if an exe_wrapper is needed and
+defined, the environment variable `MESON_EXE_WRAPPER` will be set to
+the string value of that wrapper (implementation detail: using
+`mesonlib.join_args`). Test scripts may use this to run cross built
+binaries.
+If your test needs `MESON_EXE_WRAPPER` in cross-build
+situations, it is your responsibility to return code 77 to tell the
+harness to report "skip".
+
+By default, the environment variable
+[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html)
+is automatically set by `meson test` to a random value between 1 and 255.
+This can help find memory leaks on configurations using glibc,
+including with non-GCC compilers. However, this can have a performance
+impact, and may fail a test due to external libraries whose internals
+are out of the user's control. To check whether this feature is causing
+a runtime crash, disable it by temporarily setting the environment
+variable `MALLOC_PERTURB_=0`. While it's preferable to only disable
+this check temporarily, a project that requires it to be permanently
+disabled can do so in meson.build like this:
+
+```meson
+nomalloc = environment({'MALLOC_PERTURB_': '0'})
+
+test(..., env: nomalloc, ...)
+```
+
+#### test() Keyword arguments
+
+- `args`: arguments to pass to the executable
+
+- `env`: environment variables to set, such as `['NAME1=value1',
+  'NAME2=value2']`, or an [`environment()`
+  object](#environment-object) which allows more sophisticated
+  environment juggling. *(since 0.52.0)* A dictionary is also accepted.
+
+- `is_parallel`: when false, specifies that no other test must be
+  running at the same time as this test
+
+- `should_fail`: when true the test is considered passed if the
+  executable returns a non-zero return value (i.e. reports an error)
+
+- `suite`: `'label'` (or list of labels `['label1', 'label2']`)
+  attached to this test. The suite name is qualified by a (sub)project
+  name resulting in `(sub)project_name:label`. In the case of a list
+  of strings, the suite names will be `(sub)project_name:label1`,
+  `(sub)project_name:label2`, etc.
+
+- `timeout`: the number of seconds the test is allowed to run; a test
+  that exceeds its time limit is always considered failed. Defaults to
+  30 seconds. *Since 0.57* if timeout is `<= 0` the test has infinite duration;
+  in previous versions of Meson the test would fail with a timeout immediately.
+
+- `workdir`: absolute path that will be used as the working directory
+  for the test
+
+- `depends` *(since 0.46.0)*: specifies that this test depends on the specified
+  target(s), even though it does not take any of them as a command
+  line argument. This is meant for cases where the test finds those
+  targets internally, e.g. plugins or globbing. Those targets are built
+  before the test is executed even if they have `build_by_default : false`.
+
+- `protocol` *(since 0.50.0)*: specifies how the test results are parsed and can
+  be one of `exitcode`, `tap`, or `gtest`. For more information about the test
+  harness protocol read [Unit Tests](Unit-tests.md). The following values are
+  accepted:
+  - `exitcode`: the executable's exit code is used by the test harness
+    to record the outcome of the test.
+  - `tap`: [Test Anything Protocol](https://www.testanything.org/).
+  - `gtest` *(since 0.55.0)*: for Google Tests.
+  - `rust` *(since 0.56.0)*: for native Rust tests.
+
+- `priority` *(since 0.52.0)*: specifies the priority of a test. Tests with a
+  higher priority are *started* before tests with a lower priority.
+  The starting order of tests with identical priorities is
+  implementation-defined. The default priority is 0; negative numbers are
+  permitted.
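+
+Putting these keyword arguments together, a minimal sketch of a test
+definition might look like this (the target, file, and suite names are
+illustrative only):
+
+```meson
+tester = executable('tester', 'tester.c')
+test('integration-smoke', tester,
+  args : ['--verbose'],
+  env : ['ANSWER=42'],
+  suite : 'integration',
+  is_parallel : false,
+  timeout : 120)
+```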
+
+Defined tests can be run in a backend-agnostic way by calling
+`meson test` inside the build dir, or by using backend-specific
+commands, such as `ninja test` or `msbuild RUN_TESTS.vcxproj`.
+
+### vcs_tag()
+
+``` meson
+  customtarget vcs_tag(...)
+```
+
+This command detects revision control commit information at build time
+and places it in the specified output file. This file is guaranteed to
+be up to date on every build. Keywords are similar to `custom_target`.
+
+- `command`: string list with the command to execute, see
+  [`custom_target`](#custom_target) for details on how this command
+  must be specified
+- `fallback`: version number to use when no revision control
+  information is present, such as when building from a release tarball
+  (defaults to `meson.project_version()`)
+- `input`: file to modify (e.g. `version.c.in`) (required)
+- `output`: file to write the results to (e.g. `version.c`) (required)
+- `replace_string`: string in the input file to substitute with the
+  commit information (defaults to `@VCS_TAG@`)
+
+Meson will read the contents of `input`, substitute the
+`replace_string` with the detected revision number, and write the
+result to `output`. This method returns a
+[`custom_target`](#custom_target) object that (as usual) should be
+used to signal dependencies if other targets use the file it
+produces.
+
+For example, if you generate a header with this and want to use that
+in a build target, you must add the return value to the sources of
+that build target. Without that, Meson will not know the order in
+which to build the targets.
+
+If you desire more specific behavior than what this command provides,
+you should use `custom_target`.
+
+### range()
+
+``` meson
+  rangeobject range(stop)
+  rangeobject range(start, stop[, step])
+```
+
+*Since 0.58.0*
+
+Returns an opaque object that can only be used in `foreach` statements.
+- `start` must be an integer greater than or equal to 0. Defaults to 0.
+- `stop` must be an integer greater than or equal to `start`.
+- `step` must be an integer greater than or equal to 1. Defaults to 1.
+
+It causes the `foreach` loop to iterate over the values from `start`
+(inclusive) to `stop` (exclusive), incrementing by `step` after each
+iteration.
+
+```meson
+# Loop 15 times with i from 0 to 14 included.
+foreach i : range(15)
+  ...
+endforeach
+```
+
+The range object can also be assigned to a variable and indexed.
+```meson
+r = range(5, 10, 2)
+assert(r[2] == 9)
+```
+
+## Built-in objects
+
+These are built-in objects that are always available.
+
+### `meson` object
+
+The `meson` object allows you to introspect various properties of the
+system. This object is always mapped in the `meson` variable. It has
+the following methods.
+
+- `add_dist_script(script_name, arg1, arg2, ...)` *(since 0.48.0)*: causes the script
+  given as an argument to run during the `dist` operation after the
+  distribution source has been generated but before it is
+  archived. Note that this runs the script file that is in the
+  _staging_ directory, not the one in the source directory. If the
+  script file cannot be found in the staging directory, it is a hard
+  error. The `MESON_DIST_ROOT` environment variable is set when dist
+  scripts are run.
+  *(since 0.49.0)* Accepts multiple arguments for the script.
+  *(since 0.54.0)* The `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT`
+  environment variables are set when dist scripts are run. They are the paths
+  to the root source and build directories of the main project, even when the
+  script comes from a subproject.
+ *(since 0.55.0)* The output of `configure_file`, `files`, and `find_program` + as well as strings. + *(since 0.57.0)* `file` objects and the output of `configure_file` may be + used as the `script_name` parameter. + *(since 0.58.0)* This command can be invoked from a subproject, it was a hard + error in earlier versions. Subproject dist scripts will only be executed + when running `meson dist --include-subprojects`. `MESON_PROJECT_SOURCE_ROOT`, + `MESON_PROJECT_BUILD_ROOT` and `MESON_PROJECT_DIST_ROOT` environment + variables are set when dist scripts are run. They are identical to + `MESON_SOURCE_ROOT`, `MESON_BUILD_ROOT` and `MESON_DIST_ROOT` for main project + scripts, but for subproject scripts they have the path to the root of the + subproject appended, usually `subprojects/`. + +- `add_install_script(script_name, arg1, arg2, ...)`: causes the script + given as an argument to be run during the install step, this script + will have the environment variables `MESON_SOURCE_ROOT`, + `MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`, + `MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set. + All positional arguments are passed as parameters. + *since 0.57.0* `skip_if_destdir` boolean keyword argument (defaults to `false`) + can be specified. If `true` the script will not be run if DESTDIR is set during + installation. This is useful in the case the script updates system wide + cache that is only needed when copying files into final destination. + + *(since 0.54.0)* If `meson install` is called with the `--quiet` option, the + environment variable `MESON_INSTALL_QUIET` will be set. + + *(since 0.55.0)* The output of `configure_file`, `files`, `find_program`, + `custom_target`, indexes of `custom_target`, `executable`, `library`, and + other built targets as well as strings. + + *(since 0.57.0)* `file` objects and the output of `configure_file` may be + *used as the `script_name` parameter. + + Meson uses the `DESTDIR` environment variable as set by the + inherited environment to determine the (temporary) installation + location for files. Your install script must be aware of this while + manipulating and installing files. The correct way to handle this is + with the `MESON_INSTALL_DESTDIR_PREFIX` variable which is always set + and contains `DESTDIR` (if set) and `prefix` joined together. This + is useful because both are usually absolute paths and there are + platform-specific edge-cases in joining two absolute paths. + + In case it is needed, `MESON_INSTALL_PREFIX` is also always set and + has the value of the `prefix` option passed to Meson. + + `MESONINTROSPECT` contains the path to the introspect command that + corresponds to the `meson` executable that was used to configure the + build. (This might be a different path than the first executable + found in `PATH`.) It can be used to query build configuration. Note + that the value will contain many parts, f.ex., it may be `python3 + /path/to/meson.py introspect`. The user is responsible for splitting + the string to an array if needed by splitting lexically like a UNIX + shell would. If your script uses Python, `shlex.split()` is the + easiest correct way to do this. + +- `add_postconf_script(script_name, arg1, arg2, ...)`: runs the + executable given as an argument after all project files have been + generated. This script will have the environment variables + `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` set. + + *(since 0.55.0)* The output of `configure_file`, `files`, and `find_program` + as well as strings. 
+ + *(since 0.57.0)* `file` objects and the output of `configure_file` may be + *used as the `script_name` parameter. + +- `backend()` *(since 0.37.0)*: returns a string representing the + current backend: `ninja`, `vs2010`, `vs2012`, `vs2013`, `vs2015`, + `vs2017`, `vs2019`, or `xcode`. + +- `build_root()`: returns a string with the absolute path to the build + root directory. *(deprecated since 0.56.0)*: this function will return the + build root of the parent project if called from a subproject, which is usually + not what you want. Try using `current_build_dir()` or `project_build_root()`. + In the rare cases where the root of the main project is needed, + use `global_build_root()` that has the same behaviour but with a more explicit + name. + +- `source_root()`: returns a string with the absolute path to the + source root directory. Note: you should use the `files()` function + to refer to files in the root source directory instead of + constructing paths manually with `meson.source_root()`. + *(deprecated since 0.56.0)*: This function will return the source root of the + parent project if called from a subproject, which is usually not what you want. + Try using `current_source_dir()` or `project_source_root()`. + In the rare cases where the root of the main project is needed, + use `global_source_root()` that has the same behaviour but with a more explicit + name. + +- `project_build_root()` *(since 0.56.0)*: returns a string with the absolute path + to the build root directory of the current (sub)project. + +- `project_source_root()` *(since 0.56.0)*: returns a string with the absolute path + to the source root directory of the current (sub)project. + +- `global_build_root()` *(since 0.58.0)*: returns a string with the absolute path + to the build root directory. This function will return the build root of the + main project if called from a subproject, which is usually not what you want. + It is usually preferable to use `current_build_dir()` or `project_build_root()`. + +- `global_source_root()` *(since 0.58.0)*: returns a string with the absolute path + to the source root directory. This function will return the source root of the + main project if called from a subproject, which is usually not what you want. + It is usually preferable to use `current_source_dir()` or `project_source_root()`. + +- `current_build_dir()`: returns a string with the absolute path to the + current build directory. + +- `current_source_dir()`: returns a string to the current source + directory. Note: **you do not need to use this function** when + passing files from the current source directory to a function since + that is the default. Also, you can use the `files()` function to + refer to files in the current or any other source directory instead + of constructing paths manually with `meson.current_source_dir()`. + +- `get_compiler(language)`: returns [an object describing a + compiler](#compiler-object), takes one positional argument which is + the language to use. It also accepts one keyword argument, `native` + which when set to true makes Meson return the compiler for the build + machine (the "native" compiler) and when false it returns the host + compiler (the "cross" compiler). If `native` is omitted, Meson + returns the "cross" compiler if we're currently cross-compiling and + the "native" compiler if we're not. + +- `get_cross_property(propname, fallback_value)`: + *Deprecated since 0.58.0, use `get_external_property()` instead*. 
+ Returns the given property from a cross file, the optional fallback_value + is returned if not cross compiling or the given property is not found. + +- `get_external_property(propname, fallback_value, native: true/false)` + *(since 0.54.0)*: returns the given property from a native or cross file. + The optional fallback_value is returned if the given property is not found. + The optional `native: true` forces retrieving a variable from the + native file, even when cross-compiling. + If `native: false` or not specified, variable is retrieved from the + cross-file if cross-compiling, and from the native-file when not cross-compiling. + +- `has_external_property(propname, native: true/false)` + *(since 0.58.0)*: checks whether the given property exist in a native or + cross file. The optional `native: true` forces checking for the variable + in the native file, even when cross-compiling. + If `native: false` or not specified, the variable is checked for in the + cross-file if cross-compiling, and in the native-file when not cross-compiling. + +- `can_run_host_binaries()` *(since 0.55.0)*: returns true if the build machine can run + binaries compiled for the host. This returns true unless you are + cross compiling, need a helper to run host binaries, and don't have one. + For example when cross compiling from Linux to Windows, one can use `wine` + as the helper. + +- `has_exe_wrapper()`: *(since 0.55.0)* **(deprecated)**. Use `can_run_host_binaries` instead. + +- `install_dependency_manifest(output_name)`: installs a manifest file + containing a list of all subprojects, their versions and license + files to the file name given as the argument. + +- `is_cross_build()`: returns `true` if the current build is a [cross + build](Cross-compilation.md) and `false` otherwise. + +- `is_subproject()`: returns `true` if the current project is being + built as a subproject of some other project and `false` otherwise. + +- `is_unity()`: returns `true` when doing a [unity + build](Unity-builds.md) (multiple sources are combined before + compilation to reduce build time) and `false` otherwise. + +- `override_find_program(progname, program)` *(since 0.46.0)*: + specifies that whenever `find_program` is used to find a program + named `progname`, Meson should not look it up on the system but + instead return `program`, which may either be the result of + `find_program`, `configure_file` or `executable`. *(since 0.55.0)* If a version + check is passed to `find_program` for a program that has been overridden with + an executable, the current project version is used. + + If `program` is an `executable`, it cannot be used during configure. + +- `override_dependency(name, dep_object)` *(since 0.54.0)*: + specifies that whenever `dependency(name, ...)` is used, Meson should not + look it up on the system but instead return `dep_object`, which may either be + the result of `dependency()` or `declare_dependency()`. It takes optional + `native` keyword arguments. Doing this in a subproject allows the parent + project to retrieve the dependency without having to know the dependency + variable name: `dependency(name, fallback : subproject_name)`. + +- `project_version()`: returns the version string specified in + `project` function call. + +- `project_license()`: returns the array of licenses specified in + `project` function call. + +- `project_name()`: returns the project name specified in the `project` + function call. + +- `version()`: return a string with the version of Meson. 
+
+- `add_devenv()` *(since 0.58.0)*: adds an [`environment()`](#environment) object
+  to the list of environments that will be applied when using the
+  [`meson devenv`](Commands.md#devenv) command line. This is useful for
+  developers who wish to use the project without installing it; for example,
+  it is often necessary to set the path to a plugins directory. Alternatively,
+  a list or dictionary can be passed as the first argument.
+  ``` meson
+  devenv = environment()
+  devenv.set('PLUGINS_PATH', meson.current_build_dir())
+  ...
+  meson.add_devenv(devenv)
+  ```
+  After configuring and compiling that project, a terminal can be opened with
+  the environment set:
+  ```sh
+  $ meson devenv -C builddir
+  $ echo $PLUGINS_PATH
+  /path/to/source/subdir
+  ```
+  See the [`meson devenv`](Commands.md#devenv) command documentation for a list
+  of environment variables that are set by default by Meson.
+
+### `build_machine` object
+
+Provides information about the build machine — the machine that is
+doing the actual compilation. See
+[Cross-compilation](Cross-compilation.md). It has the following
+methods:
+
+- `cpu_family()`: returns the CPU family name. [This
+  table](Reference-tables.md#cpu-families) contains all known CPU
+  families. These are guaranteed to continue working.
+
+- `cpu()`: returns a more specific CPU name, such as `i686`, `amd64`,
+  etc.
+
+- `system()`: returns the operating system name. [This
+  table](Reference-tables.md#operating-system-names) lists all of
+  the currently known operating system names; these are guaranteed to
+  continue working.
+
+- `endian()`: returns `big` on big-endian systems and `little` on
+  little-endian systems.
+
+Currently, these values are populated using
+[`platform.system()`](https://docs.python.org/3.4/library/platform.html#platform.system)
+and
+[`platform.machine()`](https://docs.python.org/3.4/library/platform.html#platform.machine). If
+you think the returned values for any of these are incorrect for your
+system or CPU, or if your OS is not in the linked table, please file
+[a bug report](https://github.com/mesonbuild/meson/issues/new) with
+details and we'll look into it.
+
+### `host_machine` object
+
+Provides information about the host machine — the machine on which the
+compiled binary will run. See
+[Cross-compilation](Cross-compilation.md).
+
+It has the same methods as [`build_machine`](#build_machine-object).
+
+When not cross-compiling, all the methods return the same values as
+`build_machine` (because the build machine is the host machine).
+
+Note that while cross-compiling, it simply returns the values defined
+in the cross-info file.
+
+### `target_machine` object
+
+Provides information about the target machine — the machine on which
+the compiled binary's output will run. Hence, this object should only
+be used while cross-compiling a compiler. See
+[Cross-compilation](Cross-compilation.md).
+
+It has the same methods as [`build_machine`](#build_machine-object).
+
+When all compilation is 'native', all the methods return the same
+values as `build_machine` (because the build machine is the host
+machine and the target machine).
+
+Note that while cross-compiling, it simply returns the values defined
+in the cross-info file. If `target_machine` values are not defined in
+the cross-info file, `host_machine` values are returned instead.
+
+### `string` object
+
+All [strings](Syntax.md#strings) have the following methods. Strings
+are immutable; all operations return their results as a new string.
+ +- `contains(string)`: returns true if string contains the string + specified as the argument. + +- `endswith(string)`: returns true if string ends with the string + specified as the argument. + +- `format()`: formats text, see the [Syntax + manual](Syntax.md#string-formatting) for usage info. + +- `join(list_of_strings)`: the opposite of split, for example + `'.'.join(['a', 'b', 'c']` yields `'a.b.c'`. + +- `replace('old_substr', 'new_str')` *(since 0.58.0)*: replaces instances of + `old_substr` in the string with `new_str` and returns a new string + +- `split(split_character)`: splits the string at the specified + character (or whitespace if not set) and returns the parts in an + array. + +- `startswith(string)`: returns true if string starts with the string + specified as the argument + +- `substring(start,end)` *(since 0.56.0)*: returns a substring specified from start to end. + Both `start` and `end` arguments are optional, so, for example, `'foobar'.substring()` will return `'foobar'`. + +- `strip()`: removes whitespace at the beginning and end of the string. + *(since 0.43.0)* Optionally can take one positional string argument, + and all characters in that string will be stripped. + +- `to_int()`: returns the string converted to an integer (error if string + is not a number). + +- `to_lower()`: creates a lower case version of the string. + +- `to_upper()`: creates an upper case version of the string. + +- `underscorify()`: creates a string where every non-alphabetical + non-number character is replaced with `_`. + +- `version_compare(comparison_string)`: does semantic version + comparison, if `x = '1.2.3'` then `x.version_compare('>1.0.0')` + returns `true`. + +### `Number` object + +[Numbers](Syntax.md#numbers) support these methods: + +- `is_even()`: returns true if the number is even +- `is_odd()`: returns true if the number is odd +- `to_string()`: returns the value of the number as a string. + +### `boolean` object + +A [boolean](Syntax.md#booleans) object has two simple methods: + +- `to_int()`: returns either `1` or `0`. + +- `to_string()`: returns the string `'true'` if the boolean is true or + `'false'` otherwise. You can also pass it two strings as positional + arguments to specify what to return for true/false. For instance, + `bool.to_string('yes', 'no')` will return `yes` if the boolean is + true and `no` if it is false. + +### `array` object + +The following methods are defined for all [arrays](Syntax.md#arrays): + +- `contains(item)`: returns `true` if the array contains the object + given as argument, `false` otherwise + +- `get(index, fallback)`: returns the object at the given index, + negative indices count from the back of the array, indexing out of + bounds returns the `fallback` value *(since 0.38.0)* or, if it is + not specified, causes a fatal error + +- `length()`: the size of the array + +You can also iterate over arrays with the [`foreach` +statement](Syntax.md#foreach-statements). + +### `dictionary` object + +*(since 0.47.0)* + +The following methods are defined for all [dictionaries](Syntax.md#dictionaries): + +- `has_key(key)`: returns `true` if the dictionary contains the key + given as argument, `false` otherwise + +- `get(key, fallback)`: returns the value for the key given as first + argument if it is present in the dictionary, or the optional + fallback value given as the second argument. 
If a single argument + was given and the key was not found, causes a fatal error + +- `keys()`: returns an array of keys in the dictionary + +You can also iterate over dictionaries with the [`foreach` +statement](Syntax.md#foreach-statements). + +*(since 0.48.0)* Dictionaries can be added (e.g. `d1 = d2 + d3` and `d1 += d2`). +Values from the second dictionary overrides values from the first. + +## Returned objects + +These are objects returned by the [functions listed above](#functions). + +### `compiler` object + +This object is returned by +[`meson.get_compiler(lang)`](#meson-object). It represents a compiler +for a given language and allows you to query its properties. It has +the following methods: + +- `alignment(typename)`: returns the alignment of the type specified in + the positional argument, you can specify external dependencies to + use with `dependencies` keyword argument. + +- `cmd_array()`: returns an array containing the command(s) for the compiler. + +- `compiles(code)`: returns true if the code fragment given in the + positional argument compiles, you can specify external dependencies + to use with `dependencies` keyword argument, `code` can be either a + string containing source code or a `file` object pointing to the + source code. + +- `compute_int(expr, ...')`: computes the value of the given expression + (as an example `1 + 2`). When cross compiling this is evaluated with + an iterative algorithm, you can specify keyword arguments `low` + (defaults to -1024), `high` (defaults to 1024) and `guess` to + specify max and min values for the search and the value to try + first. + +- `find_library(lib_name, ...)`: tries to find the library specified in + the positional argument. The [result + object](#external-library-object) can be used just like the return + value of `dependency`. If the keyword argument `required` is false, + Meson will proceed even if the library is not found. By default the + library is searched for in the system library directory + (e.g. /usr/lib). This can be overridden with the `dirs` keyword + argument, which can be either a string or a list of strings. + *(since 0.47.0)* The value of a [`feature`](Build-options.md#features) + option can also be passed to the `required` keyword argument. + *(since 0.49.0)* If the keyword argument `disabler` is `true` and the + dependency couldn't be found, return a [disabler object](#disabler-object) + instead of a not-found dependency. *(since 0.50.0)* The `has_headers` keyword + argument can be a list of header files that must be found as well, using + `has_header()` method. All keyword arguments prefixed with `header_` will be + passed down to `has_header()` method with the prefix removed. *(since 0.51.0)* + The `static` keyword (boolean) can be set to `true` to limit the search to + static libraries and `false` for dynamic/shared. + +- `first_supported_argument(list_of_strings)`: given a list of + strings, returns the first argument that passes the `has_argument` + test or an empty array if none pass. + +- `first_supported_link_argument(list_of_strings)` *(since 0.46.0)*: + given a list of strings, returns the first argument that passes the + `has_link_argument` test or an empty array if none pass. + +- `get_define(definename)`: returns the given preprocessor symbol's + value as a string or empty string if it is not defined. + *(since 0.47.0)* This method will concatenate string literals as + the compiler would. E.g. `"a" "b"` will become `"ab"`. + +- `get_id()`: returns a string identifying the compiler. 
For example, + `gcc`, `msvc`, [and more](Reference-tables.md#compiler-ids). + +- `get_argument_syntax()` *(since 0.49.0)*: returns a string identifying the type + of arguments the compiler takes. Can be one of `gcc`, `msvc`, or an undefined + string value. This method is useful for identifying compilers that are not + gcc or msvc, but use the same argument syntax as one of those two compilers + such as clang or icc, especially when they use different syntax on different + operating systems. + +- `get_linker_id()` *(since 0.53.0)*: returns a string identifying the linker. + For example, `ld.bfd`, `link`, [and more](Reference-tables.md#linker-ids). + +- `get_supported_arguments(list_of_string)` *(since 0.43.0)*: returns + an array containing only the arguments supported by the compiler, + as if `has_argument` were called on them individually. + +- `get_supported_link_arguments(list_of_string)` *(since 0.46.0)*: returns + an array containing only the arguments supported by the linker, + as if `has_link_argument` were called on them individually. + +- `has_argument(argument_name)`: returns true if the compiler accepts + the specified command line argument, that is, can compile code + without erroring out or printing a warning about an unknown flag. + +- `has_link_argument(argument_name)` *(since 0.46.0)*: returns true if + the linker accepts the specified command line argument, that is, can + compile and link code without erroring out or printing a warning + about an unknown flag. Link arguments will be passed to the + compiler, so should usually have the `-Wl,` prefix. On VisualStudio + a `/link` argument will be prepended. + +- `has_function(funcname)`: returns true if the given function is + provided by the standard library or a library passed in with the + `args` keyword, you can specify external dependencies to use with + `dependencies` keyword argument. + +- `check_header(header_name)` *(since 0.47.0)*: returns true if the + specified header is *usable* with the specified prefix, + dependencies, and arguments. You can specify external dependencies + to use with `dependencies` keyword argument and extra code to put + above the header test with the `prefix` keyword. In order to look + for headers in a specific directory you can use `args : + '-I/extra/include/dir`, but this should only be used in exceptional + cases for includes that can't be detected via pkg-config and passed + via `dependencies`. *(since 0.50.0)* The `required` keyword argument + can be used to abort if the header cannot be found. + +- `has_header(header_name)`: returns true if the specified header + *exists*, and is faster than `check_header()` since it only does a + pre-processor check. You can specify external dependencies to use + with `dependencies` keyword argument and extra code to put above the + header test with the `prefix` keyword. In order to look for headers + in a specific directory you can use `args : '-I/extra/include/dir`, + but this should only be used in exceptional cases for includes that + can't be detected via pkg-config and passed via `dependencies`. + *(since 0.50.0)* The `required` keyword argument can be used to + abort if the header cannot be found. + +- `has_header_symbol(headername, symbolname)`: detects + whether a particular symbol (function, variable, #define, type + definition, etc) is declared in the specified header, you can + specify external dependencies to use with `dependencies` keyword + argument. 
*(since 0.50.0)* The `required` keyword argument can be + used to abort if the symbol cannot be found. + +- `has_member(typename, membername)`: takes two arguments, type name + and member name and returns true if the type has the specified + member, you can specify external dependencies to use with + `dependencies` keyword argument. + +- `has_members(typename, membername1, membername2, ...)`: takes at + least two arguments, type name and one or more member names, returns + true if the type has all the specified members, you can specify + external dependencies to use with `dependencies` keyword argument. + +- `has_multi_arguments(arg1, arg2, arg3, ...)` *(since 0.37.0)*: the same as + `has_argument` but takes multiple arguments and uses them all in a + single compiler invocation. + +- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(since 0.46.0)*: + the same as `has_link_argument` but takes multiple arguments and + uses them all in a single compiler invocation. + +- `has_type(typename)`: returns true if the specified token is a type, + you can specify external dependencies to use with `dependencies` + keyword argument. + +- `links(code)`: returns true if the code fragment given in the + positional argument compiles and links, you can specify external + dependencies to use with `dependencies` keyword argument, `code` can + be either a string containing source code or a `file` object + pointing to the source code. + +- `run(code)`: attempts to compile and execute the given code fragment, + returns a run result object, you can specify external dependencies + to use with `dependencies` keyword argument, `code` can be either a + string containing source code or a `file` object pointing to the + source code. + +- `symbols_have_underscore_prefix()` *(since 0.37.0)*: returns `true` + if the C symbol mangling is one underscore (`_`) prefixed to the symbol. + +- `sizeof(typename, ...)`: returns the size of the given type + (e.g. `'int'`) or -1 if the type is unknown, to add includes set + them in the `prefix` keyword argument, you can specify external + dependencies to use with `dependencies` keyword argument. + +- `version()`: returns the compiler's version number as a string. + +- `has_function_attribute(name)` *(since 0.48.0)*: returns `true` if the + compiler supports the GNU style (`__attribute__(...)`) `name`. This is + preferable to manual compile checks as it may be optimized for compilers that + do not support such attributes. + [This table](Reference-tables.md#gcc-__attribute__) lists all of the + supported attributes. + +- `get_supported_function_attributes(list_of_names)` *(since 0.48.0)*: + returns an array containing any names that are supported GCC style + attributes. Equivalent to `has_function_attribute` was called on each of them + individually. + +The following keyword arguments can be used: + +- `args`: used to pass a list of compiler arguments that are + required to find the header or symbol. For example, you might need + to pass the include path `-Isome/path/to/header` if a header is not + in the default include path. *(since 0.38.0)* you should use the + `include_directories` keyword described below. You may also want to + pass a library name `-lfoo` for `has_function` to check for a function. + Supported by all methods except `get_id`, `version`, and `find_library`. + +- `include_directories` *(since 0.38.0)*: specifies extra directories for + header searches. + +- `name`: the name to use for printing a message about the compiler + check. 
Supported by the methods `compiles()`, `links()`, and
+  `run()`. If this keyword argument is not passed to those methods, no
+  message will be printed about the check.
+
+- `no_builtin_args`: when set to true, the compiler arguments controlled
+  by built-in configuration options are not added.
+
+- `prefix`: adds #includes and other things that are
+  required for the symbol to be declared. System definitions should be
+  passed via compiler args (e.g. `_GNU_SOURCE` is often required for
+  some symbols to be exposed on Linux, and it should be passed via the
+  `args` keyword argument, see below). Supported by the methods
+  `sizeof`, `has_type`, `has_function`, `has_member`, `has_members`,
+  `check_header`, `has_header`, `has_header_symbol`, `get_define`.
+
+**Note:** These compiler checks do not use compiler arguments added
+with `add_*_arguments()`, via `-Dlang_args` on the command-line, or
+through `CFLAGS`/`LDFLAGS`, etc. in the environment. Hence, you can
+trust that the tests will be fully self-contained, and won't fail
+because of custom flags added by other parts of the build file or by
+users.
+
+Note that if you have a single prefix with all your dependencies, you
+might find it easier to append to the environment variables
+`C_INCLUDE_PATH` with GCC/Clang and `INCLUDE` with MSVC to expand the
+default include path, and `LIBRARY_PATH` with GCC/Clang and `LIB` with
+MSVC to expand the default library search path.
+
+However, with GCC, these variables will be ignored when
+cross-compiling. In that case you need to use a specs file.
+
+### `build target` object
+
+A build target is either an [executable](#executable), [shared
+library](#shared_library), [static library](#static_library), [both
+shared and static library](#both_libraries) or [shared
+module](#shared_module).
+
+- `extract_all_objects()`: is the same as `extract_objects` but returns all
+  object files generated by this target. *(since 0.46.0)* The keyword argument
+  `recursive` must be set to `true` to also return objects passed to
+  the `object` argument of this target. By default only objects built
+  for this target are returned to maintain backward compatibility with
+  previous versions. The default will eventually be changed to `true`
+  in a future version.
+
+- `extract_objects(source1, source2, ...)`: takes as its arguments
+  a number of source files as [`string`](#string-object) or
+  [`files()`](#files) and returns an opaque value representing the
+  object files generated for those source files. This is typically used
+  to take single object files and link them to unit tests or to compile
+  some source files with custom flags. To use the object file(s)
+  in another build target, use the [`objects:`](#executable) keyword
+  argument or include them in the command line of a
+  [`custom_target`](#custom_target).
+
+- `full_path()`: returns a full path pointing to the result target file.
+  NOTE: In most cases using the object itself will do the same job as
+  this and will also allow Meson to set up inter-target dependencies
+  correctly. Please file a bug if that doesn't work for you.
+
+- `path()` *(since 0.59.0)* **(deprecated)**: does exactly the same
+  as `full_path()`. **NOTE:** This function is solely kept for compatibility
+  with [`external program`](#external-program-object) objects. It will be
+  removed once the corresponding (also deprecated) `path()` function in the
+  `external program` object is removed.
+ +- `private_dir_include()`: returns a opaque value that works like + `include_directories` but points to the private directory of this + target, usually only needed if an another target needs to access + some generated internal headers of this target + +- `name()` *(since 0.54.0)*: returns the target name. + +- `found()` *(since 0.59.0)*: Always returns `true`. This function is meant + to make executables objects feature compatible with + [`external program`](#external-program-object) objects. This simplifies + use-cases where an executable is used instead of an external program. + + +### `configuration` data object + +This object is returned by +[`configuration_data()`](#configuration_data) and encapsulates +configuration values to be used for generating configuration files. A +more in-depth description can be found in the [the configuration wiki +page](Configuration.md) It has three methods: + +- `get(varname, default_value)`: returns the value of `varname`, if the + value has not been set returns `default_value` if it is defined + *(since 0.38.0)* and errors out if not + +- `get_unquoted(varname, default_value)` *(since 0.44.0)*: returns the value + of `varname` but without surrounding double quotes (`"`). If the value has + not been set returns `default_value` if it is defined and errors out if not. + +- `has(varname)`: returns `true` if the specified variable is set + +- `keys()`*(since 0.57.0)*: returns an array of keys of + the configuration data object. + + You can iterate over this array with the [`foreach` + statement](Syntax.md#foreach-statements). + +- `merge_from(other)` *(since 0.42.0)*: takes as argument a different + configuration data object and copies all entries from that object to + the current. + +- `set(varname, value)`, sets a variable to a given value + +- `set10(varname, boolean_value)` is the same as above but the value + is either `true` or `false` and will be written as 1 or 0, + respectively + +- `set_quoted(varname, value)` is same as `set` but quotes the value + in double quotes (`"`) + +They all take the `description` keyword that will be written in the +result file. The replacement assumes a file with C syntax. If your +generated file is source code in some other language, you probably +don't want to add a description field because it most likely will +cause a syntax error. + +### `custom target` object + +This object is returned by [`custom_target`](#custom_target) and +contains a target with the following methods: + +- `full_path()`: returns a full path pointing to the result target file + NOTE: In most cases using the object itself will do the same job as + this and will also allow Meson to setup inter-target dependencies + correctly. Please file a bug if that doesn't work for you. + *(since 0.54.0)* It can be also called on indexes objects: + `custom_targets[i].full_path()`. + +- `[index]`: returns an opaque object that references this target, and + can be used as a source in other targets. When it is used as such it + will make that target depend on this custom target, but the only + source added will be the one that corresponds to the index of the + custom target's output argument. + +- `to_list()` *(since 0.54.0)*: returns a list of opaque objects that references + this target, and can be used as a source in other targets. This can be used to + iterate outputs with `foreach` loop. 
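+
+As a brief sketch of how these methods combine (the generating command
+and file names below are hypothetical), an output can be selected by
+index and used as a source, and `to_list()` allows iterating over all
+outputs:
+
+```meson
+gen = custom_target('gen-sources',
+  input : 'data.def',
+  output : ['gen.h', 'gen.c'],
+  command : [find_program('generate.py'), '@INPUT@', '@OUTPUT@'])
+
+# Use only the generated .c file in this target; the executable will
+# depend on the custom target through the index object.
+executable('demo', 'main.c', gen[1])
+
+# Iterate over all outputs (since 0.54.0).
+foreach out : gen.to_list()
+  message(out.full_path())
+endforeach
+```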
+ +### `dependency` object + +This object is returned by [`dependency()`](#dependency) and contains +an external dependency with the following methods: + + - `found()`: returns whether the dependency was found. + + - `name()` *(since 0.48.0)*: returns the name of the dependency that was + searched. Returns `internal` for dependencies created with + `declare_dependency()`. + + - `get_pkgconfig_variable(varname)` *(since 0.36.0)*: gets the + pkg-config variable specified, or, if invoked on a non pkg-config + dependency, error out. *(since 0.44.0)* You can also redefine a + variable by passing a list to the `define_variable` parameter + that can affect the retrieved variable: `['prefix', '/'])`. + *(since 0.45.0)* A warning is issued if the variable is not defined, + unless a `default` parameter is specified. + + *(Deprecated since 0.56.0*) use `get_variable(pkgconfig : ...)` instead + + - `get_configtool_variable(varname)` *(since 0.44.0)*: gets the + command line argument from the config tool (with `--` prepended), or, + if invoked on a non config-tool dependency, error out. + + *(Deprecated since 0.56.0*) use `get_variable(configtool : ...)` instead + + - `type_name()`: returns a string describing the type of the + dependency, the most common values are `internal` for deps created + with `declare_dependency()` and `pkgconfig` for system dependencies + obtained with Pkg-config. + + - `version()`: the version number as a string, for example `1.2.8`. + `unknown` if the dependency provider doesn't support determining the + version. + + - `include_type()`: returns whether the value set by the `include_type` kwarg + + - `as_system(value)`: returns a copy of the dependency object, which has changed + the value of `include_type` to `value`. The `value` argument is optional and + defaults to `'preserve'`. + + - `as_link_whole()` *Since 0.56.0* Only dependencies created with + `declare_dependency()`, returns a copy of the dependency object with all + link_with arguments changed to link_whole. This is useful for example for + fallback dependency from a subproject built with `default_library=static`. + Note that all `link_with` objects must be static libraries otherwise an error + will be raised when trying to `link_whole` a shared library. + + - `partial_dependency(compile_args : false, link_args : false, links + : false, includes : false, sources : false)` *(since 0.46.0)*: returns + a new dependency object with the same name, version, found status, + type name, and methods as the object that called it. This new + object will only inherit other attributes from its parent as + controlled by keyword arguments. + + If the parent has any dependencies, those will be applied to the new + partial dependency with the same rules. So, given: + + ```meson + dep1 = declare_dependency(compile_args : '-Werror=foo', link_with : 'libfoo') + dep2 = declare_dependency(compile_args : '-Werror=bar', dependencies : dep1) + dep3 = dep2.partial_dependency(compile_args : true) + ``` + + dep3 will add `['-Werror=foo', '-Werror=bar']` to the compiler args + of any target it is added to, but libfoo will not be added to the + link_args. + + *Note*: A bug present until 0.50.1 results in the above behavior + not working correctly. 
+ + The following arguments will add the following attributes: + + - compile_args: any arguments passed to the compiler + - link_args: any arguments passed to the linker + - links: anything passed via link_with or link_whole + - includes: any include_directories + - sources: any compiled or static sources the dependency has + + - `get_variable(varname, cmake : str, pkgconfig : str, configtool : str, + internal: str, default_value : str, pkgconfig_define : [str, str])` + *(since 0.51.0)*: a generic variable getter method, which replaces the + get_*type*_variable methods. This allows one to get the variable + from a dependency without knowing specifically how that dependency + was found. If default_value is set and the value cannot be gotten + from the object then default_value is returned, if it is not set + then an error is raised. + *(since 0.54.0)* added `internal` keyword. + *(since 0.58.0)* added `varname` as first positional argument. It is used as + default value for `cmake`, `pkgconfig`, `configtool` and `internal` keyword + arguments. It is useful in the common case where `pkgconfig` and `internal` + use the same variable name, in which case it's easier to write `dep.get_variable('foo')` + instead of `dep.get_variable(pkgconfig: 'foo', internal: 'foo')`. + +### `disabler` object + +A disabler object is an object that behaves in much the same way as +NaN numbers do in floating point math. That is when used in any +statement (function call, logical op, etc) they will cause the +statement evaluation to immediately short circuit to return a disabler +object. A disabler object has one method: + +- `found()`: always returns `false`. + +### `external program` object + +This object is returned by [`find_program()`](#find_program) and +contains an external (i.e. not built as part of this project) program +and has the following methods: + +- `found()`: returns whether the executable was found. + +- `path()`: *(since 0.55.0)* **(deprecated)** use `full_path()` instead. + Returns a string pointing to the script or executable + **NOTE:** You should not need to use this method. Passing the object + itself should work in all cases. For example: `run_command(obj, arg1, arg2)`. + +- `full_path()` (*since 0.55.0*): which returns a string pointing to the script or + executable **NOTE:** You should not need to use this method. Passing the object + itself should work in all cases. For example: `run_command(obj, arg1, arg2)`. + +### `environment` object + +This object is returned by [`environment()`](#environment) and stores +detailed information about how environment variables should be set +during tests. It should be passed as the `env` keyword argument to +tests and other functions. It has the following methods. + +- `append(varname, value1, value2, ...)`: appends the given values to + the old value of the environment variable, e.g. `env.append('FOO', + 'BAR', 'BAZ', separator : ';')` produces `BOB;BAR;BAZ` if `FOO` had + the value `BOB` and plain `BAR;BAZ` if the value was not defined. If + the separator is not specified explicitly, the default path + separator for the host operating system will be used, i.e. ';' for + Windows and ':' for UNIX/POSIX systems. + +- `prepend(varname, value1, value2, ...)`: same as `append` + except that it writes to the beginning of the variable. + +- `set(varname, value1, value2)`: sets the environment variable + specified in the first argument to the values in the second argument + joined by the separator, e.g. 
`env.set('FOO', 'BAR')` sets the environment
+  variable `FOO` to the value `BAR`. See `append()` above for how
+  separators work.
+
+*Since 0.58.0* the `append()` and `prepend()` methods can be called multiple
+times on the same `varname`. Earlier Meson versions would warn and only the
+last operation took effect.
+
+```meson
+env = environment()
+
+# MY_PATH will be '0:1:2:3'
+env.set('MY_PATH', '1')
+env.append('MY_PATH', '2')
+env.append('MY_PATH', '3')
+env.prepend('MY_PATH', '0')
+```
+
+### `external library` object
+
+This object is returned by [`find_library()`](#find_library) and
+contains an external (i.e. not built as part of this project)
+library. This object has the following methods:
+
+- `found()`: returns whether the library was found.
+
+- `type_name()` *(since 0.48.0)*: returns a string describing
+  the type of the dependency, which will be `library` in this case.
+
+- `partial_dependency(compile_args : false, link_args : false, links
+  : false, includes : false, sources : false)` *(since 0.46.0)*: returns
+  a new dependency object with the same name, version, found status,
+  type name, and methods as the object that called it. This new
+  object will only inherit other attributes from its parent as
+  controlled by keyword arguments.
+
+### Feature option object
+
+*(since 0.47.0)*
+
+The following methods are defined for all [`feature` options](Build-options.md#features):
+
+- `enabled()`: returns whether the feature was set to `'enabled'`
+- `disabled()`: returns whether the feature was set to `'disabled'`
+- `auto()`: returns whether the feature was set to `'auto'`
+- `allowed()` *(since 0.59.0)*: returns whether the feature was set to `'enabled'` or `'auto'`
+- `disable_auto_if(value)` *(since 0.59.0)*: returns the feature, with
+  `'auto'` converted to `'disabled'` if value is true.
+
+  | Feature / Condition | True | False |
+  | ------------------- | ---- | ----- |
+  | Enabled | Enabled | Enabled |
+  | Disabled | Disabled | Disabled |
+  | Auto | Disabled | Auto |
+
+- `require(value, error_message: '')` *(since 0.59.0)*: returns
+  the object itself if the value is true; an error if the object is
+  `'enabled'` and the value is false; a disabled feature if the object
+  is `'auto'` or `'disabled'` and the value is false.
+
+`require` is useful to restrict the applicability of `'auto'` features,
+for example based on other features or on properties of the host machine:
+
+```meson
+if get_option('directx').require(host_machine.system() == 'windows',
+    error_message: 'DirectX only available on Windows').allowed()
+  src += ['directx.c']
+  config.set10('HAVE_DIRECTX', 1)
+endif
+```
+
+### `generator` object
+
+This object is returned by [`generator()`](#generator) and contains a
+generator that is used to transform files from one type to another by
+an executable (e.g. `idl` files into source code and headers).
+
+- `process(list_of_files, ...)`: takes a list of files, causes them to
+  be processed and returns an object containing the result which can
+  then, for example, be passed into a build target definition. The
+  keyword argument `extra_args`, if specified, will be used to replace
+  an entry `@EXTRA_ARGS@` in the argument list. The keyword argument
+  `preserve_path_from`, if given, specifies that the output files need
+  to maintain their directory structure inside the target temporary
+  directory. The most common value for this is
+  `meson.current_source_dir()`. With this value, when a file called
+  `subdir/one.input` is processed it generates `subdir/one.out` inside
+  the target private directory, as opposed to `one.out` at its top
+  level.
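+
+A minimal sketch of typical generator usage (the `mygen` tool and the
+`.idl` file names are hypothetical):
+
+```meson
+mygen = find_program('mygen')
+
+# One output file per input, named after the input's basename.
+gen = generator(mygen,
+  output : '@BASENAME@.c',
+  arguments : ['@INPUT@', '--output', '@OUTPUT@', '@EXTRA_ARGS@'])
+
+# Process inputs; extra_args replaces the @EXTRA_ARGS@ placeholder.
+gen_src = gen.process('foo.idl', 'bar.idl', extra_args : ['--verbose'])
+
+executable('app', 'main.c', gen_src)
+```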
+
+### `subproject` object
+
+This object is returned by [`subproject()`](#subproject) and is an
+opaque object representing it.
+
+- `found()` *(since 0.48.0)*: returns whether the subproject was
+  successfully set up
+
+- `get_variable(name, fallback)`: fetches the specified variable from
+  inside the subproject. This is useful to, for instance, get a
+  [declared dependency](#declare_dependency) from the
+  [subproject](Subprojects.md).
+
+  If the variable does not exist, the variable `fallback` is returned.
+  If a fallback is not specified, then attempting to read a non-existing
+  variable will cause a fatal error.
+
+### `run result` object
+
+This object encapsulates the result of trying to compile and run a
+sample piece of code with [`compiler.run()`](#compiler-object) or
+[`run_command()`](#run_command). It has the following methods:
+
+- `compiled()`: if true, the compilation succeeded; if false it did not
+  and the other methods return unspecified data. This is only available
+  for `compiler.run()` results.
+- `returncode()`: the return code of executing the compiled binary
+- `stderr()`: the standard error produced when the command was run
+- `stdout()`: the standard output produced when the command was run
+
+### `module` object
+
+Modules provide their own specific implementation methods, but all modules
+provide the following methods:
+
+- `bool found()`: returns `true` if the module was successfully imported,
+  `false` otherwise. *Since 0.59.0*
diff --git a/meson/docs/markdown/Reference-tables.md b/meson/docs/markdown/Reference-tables.md
new file mode 100644
index 000000000..f2fc663eb
--- /dev/null
+++ b/meson/docs/markdown/Reference-tables.md
@@ -0,0 +1,334 @@
+# Reference tables
+
+## Compiler ids
+
+These are return values of the `get_id` (Compiler family) and
+`get_argument_syntax` (Argument syntax) methods in a compiler object.
+
+| Value | Compiler family | Argument syntax |
+| ----- | --------------- | --------------- |
+| arm | ARM compiler | |
+| armclang | ARMCLANG compiler | |
+| c2000 | Texas Instruments C2000 compiler | |
+| ccomp | The CompCert formally-verified C compiler | |
+| ccrx | Renesas RX Family C/C++ compiler | |
+| clang | The Clang compiler | gcc |
+| clang-cl | The Clang compiler (MSVC compatible driver) | msvc |
+| dmd | D lang reference compiler | |
+| emscripten| Emscripten WASM compiler | |
+| flang | Flang Fortran compiler | |
+| g95 | The G95 Fortran compiler | |
+| gcc | The GNU Compiler Collection | gcc |
+| intel | Intel compiler (Linux and Mac) | gcc |
+| intel-cl | Intel compiler (Windows) | msvc |
+| lcc | Elbrus C/C++/Fortran Compiler | |
+| llvm | LLVM-based compiler (Swift, D) | |
+| mono | Xamarin C# compiler | |
+| msvc | Microsoft Visual Studio | msvc |
+| nagfor | The NAG Fortran compiler | |
+| nvidia_hpc| NVidia HPC SDK compilers | |
+| open64 | The Open64 Fortran Compiler | |
+| pathscale | The Pathscale Fortran compiler | |
+| pgi | Portland PGI C/C++/Fortran compilers | |
+| rustc | Rust compiler | |
+| sun | Sun Fortran compiler | |
+| valac | Vala compiler | |
+| xc16 | Microchip XC16 C compiler | |
+| cython | The Cython compiler | |
+
+## Linker ids
+
+These are return values of the `get_linker_id` method in a compiler object.
+ +| Value | Linker family | +| ----- | --------------- | +| ld.bfd | The GNU linker | +| ld.gold | The GNU gold linker | +| ld.lld | The LLVM linker, with the GNU interface | +| ld.solaris | Solaris and illumos | +| ld.wasm | emscripten's wasm-ld linker | +| ld64 | Apple ld64 | +| link | MSVC linker | +| lld-link | The LLVM linker, with the MSVC interface | +| xilink | Used with Intel-cl only, MSVC like | +| optlink | optlink (used with DMD) | +| rlink | The Renesas linker, used with CCrx only | +| xc16-ar | The Microchip linker, used with XC16 only | +| ar2000 | The Texas Instruments linker, used with C2000 only | +| armlink | The ARM linker (arm and armclang compilers) | +| pgi | Portland/Nvidia PGI | +| nvlink | Nvidia Linker used with cuda | +| ccomp | CompCert used as the linker driver | + +For languages that don't have separate dynamic linkers such as C# and Java, the +`get_linker_id` will return the compiler name. + +## Script environment variables + +| Value | Comment | +| ----- | ------- | +| MESONINTROSPECT | Command to run to run the introspection command, may be of the form `python /path/to/meson introspect`, user is responsible for splitting the path if necessary. | +| MESON_BUILD_ROOT | Absolute path to the build dir | +| MESON_DIST_ROOT | Points to the root of the staging directory, only set when running `dist` scripts | +| MESON_SOURCE_ROOT | Absolute path to the source dir | +| MESON_SUBDIR | Current subdirectory, only set for `run_command` | + +## CPU families + +These are returned by the `cpu_family` method of `build_machine`, +`host_machine` and `target_machine`. For cross compilation they are +set in the cross file. + +| Value | Comment | +| ----- | ------- | +| aarch64 | 64 bit ARM processor | +| alpha | DEC Alpha processor | +| arc | 32 bit ARC processor | +| arm | 32 bit ARM processor | +| avr | Atmel AVR processor | +| c2000 | 32 bit C2000 processor | +| csky | 32 bit CSky processor | +| dspic | 16 bit Microchip dsPIC | +| e2k | MCST Elbrus processor | +| ia64 | Itanium processor | +| loongarch64 | 64 bit Loongson processor| +| m68k | Motorola 68000 processor | +| microblaze | MicroBlaze processor | +| mips | 32 bit MIPS processor | +| mips64 | 64 bit MIPS processor | +| parisc | HP PA-RISC processor | +| pic24 | 16 bit Microchip PIC24 | +| ppc | 32 bit PPC processors | +| ppc64 | 64 bit PPC processors | +| riscv32 | 32 bit RISC-V Open ISA | +| riscv64 | 64 bit RISC-V Open ISA | +| rl78 | Renesas RL78 | +| rx | Renesas RX 32 bit MCU | +| s390 | IBM zSystem s390 | +| s390x | IBM zSystem s390x | +| sh4 | SuperH SH-4 | +| sparc | 32 bit SPARC | +| sparc64 | SPARC v9 processor | +| wasm32 | 32 bit Webassembly | +| wasm64 | 64 bit Webassembly | +| x86 | 32 bit x86 processor | +| x86_64 | 64 bit x86 processor | + +Any cpu family not listed in the above list is not guaranteed to +remain stable in future releases. + +Those porting from autotools should note that Meson does not add +endianness to the name of the cpu_family. For example, autotools +will call little endian PPC64 "ppc64le", Meson will not, you must +also check the `.endian()` value of the machine for this information. + +## Operating system names + +These are provided by the `.system()` method call. 
+
+| Value | Comment |
+| ----- | ------- |
+| android | By convention only, subject to change |
+| cygwin | The Cygwin environment for Windows |
+| darwin | Either OSX or iOS |
+| dragonfly | DragonFly BSD |
+| emscripten | Emscripten's JavaScript environment |
+| freebsd | FreeBSD and its derivatives |
+| gnu | GNU Hurd |
+| haiku | |
+| linux | |
+| netbsd | |
+| openbsd | |
+| windows | Any version of Windows |
+| sunos | illumos and Solaris |
+
+Any string not listed above is not guaranteed to remain stable in
+future releases.
+
+## Language arguments parameter names
+
+These are the parameter names for passing language-specific arguments to your build target.
+
+| Language | compiler name | linker name |
+| ------------- | ------------- | ----------------- |
+| C | c_args | c_link_args |
+| C++ | cpp_args | cpp_link_args |
+| C# | cs_args | cs_link_args |
+| D | d_args | d_link_args |
+| Fortran | fortran_args | fortran_link_args |
+| Java | java_args | java_link_args |
+| Objective C | objc_args | objc_link_args |
+| Objective C++ | objcpp_args | objcpp_link_args |
+| Rust | rust_args | rust_link_args |
+| Vala | vala_args | vala_link_args |
+| Cython | cython_args | cython_link_args |
+
+All these `_*` options are specified per machine. See
+[specifying options per
+machine](Builtin-options.md#Specifying-options-per-machine) for how
+to do this in cross builds.
+
+## Compiler and linker flag environment variables
+
+These environment variables will be used to modify the compiler and
+linker flags.
+
+It is recommended that you **do not use these**. They are provided
+purely for backwards compatibility with other build systems. There
+are many caveats to their use, especially when rebuilding the project.
+It is **highly** recommended that you use [the command line
+arguments](#language-arguments-parameter-names) instead.
+
+| Name | Comment |
+| ----- | ------- |
+| CFLAGS | Flags for the C compiler |
+| CXXFLAGS | Flags for the C++ compiler |
+| OBJCFLAGS | Flags for the Objective C compiler |
+| FFLAGS | Flags for the Fortran compiler |
+| DFLAGS | Flags for the D compiler |
+| VALAFLAGS | Flags for the Vala compiler |
+| RUSTFLAGS | Flags for the Rust compiler |
+| CYTHONFLAGS | Flags for the Cython compiler |
+| LDFLAGS | The linker flags, used for all languages |
+
+N.B. these settings are specified per machine, and so the environment
+variables actually come in pairs. See the [environment variables per
+machine](#Environment-variables-per-machine) section for details.
+
+## Function Attributes
+
+These are the parameter names that are supported using
+`compiler.has_function_attribute()` or
+`compiler.get_supported_function_attributes()`.
+
+### GCC `__attribute__`
+
+These values are supported using the GCC style `__attribute__` annotations,
+which are supported by GCC, Clang, and other compilers.
+
+
+| Name |
+|--------------------------|
+| alias |
+| aligned |
+| alloc_size |
+| always_inline |
+| artificial |
+| cold |
+| const |
+| constructor |
+| constructor_priority |
+| deprecated |
+| destructor |
+| error |
+| externally_visible |
+| fallthrough |
+| flatten |
+| format |
+| format_arg |
+| force_align_arg_pointer³ |
+| gnu_inline |
+| hot |
+| ifunc |
+| malloc |
+| noclone |
+| noinline |
+| nonnull |
+| noreturn |
+| nothrow |
+| optimize |
+| packed |
+| pure |
+| returns_nonnull |
+| unused |
+| used |
+| visibility* |
+| visibility:default† |
+| visibility:hidden† |
+| visibility:internal† |
+| visibility:protected† |
+| warning |
+| warn_unused_result |
+| weak |
+| weakref |
+
+\* *Changed in 0.52.0* the "visibility" target no longer includes
+"protected", which is not present in Apple's clang.
+
+† *New in 0.52.0* These split visibility attributes are preferred to the plain
+"visibility" as they provide narrower checks.
+
+³ *New in 0.55.0*
+
+### MSVC `__declspec`
+
+These values are supported using the MSVC style `__declspec` annotation,
+which is supported by MSVC, GCC, Clang, and other compilers.
+
+| Name |
+|----------------------|
+| dllexport |
+| dllimport |
+
+
+## Dependency lookup methods
+
+These are the values that can be passed to the `dependency` function's
+`method` keyword argument.
+
+| Name | Comment |
+| ----- | ------- |
+| auto | Automatic method selection |
+| pkg-config | Use Pkg-Config |
+| cmake | Look up as a CMake module |
+| config-tool | Use a custom dep tool such as `cups-config` |
+| system | System provided (e.g. OpenGL) |
+| extraframework | A macOS/iOS framework |
+
+
+## Compiler and Linker selection variables
+
+N.B. these settings are specified per machine, and so the environment
+variables actually come in pairs. See the [environment variables per
+machine](#Environment-variables-per-machine) section for details.
+
+| Language | Compiler | Linker | Note |
+|---------------|----------|-----------|---------------------------------------------|
+| C | CC | CC_LD | |
+| C++ | CXX | CXX_LD | |
+| D | DC | DC_LD | Before 0.54 D_LD* |
+| Fortran | FC | FC_LD | Before 0.54 F_LD* |
+| Objective-C | OBJC | OBJC_LD | |
+| Objective-C++ | OBJCXX | OBJCXX_LD | Before 0.54 OBJCPP_LD* |
+| Rust | RUSTC | RUSTC_LD | Before 0.54 RUST_LD* |
+| Vala | VALAC | | Use CC_LD. Vala transpiles to C |
+| C# | CSC | CSC | The linker is the compiler |
+
+*The old environment variables are still supported, but are deprecated
+and will be removed in a future version of Meson.*
+
+## Environment variables per machine
+
+Since *0.54.0*, following Autotools and other legacy build systems,
+environment variables that affect machine-specific settings come in
+pairs: for every bare environment variable `FOO`, there is a suffixed
+`FOO_FOR_BUILD`, where `FOO` just affects the host machine
+configuration, while `FOO_FOR_BUILD` just affects the build machine
+configuration. For example:
+
+ - `PKG_CONFIG_PATH_FOR_BUILD` controls the paths pkg-config will search for
+   just `native: true` dependencies (build machine).
+
+ - `PKG_CONFIG_PATH` controls the paths pkg-config will search for just
+   `native: false` dependencies (host machine).
+
+This mirrors the `build.` prefix used for (built-in) Meson options,
+which has the same meaning.
+
+This is useful for cross builds. In native builds, build = host,
+and the unsuffixed environment variables alone will suffice.
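+
+For example, a cross build could export both members of a pair when
+configuring; the compiler names and file names below are illustrative,
+not part of the tables above:
+
+```console
+$ CC=arm-linux-gnueabihf-gcc CC_FOR_BUILD=gcc meson builddir --cross-file cross.txt
+```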
+ +Prior to *0.54.0*, there was no `_FOR_BUILD`-suffixed variables, and +most environment variables only effected native machine +configurations, though this wasn't consistent (e.g. `PKG_CONFIG_PATH` +still affected cross builds). diff --git a/meson/docs/markdown/Release-notes-for-0.37.0.md b/meson/docs/markdown/Release-notes-for-0.37.0.md new file mode 100644 index 000000000..930c1d1f4 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.37.0.md @@ -0,0 +1,173 @@ +--- +title: Release 0.37 +short-description: Release notes for 0.37 +... + +# New features + +## Mesontest + +Mesontest is a new testing tool that allows you to run your tests in +many different ways. As an example you can run tests multiple times: + + mesontest --repeat=1000 a_test + +or with an arbitrary wrapper executable: + + mesontest --wrap='valgrind --tool=helgrind' a_test + +or under `gdb`, 1000 times in a row. This is handy for tests that fail +spuriously, as when the crash happens you are given the full GDB +command line: + + mesontest --repeat=1000 --gdb a_test + +## Mesonrewriter + +Mesonrewriter is an experimental tool to manipulate your build +definitions programmatically. It is not installed by default yet but +those interested can run it from the source repository. + +As an example, here is how you would add a source file to a build target: + + mesonrewriter add --target=program --filename=new_source.c + +## Shared modules + +The new `shared_module` function allows the creation of shared +modules, that is, extension modules such as plugins that are meant to +be used solely with `dlopen` rather than linking them to targets. + +## Gnome module + +- Detect required programs and print useful errors if missing + +### gtkdoc + +- Allow passing a list of directories to `src_dir` keyword argument +- Add `namespace` keyword argument +- Add `mode` keyword argument +- Fix `gtkdoc-scangobj` finding local libraries + +### compile_resources + +- Add `gresource_bundle` keyword argument to output `.gresource` files +- Add `export` and `install_header` keyword arguments +- Use depfile support available in GLib >= 2.52.0 + +## i18n module + +- Add `merge_file()` function for creating translated files +- Add `preset` keyword argument to included common gettext flags +- Read languages from `LINGUAS` file + +## LLVM IR compilation + +Meson has long had support for compiling assembler (GAS) files. In +this release we add support for compiling LLVM IR files in a similar +way when building with the Clang compiler. Just add it to the list of +files when creating a `library` or `executable` target like any other +source file. No special handling is required: + +```meson +executable('some-exe', 'main.c', 'asm-file.S', 'ir-file.ll') +``` + +As always, you can also mix LLVM IR files with C++, C, and Assembly +(GAS) sources. + +## ViM indent and syntax files + +We now include filetype, indent, and syntax files for ViM [with the +source +tree](https://github.com/mesonbuild/meson/tree/master/data/syntax-highlighting/vim). +Please file issues (or pull requests!) for enhancements or if you face +any problems using them. + +## Push URLs in .wrap files + +[.wrap files](Using-the-WrapDB.md) for subprojects can now include a +separate push URL to allow developers to push changes directly from a +subproject git checkout. + +## pkg-config dependencies + +Meson now supports multiple version restrictions while searching for pkg-config dependencies. 
+ +```meson +# Just want a lower limit +dependency('zlib', version : '>1.2.1') +# Want both a lower and an upper limit +dependency('opencv', version : ['>=2.3.0', '<=3.1.0']) +# Want to exclude one specific broken version +dependency('foolite', version : ['>=3.12.1', '!=3.13.99']) +``` + +## Overriding more binaries with environment variables + +You can now specify the binary to be used for the following tools by +setting the corresponding environment variable + +| Name | Environment variable | +| ---- | -------------------- | +| pkg-config | PKG_CONFIG | +| readelf | READELF | +| nm | NM | + +## Support for `localstatedir` + +Similar to other options such as `bindir` and `datadir`, you can now +specify the `localstatedir` for a project by passing +`--localstatedir=dir` to `meson` or `-Dlocalstatedir=dir` to +`mesonconf` after configuration. You can also access it from inside +the `meson.build` file with `get_option('localstatedir')`. + +## New compiler function `symbols_have_underscore_prefix` + +Checks if the compiler prefixes an underscore to C global symbols with +the default calling convention. This is useful when linking to +compiled assembly code, or other code that does not have its C symbol +mangling handled transparently by the compiler. + +```meson +cc = meson.get_compiler('c') +conf = configuration_data() +if cc.symbols_have_underscore_prefix() + conf.set('SYMBOLS_HAVE_UNDERSCORE', true) +endif +``` + +C symbol mangling is platform and architecture dependent, and a helper +function is needed to detect it. For example, Windows 32-bit prefixes +underscore, but 64-bit does not. Linux does not prefix an underscore +but OS X does. + +## Vala + +GLib Resources compiled with +[`gnome.compile_resources`](Gnome-module.md#compile_resources) that +are added to the sources of a Vala build target will now cause the +appropriate `--gresources` flag to be passed to the Vala compiler so +you don't need to add that yourself to `vala_args:`. + +## Improvements to install scripts + +You can now pass arguments to install scripts added with +[`meson.add_install_script()`](Reference-manual.md#meson-object). All +arguments after the script name will be passed to the script. + +The `MESON_INSTALL_DESTDIR_PREFIX` environment variable is now set +when install scripts are called. This contains the values of the +`DESTDIR` environment variable and the `prefix` option passed to Meson +joined together. This is useful because both those are usually +absolute paths, and joining absolute paths in a cross-platform way is +tricky. [`os.path.join` in +Python](https://docs.python.org/3/library/os.path.html#os.path.join) +will discard all previous path segments when it encounters an absolute +path, and simply concatenating them will not work on Windows where +absolute paths begin with the drive letter. + +## More install directories + +Added new options `sbindir` and `infodir` that can be used for +installation. diff --git a/meson/docs/markdown/Release-notes-for-0.38.0.md b/meson/docs/markdown/Release-notes-for-0.38.0.md new file mode 100644 index 000000000..152308d0e --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.38.0.md @@ -0,0 +1,123 @@ +--- +title: Release 0.38 +short-description: Release notes for 0.38 +... + +## Uninstall target + +Meson allows you to uninstall an install step by invoking the +uninstall target. This will remove all files installed as part of +install. Note that this does not restore the original files. 
This also +does not undo changes done by custom install scripts (because they can +do arbitrary install operations). + +## Support for arbitrary test setups + +Sometimes you need to run unit tests with special settings. For +example under Valgrind. Usually this requires extra command line +options for the tool. This is supported with the new *test setup* +feature. For example to set up a test run with Valgrind, you'd write +this in a `meson.build` file: + +```meson +add_test_setup('valgrind', + exe_wrapper : [vg, '--error-exitcode=1', '--leak-check=full'], + timeout_multiplier : 100) +``` + +This tells Meson to run tests with Valgrind using the given options +and multiplying the test timeout values by 100. To run this test setup +simply issue the following command: + +```console +$ mesontest --setup=valgrind +``` + +## Intel C/C++ compiler support + +As usual, just set `CC=icc CXX=icpc` and Meson will use it as the +C/C++ compiler. Currently only Linux is supported. + +## Get values from configuration data objects + +Now it is possible to query values stored in configuration data +objects. + +```meson +cdata.set('key', 'value') +cdata.get('key') # returns 'value' +cdata.get('nokey', 'default') # returns 'default' +cdata.get('nokey') # halts with an error +``` + +## Python 3 module support + +Building Python 3 extension modules has always been possible, but it +is now even easier: + +```meson +py3_mod = import('python3') +pylib = py3_mod.extension_module('modname', + 'modsource.c', + dependencies : py3_dep) +``` + +## Default options to subprojects + +Projects can specify overriding values for subprojects' +`default_options` when invoking a subproject: + +```meson +subproject('foo', default_options : ['optname=overridevalue']) +dependency('some-dep', fallback : ['some_subproject', 'some_dep'], default_options : ['optname=overridevalue']) +``` + +The effect is the same as if the default options were written in the +subproject's `project` call. + +## Set targets to be built (or not) by default + +Build targets got a new keyword `build_by_default` which tells whether +the target should be built by default when running e.g. `ninja`. +Custom targets are not built by default but other targets are. Any +target that is tagged as installed or to be built always is also built +by default, regardless of the value of this keyword. + +## Add option to mesonconf to wipe cached data. + +Meson caches the results of dependency lookups. Sometimes these may +get out of sync with the system state. Mesonconf now has a +`--clearcache` option to clear these values so they will be +re-searched from the system upon next compile. + +## Can specify file permissions and owner when installing data + +The new `install_mode` keyword argument can be used to specify file +permissions and uid/gid of files when doing the install. This allows +you to, for example, install suid root scripts. + +## `has_header()` checks are now faster + +When using compilers that implement the [`__has_include()` +preprocessor +macro](https://clang.llvm.org/docs/LanguageExtensions.html#include-file-checking-macros), +the check is now ~40% faster. 
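+
+As a sketch of the kind of check that benefits from this fast path (the
+header and macro names are only examples):
+
+```meson
+cc = meson.get_compiler('c')
+# has_header() uses __has_include when the compiler provides it
+if cc.has_header('unistd.h')
+  add_project_arguments('-DHAVE_UNISTD_H', language : 'c')
+endif
+```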
+ +## Array indexing now supports fallback values + +The second argument to the array +[`.get()`](Reference-manual.md#array-object) function is now returned +if the specified index could not be found + +```meson +array = [10, 11, 12, 13] +array.get(0) # this will return `10` +array.get(4) # this will give an error about invalid index +array.get(4, 0) # this will return `0` +``` + +## Silent mode for Mesontest + +The Meson test executor got a new argument `-q` (and `--quiet`) that +suppresses all output of successful tests. This makes interactive +usage nicer because only errors are printed. diff --git a/meson/docs/markdown/Release-notes-for-0.39.0.md b/meson/docs/markdown/Release-notes-for-0.39.0.md new file mode 100644 index 000000000..73b49ae8d --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.39.0.md @@ -0,0 +1,16 @@ +--- +title: Release 0.39 +short-description: Release notes for 0.39 +... + +The 0.39.0 release turned out to consist almost entirely of bug fixes +and minor polishes. + +# New features + +## Extra arguments for tests + +The Meson test executable allows specifying extra command line +arguments to pass to test executables. + + mesontest --test-args=--more-debug-info currenttest diff --git a/meson/docs/markdown/Release-notes-for-0.40.0.md b/meson/docs/markdown/Release-notes-for-0.40.0.md new file mode 100644 index 000000000..53bc9bad9 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.40.0.md @@ -0,0 +1,152 @@ +--- +title: Release 0.40 +short-description: Release notes for 0.40 +... + +# New features + +## Outputs of generators can be used in custom targets in the VS backend + +This has been possible with the Ninja backend for a long time but now +the Visual Studio backend works too. + +## `compute_int` method in the compiler objects + +This method can be used to evaluate the value of an expression. As an +example: + +```meson +cc = meson.get_compiler('c') +two = cc.compute_int('1 + 1') # A very slow way of adding two numbers. +``` + +## Visual Studio 2017 support + +There is now a VS2017 backend (`--backend=vs2017`) as well as a +generic VS backend (`--backend=vs`) that autodetects the currently +active VS version. + +## Automatic initialization of subprojects that are git submodules + +If you have a directory inside your subprojects directory (usually +`subprojects/`) that is a git submodule, Meson will automatically +initialize it if your build files refer to it. This will be done +without needing a wrap file since git contains all the information +needed. + +## No download mode for wraps + +Added a new option `wrap-mode` that can be toggled to prevent Meson +from downloading dependency projects. Attempting to do so will cause +an error. This is useful for distro packagers and other cases where +you must explicitly enforce that nothing is downloaded from the net +during configuration or build. + +## Overriding options per target + +Build targets got a new keyword argument `override_options` that can +be used to override system options. As an example if you have a target +that you know can't be built with `-Werror` enabled you can override +the value of the option like this: + +```meson +executable('foo', 'foo.c', override_options : ['werror=false']) +``` + +Note that this does not affect project options, only those options +that come from Meson (language standards, unity builds etc). + +## Compiler object get define + +Compiler objects got a new method `get_define()` that returns the +given preprocessor symbol as a string. 
+ +```meson +cc = meson.get_compiler('c') +one = cc.get_define('__linux__') # returns '1' on Linux hosts +``` + +## Cygwin support + +Meson now works under Cygwin and we have added it to our CI test +matrix. + +## Multiple install directories + +Custom targets can produce many output files. Previously it was only +possible to install all of them in the same directory, but now you can +install each output in its own directory like this: + +```meson +custom_target('two_out', + output : ['diff.h', 'diff.sh'], + command : [creator, '@OUTDIR@'], + install : true, + install_dir : ['dir1', 'dir2']) +``` + +For backwards compatibility and for conciseness, if you only specify +one directory all outputs will be installed into it. + +The same feature is also available for Vala build targets. For +instance, to install a shared library built by valac, the generated +header, and the generated VAPI (respectively) into the default +locations, you can do: + +```meson +shared_library('valalib', 'mylib.vala', + install : true, + install_dir : [true, true, true]) +``` + +To install any of the three in a custom directory, just pass it as a +string instead of `true`. To not install it, pass `false`. You can +also pass a single string (as before) and it will cause only the +library to be installed, so this is a backwards-compatible change. + +## Can specify method of obtaining dependencies + +Some dependencies have many ways of being provided. As an example Qt +can either be detected via `pkg-config` or `qmake`. Until now Meson +has had a heuristic for selecting which method to choose but sometimes +it does the wrong thing. This can now be overridden with the `method` +keyword like this: + +```meson +qt5_dep = dependency('qt5', modules : 'core', method : 'qmake') +``` + +## Link whole contents of static libraries + +The default behavior of static libraries is to discard all symbols +that are not not directly referenced. This may lead to exported +symbols being lost. Most compilers support "whole archive" linking +that includes all symbols and code of a given static library. This is +exposed with the `link_whole` keyword. + +```meson +shared_library('foo', 'foo.c', link_whole : some_static_library) +``` + +Note that Visual Studio compilers only support this functionality on +VS 2015 and newer. + +## Unity builds only for subprojects + +Up until now unity builds were either done for every target or none of +them. Now unity builds can be specified to be enabled only for +subprojects, which change seldom, and not for the master project, +which changes a lot. This is enabled by setting the `unity` option to +`subprojects`. + +## Running `mesonintrospect` from scripts + +Meson now sets the `MESONINTROSPECT` environment variable in addition +to `MESON_SOURCE_ROOT` and other variables when running scripts. It is +guaranteed to point to the correct `mesonintrospect` script, which is +important when running Meson uninstalled from git or when your `PATH`s +may not be set up correctly. + +Specifically, the following Meson functions will set it: +`meson.add_install_script()`, `meson.add_postconf_script()`, +`run_command()`, `run_target()`. diff --git a/meson/docs/markdown/Release-notes-for-0.41.0.md b/meson/docs/markdown/Release-notes-for-0.41.0.md new file mode 100644 index 000000000..8ce181a71 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.41.0.md @@ -0,0 +1,84 @@ +--- +title: Release 0.41 +short-description: Release notes for 0.41 +... 
+ +# New features + +## Dependency Handler for LLVM + +Native support for linking against LLVM using the `dependency` function. + +## vcs_tag keyword fallback is now optional + +The `fallback` keyword in `vcs_tag()` is now optional. If not given, +its value defaults to the return value of `meson.project_version()`. + +## Better quoting of special characters in ninja command invocations + +The ninja backend now quotes special characters that may be +interpreted by ninja itself, providing better interoperability with +custom commands. This support may not be perfect; please report any +issues found with special characters to the issue tracker. + +## Pkgconfig support for custom variables + +The Pkgconfig module object can add arbitrary variables to the generated .pc +file with the new `variables` keyword: +```meson +pkg.generate(libraries : libs, + subdirs : h, + version : '1.0', + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.', + variables : ['datadir=${prefix}/data']) +``` + +## A target for creating tarballs + +Creating distribution tarballs is simple: + + ninja dist + +This will create a `.tar.xz` archive of the source code including +submodules without any revision control information. This command also +verifies that the resulting archive can be built, tested and +installed. This is roughly equivalent to the `distcheck` target in +other build systems. Currently this only works for projects using Git +and only with the Ninja backend. + +## Support for passing arguments to Rust compiler + +Targets for building rust now take a `rust_args` keyword. + +## Code coverage export for tests + +Code coverage can be generated for tests by passing the `--cov` argument to +the `run_tests.py` test runner. Note, since multiple processes are used, +coverage must be combined before producing a report (`coverage3 combine`). + +## Reproducible builds + +All known issues have been fixed and Meson can now build reproducible Debian +packages out of the box. + +## Extended template substitution in configure_file + +The output argument of `configure_file()` is parsed for @BASENAME@ and +@PLAINNAME@ substitutions. + +## Cross-config property for overriding whether an exe wrapper is needed + +The new `needs_exe_wrapper` property allows overriding auto-detection for +cases where `build_machine` appears to be compatible with `host_machine`, +but actually isn't. For example when: +- `build_machine` is macOS and `host_machine` is the iOS Simulator +- the `build_machine`'s libc is glibc but the `host_machine` libc is uClibc +- code relies on kernel features not available on the `build_machine` + +## Support for capturing stdout of a command in configure_file + +`configure_file()` now supports a new keyword - `capture`. When this argument +is set to true, Meson captures `stdout` of the `command` and writes it to +the target file specified as `output`. diff --git a/meson/docs/markdown/Release-notes-for-0.42.0.md b/meson/docs/markdown/Release-notes-for-0.42.0.md new file mode 100644 index 000000000..585380b1a --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.42.0.md @@ -0,0 +1,148 @@ +--- +title: Release 0.42 +short-description: Release notes for 0.42 +... + +# New features + +## Distribution tarballs from Mercurial repositories + +Creating distribution tarballs can now be made out of projects based +on Mercurial. As before, this remains possible only with the Ninja +backend. 
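+
+As with Git-based projects, the archive is produced by the Ninja
+backend's dist target:
+
+```console
+$ ninja dist
+```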
+ +## Keyword argument verification + +Meson will now check the keyword arguments used when calling any +function and print a warning if any of the keyword arguments is not +known. In the future this will become a hard error. + +## Add support for Genie to Vala compiler + +The Vala compiler has an alternative syntax, Genie, that uses the +`.gs` file extension. Meson now recognises and uses Genie files. + +## Pkgconfig support for additional cflags + +The Pkgconfig module object can add arbitrary extra cflags to the Cflags +value in the .pc file, using the "extra_cflags" keyword: +```meson +pkg.generate(libraries : libs, + subdirs : h, + version : '1.0', + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.', + extra_cflags : '-Dfoo' ) +``` + +## Base options accessible via get_option() + +Base options are now accessible via the get_option() function. +```meson +uses_lto = get_option('b_lto') +``` + +## Allow crate type configuration for Rust compiler + +Rust targets now take an optional `rust_crate_type` keyword, allowing +you to set the crate type of the resulting artifact. Valid crate types +are `dylib` or `cdylib` for shared libraries, and `rlib` or +`staticlib` for static libraries. For more, see Rust's [linkage +reference][rust-linkage]. + +[rust-linkage]: https://doc.rust-lang.org/reference/linkage.html + +## Simultaneous use of Address- and Undefined Behavior Sanitizers + +Both the address- and undefined behavior sanitizers can now be used +simultaneously by passing `-Db_sanitize=address,undefined` to Meson. + +## Unstable SIMD module + +A new experimental module to compile code with many different SIMD +instruction sets and selecting the best one at runtime. This module +is unstable, meaning its API is subject to change in later releases. +It might also be removed altogether. + + +## Import libraries for executables on Windows + +The new keyword `implib` to `executable()` allows generation of an import +library for the executable. + +## Added build_rpath keyword argument + +You can specify `build_rpath : '/foo/bar'` in build targets and the +given path will get added to the target's rpath in the build tree. It +is removed during the install step. + +Meson will print a warning when the user tries to add an rpath linker +flag manually, e.g. via `link_args` to a target. This is not +recommended because having multiple rpath causes them to stomp on each +other. This warning will become a hard error in some future release. + +## Vulkan dependency module + +Vulkan can now be used as native dependency. The dependency module +will detect the VULKAN_SDK environment variable or otherwise try to +receive the vulkan library and header via pkgconfig or from the +system. + +## Limiting the maximum number of linker processes + +With the Ninja backend it is now possible to limit the maximum number of +concurrent linker processes. This is usually only needed for projects +that have many large link steps that cause the system to run out of +memory if they are run in parallel. This limit can be set with the +new `backend_max_links` option. + +## Disable implicit include directories + +By default Meson adds the current source and build directories to the +header search path. On some rare occasions this is not desired. Setting +the `implicit_include_directories` keyword argument to `false` these +directories are not used. + +## Support for MPI dependency + +MPI is now supported as a dependency. 
Because dependencies are +language-specific, you must specify the requested language with the +`language` keyword, i.e., `dependency('mpi', language='c')` will +request the C MPI headers and libraries. See [the MPI +dependency](Dependencies.md#mpi) for more information. + +## Allow excluding files or directories from `install_subdir` + +The [`install_subdir`](Reference-manual.md#install_subdir) command +accepts the new `exclude_files` and `exclude_directories` keyword +arguments that allow specified files or directories to be excluded +from the installed subdirectory. + +## Make all Meson functionality invokable via the main executable + +Previously Meson had multiple executables such as `mesonintrospect` +and `mesontest`. They are now invokable via the main Meson executable +like this: + + meson configure # equivalent to mesonconf + meson test # equivalent to mesontest + +The old commands are still available but they are deprecated +and will be removed in some future release. + +## Pcap dependency detector + +Meson will automatically obtain dependency information for pcap +using the `pcap-config` tool. It is used like any other dependency: + +```meson +pcap_dep = dependency('pcap', version : '>=1.0') +``` + +## GNOME module mkenums_simple() addition + +Most libraries and applications use the same standard templates for +glib-mkenums. There is now a new `mkenums_simple()` convenience method +that passes those default templates to glib-mkenums and allows some tweaks +such as optional function decorators or leading underscores. diff --git a/meson/docs/markdown/Release-notes-for-0.43.0.md b/meson/docs/markdown/Release-notes-for-0.43.0.md new file mode 100644 index 000000000..1ce774dec --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.43.0.md @@ -0,0 +1,125 @@ +--- +title: Release 0.43 +short-description: Release notes for 0.43 +... + +## Portability improvements to Boost Dependency + +The Boost dependency has been improved to better detect the various +ways to install boost on multiple platforms. At the same time the +`modules` semantics for the dependency has been changed. Previously it +was allowed to specify header directories as `modules` but it wasn't +required. Now, modules are only used to specify libraries that require +linking. + +This is a breaking change and the fix is to remove all modules that aren't +found. + +## Generator learned capture + +Generators can now be configured to capture the standard output. See +`test cases/common/98 gen extra/meson.build` for an example. + +## Can index CustomTarget objects + +The `CustomTarget` object can now be indexed like an array. The +resulting object can be used as a source file for other Targets, this +will create a dependency on the original `CustomTarget`, but will only +insert the generated file corresponding to the index value of the +`CustomTarget`'s `output` keyword. + +```meson +c = custom_target( + ... + output : ['out.h', 'out.c'], +) +lib1 = static_library( + 'lib1', + [lib1_sources, c[0]], + ... +) +exec = executable( + 'executable', + c[1], + link_with : lib1, +) +``` + +## Can override executables in the cross file + +The cross file can now be used for overriding the result of +`find_program`. As an example if you want to find the `objdump` +command and have the following definition in your cross file: + +```ini +[binaries] +... +objdump = '/usr/bin/arm-linux-gnueabihf-objdump-6' +``` + +Then issuing the command `find_program('objdump')` will return the +version specified in the cross file. 
If you need the build machine's +objdump, you can specify the `native` keyword like this: + +```meson +native_objdump = find_program('objdump', native : true) +``` + +## Easier handling of supported compiler arguments + +A common pattern for handling multiple desired compiler arguments, was +to test their presence and add them to an array one-by-one, e.g.: + +```meson +warning_flags_maybe = [ + '-Wsomething', + '-Wanother-thing', + '-Wno-the-other-thing', +] +warning_flags = [] +foreach flag : warning_flags_maybe + if cc.has_argument(flag) + warning_flags += flag + endif +endforeach +cc.add_project_argument(warning_flags) +``` + +A helper has been added for the foreach/has_argument pattern, so you +can now simply do: + +```meson +warning_flags = [ ... ] +flags = cc.get_supported_arguments(warning_flags) +``` + +## Better support for shared libraries in non-system paths + +Meson has support for prebuilt object files and static libraries. This +release adds feature parity to shared libraries that are either in +non-standard system paths or shipped as part of your project. On +systems that support rpath, Meson automatically adds rpath entries to +built targets using manually found external libraries. + +This means that e.g. supporting prebuilt libraries shipped with your +source or provided by subprojects or wrap definitions by writing a +build file like this: + +```meson +project('myprebuiltlibrary', 'c') + +cc = meson.get_compiler('c') +prebuilt = cc.find_library('mylib', dirs : meson.current_source_dir()) +mydep = declare_dependency(include_directories : include_directories('.'), + dependencies : prebuilt) +``` + +Then you can use the dependency object in the same way as any other. + +## wrap-svn + +The [Wrap dependency system](Wrap-dependency-system-manual.md) now +supports [Subversion](https://subversion.apache.org/) (svn). This +support is rudimentary. The repository url has to point to a specific +(sub)directory containing the `meson.build` file (typically `trunk/`). +However, providing a `revision` is supported. diff --git a/meson/docs/markdown/Release-notes-for-0.44.0.md b/meson/docs/markdown/Release-notes-for-0.44.0.md new file mode 100644 index 000000000..8e5c09846 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.44.0.md @@ -0,0 +1,154 @@ +--- +title: Release 0.44 +short-description: Release notes for 0.44 +... + +# New features + +## Added warning function + +This function prints its argument to the console prefixed by +"WARNING:" in yellow color. A simple example: + +warning('foo is deprecated, please use bar instead') + + +## Adds support for additional Qt5-Module keyword `moc_extra_arguments` + +When `moc`-ing sources, the `moc` tool does not know about any +preprocessor macros. The generated code might not match the input +files when the linking with the moc input sources happens. + +This amendment allows to specify a a list of additional arguments +passed to the `moc` tool. They are called `moc_extra_arguments`. + + +## Prefix-dependent defaults for sysconfdir, localstatedir and sharedstatedir + +These options now default in a way consistent with +[FHS](http://refspecs.linuxfoundation.org/fhs.shtml) and common usage. + +If prefix is `/usr`, default sysconfdir to `/etc`, localstatedir to `/var` and +sharedstatedir to `/var/lib`. + +If prefix is `/usr/local` (the default), default localstatedir to +`/var/local` and sharedstatedir to `/var/local/lib`. 
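+
+These directories can be consumed from `meson.build` as before; a minimal
+sketch (the configuration variable names are illustrative):
+
+```meson
+conf = configuration_data()
+# join_paths() keeps absolute values such as /etc intact
+conf.set_quoted('SYSCONFDIR', join_paths(get_option('prefix'), get_option('sysconfdir')))
+conf.set_quoted('LOCALSTATEDIR', join_paths(get_option('prefix'), get_option('localstatedir')))
+```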
+ + +## An array type for user options + +Previously to have an option that took more than one value a string +value would have to be created and split, but validating this was +difficult. A new array type has been added to the `meson_options.txt` +for this case. It works like a 'combo', but allows more than one +option to be passed. The values can optionally be validated against a +list of valid values. When used on the command line (with -D), values +are passed as a comma separated list. + +```meson +option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one']) +``` + +These can be overwritten on the command line, + +```meson +meson _build -Darray_opt=two,three +``` + +## LLVM dependency supports both dynamic and static linking + +The LLVM dependency has been improved to consistently use dynamic +linking. Previously recent version (>= 3.9) would link dynamically +while older versions would link statically. + +Now LLVM also accepts the `static` keyword to enable statically +linking to LLVM modules instead of dynamically linking. + + +## Added `if_found` to subdir + +Added a new keyword argument to the `subdir` command. It is given a +list of dependency objects and the function will only recurse in the +subdirectory if they are all found. Typical usage goes like this. + +```meson +d1 = dependency('foo') # This is found +d2 = dependency('bar') # This is not found + +subdir('somedir', if_found : [d1, d2]) +``` + +In this case the subdirectory would not be entered since `d2` could +not be found. + +## `get_unquoted()` method for the `configuration` data object + +New convenience method that allows reusing a variable value +defined quoted. Useful in C for config.h strings for example. + + +## Added disabler object + +A disabler object is a new kind of object that has very specific +semantics. If it is used as part of any other operation such as an +argument to a function call, logical operations etc, it will cause the +operation to not be evaluated. Instead the return value of said +operation will also be the disabler object. + +For example if you have an setup like this: + +```meson +dep = dependency('foo') +lib = shared_library('mylib', 'mylib.c', + dependencies : dep) +exe = executable('mytest', 'mytest.c', + link_with : lib) +test('mytest', exe) +``` + +If you replace the dependency with a disabler object like this: + +```meson +dep = disabler() +lib = shared_library('mylib', 'mylib.c', + dependencies : dep) +exe = executable('mytest', 'mytest.c', + link_with : lib) +test('mytest', exe) +``` + +Then the shared library, executable and unit test are not +created. This is a handy mechanism to cut down on the number of `if` +statements. + + +## Config-Tool based dependencies gained a method to get arbitrary options + +A number of dependencies (CUPS, LLVM, pcap, WxWidgets, GnuStep) use a +config tool instead of pkg-config. As of this version they now have a +`get_configtool_variable` method, which is analogous to the +`get_pkgconfig_variable` for pkg config. + +```meson +dep_llvm = dependency('LLVM') +llvm_inc_dir = dep_llvm.get_configtool_variable('includedir') +``` + +## Embedded Python in Windows MSI packages + +Meson now ships an internal version of Python in the MSI installer +packages. This means that it can run Python scripts that are part of +your build transparently. That is, if you do the following: + +```meson +myprog = find_program('myscript.py') +``` + +Then Meson will run the script with its internal Python version if necessary. 
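+
+The returned program object can then be used like any other, for example
+to drive a custom target (the output name here is hypothetical):
+
+```meson
+gen_src = custom_target('generated-source',
+  output : 'generated.c',
+  # myprog is the script found above; it runs with the bundled Python if needed
+  command : [myprog, '@OUTPUT@'])
+```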
+
+## Libwmf dependency now supports libwmf-config
+
+Earlier, `dependency('libwmf')` could only detect the library with
+pkg-config files. Now, if pkg-config files are not found, Meson will
+look for `libwmf-config` and if it's found, will use that to find the
+library.
diff --git a/meson/docs/markdown/Release-notes-for-0.45.0.md b/meson/docs/markdown/Release-notes-for-0.45.0.md
new file mode 100644
index 000000000..9dd56e31a
--- /dev/null
+++ b/meson/docs/markdown/Release-notes-for-0.45.0.md
@@ -0,0 +1,203 @@
+---
+title: Release 0.45
+short-description: Release notes for 0.45
+...
+
+# New features
+
+## Python minimum version is now 3.5
+
+Meson will from this version on require Python version 3.5 or newer.
+
+## Config-Tool based dependencies can be specified in a cross file
+
+Tools like LLVM and pcap use a config tool for dependencies; this is a
+script or binary that is run to get configuration information (cflags,
+ldflags, etc).
+
+These binaries may now be specified in the `binaries` section of a
+cross file.
+
+```ini
+[binaries]
+cc = ...
+llvm-config = '/usr/bin/llvm-config32'
+```
+
+## Visual Studio C# compiler support
+
+In addition to the Mono C# compiler we also support Visual Studio's C#
+compiler. Currently this is only supported on the Ninja backend.
+
+## Removed two deprecated features
+
+The standalone `find_library` function has been a no-op for a long
+time. Starting with this version it becomes a hard error.
+
+There used to be a keywordless version of `run_target` which looked
+like this:
+
+```meson
+run_target('targetname', 'command', 'arg1', 'arg2')
+```
+
+This is now an error. The correct format for this is now:
+
+```meson
+run_target('targetname',
+           command : ['command', 'arg1', 'arg2'])
+```
+
+## Experimental FPGA support
+
+This version adds support for generating, analysing and uploading FPGA
+programs using the [IceStorm
+toolchain](http://www.clifford.at/icestorm/). This support is
+experimental and is currently limited to the `iCE 40` series of FPGA
+chips.
+
+FPGA generation integrates with other parts of Meson seamlessly. As an
+example, [here](https://github.com/jpakkane/lm32) is an example
+project that compiles a simple firmware into Verilog and combines that
+with an lm32 softcore processor.
+
+## Generator outputs can preserve directory structure
+
+Normally when generating files with a generator, Meson flattens the
+input files so they all go in the same directory. Some code
+generators, such as Protocol Buffers, require that the generated files
+have the same directory layout as the input files used to generate
+them. This can now be achieved like this:
+
+```meson
+g = generator(...) # Compiles protobuf sources
+generated = g.process('com/mesonbuild/one.proto',
+  'com/mesonbuild/two.proto',
+  preserve_path_from : meson.current_source_dir())
+```
+
+This would cause the following files to be generated inside the target
+private directory:
+
+    com/mesonbuild/one.pb.h
+    com/mesonbuild/one.pb.cc
+    com/mesonbuild/two.pb.h
+    com/mesonbuild/two.pb.cc
+
+## Hexadecimal integer literals
+
+Hexadecimal integer literals can now be used in build and option files.
+
+```meson
+int_255 = 0xFF
+```
+
+## b_ndebug : if-release
+
+The value `if-release` can be given for the `b_ndebug` project option.
+
+This will cause the `NDEBUG` preprocessor macro to be defined for
+release type builds as if the `b_ndebug` project option had been set
+to `true`.
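+
+A minimal way to opt in when setting up a build directory (the directory
+name is arbitrary):
+
+```console
+$ meson builddir -Dbuildtype=release -Db_ndebug=if-release
+```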
+ +## `install_data()` defaults to `{datadir}/{projectname}` + +If `install_data()` is not given an `install_dir` keyword argument, the +target directory defaults to `{datadir}/{projectname}` (e.g. +`/usr/share/myproj`). + +## install_subdir() supports strip_directory + +If strip_directory=true install_subdir() installs directory contents +instead of directory itself, stripping basename of the source directory. + +## Integer options + +There is a new integer option type with optional minimum and maximum +values. It can be specified like this in the `meson_options.txt` file: + +```meson +option('integer_option', type : 'integer', min : 0, max : 5, value : 3) +``` + +## New method meson.project_license() + +The `meson` builtin object now has a `project_license()` method that +returns a list of all licenses for the project. + +## Rust cross-compilation + +Cross-compilation is now supported for Rust targets. Like other +cross-compilers, the Rust binary must be specified in your cross file. +It should specify a `--target` (as installed by `rustup target`) and a +custom linker pointing to your C cross-compiler. For example: + +```ini +[binaries] +c = '/usr/bin/arm-linux-gnueabihf-gcc-7' +rust = [ + 'rustc', + '--target', 'arm-unknown-linux-gnueabihf', + '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7', +] +``` + +## Rust compiler-private library disambiguation + +When building a Rust target with Rust library dependencies, an +`--extern` argument is now specified to avoid ambiguity between the +dependency library, and any crates of the same name in `rustc`'s +private sysroot. + +## Project templates + +Meson ships with predefined project templates. To start a new project +from scratch, simply go to an empty directory and type: + + meson init --name=myproject --type=executable --language=c + +## Improve test setup selection + +Test setups are now identified (also) by the project they belong to +and it is possible to select the used test setup from a specific +project. E.g. to use a test setup `some_setup` from project +`some_project` for all executed tests one can use + + meson test --setup some_project:some_setup + +Should one rather want test setups to be used from the same project as +where the current test itself has been defined, one can use just + + meson test --setup some_setup + +In the latter case every (sub)project must have a test setup `some_setup` +defined in it. + +## Can use custom targets as Windows resource files + +The `compile_resources()` function of the `windows` module can now be +used on custom targets as well as regular files. + +## Can promote dependencies with wrap command + +The `promote` command makes it easy to copy nested dependencies to the +top level. + + meson wrap promote scommon + +This will search the project tree for a subproject called `scommon` +and copy it to the top level. + +If there are many embedded subprojects with the same name, you have to +specify which one to promote manually like this: + + meson wrap promote subprojects/s1/subprojects/scommon + +## Yielding subproject option to superproject + +Normally project options are specific to the current project. However +sometimes you want to have an option whose value is the same over all +projects. This can be achieved with the new `yield` keyword for +options. When set to `true`, getting the value of this option in +`meson.build` files gets the value from the option with the same name +in the master project (if such an option exists). 
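+
+A sketch of such an option in a subproject's `meson_options.txt` (the
+option name is illustrative):
+
+```meson
+option('docs', type : 'boolean', value : true, yield : true)
+```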
diff --git a/meson/docs/markdown/Release-notes-for-0.46.0.md b/meson/docs/markdown/Release-notes-for-0.46.0.md new file mode 100644 index 000000000..64e237d0e --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.46.0.md @@ -0,0 +1,329 @@ +--- +title: Release 0.46 +short-description: Release notes for 0.46 +... + +# New features + +## Allow early return from a script + +Added the function `subdir_done()`. Its invocation exits the current +script at the point of invocation. All previously invoked build +targets and commands are build/executed. All following ones are +ignored. If the current script was invoked via `subdir()` the parent +script continues normally. + +## Log output slightly changed + +The format of some human-readable diagnostic messages has changed in +minor ways. In case you are parsing these messages, you may need to +adjust your code. + +## ARM compiler for C and CPP + +Cross-compilation is now supported for ARM targets using ARM compiler +- ARMCC. The current implementation does not support shareable +libraries. The default extension of the output is .axf. The +environment path should be set properly for the ARM compiler +executables. The '--cpu' option with the appropriate target type +should be mentioned in the cross file as shown in the snippet below. + +```ini +[properties] +c_args = ['--cpu=Cortex-M0plus'] +cpp_args = ['--cpu=Cortex-M0plus'] + +``` + +## Building both shared and static libraries + +A new function `both_libraries()` has been added to build both shared +and static libraries at the same time. Source files will be compiled +only once and object files will be reused to build both shared and +static libraries, unless `b_staticpic` user option or `pic:` keyword +argument are set to false in which case sources will be compiled +twice. + +The returned `buildtarget` object always represents the shared library. + +## Compiler object can now be passed to run_command() + +This can be used to run the current compiler with the specified +arguments to obtain additional information from it. One of the use +cases is to get the location of development files for the GCC plugins: + +```meson +cc = meson.get_compiler('c') +result = run_command(cc, '-print-file-name=plugin') +plugin_dev_path = result.stdout().strip() +``` + +## declare_dependency() now supports `link_whole:` + +`declare_dependency()` now supports the `link_whole:` keyword argument which +transparently works for build targets which use that dependency. + +## Old command names are now errors + +The old executable names `mesonintrospect`, `mesonconf`, +`mesonrewriter` and `mesontest` have been deprecated for a long time. +Starting from this version they no longer do anything but instead +always error out. All functionality is available as subcommands in the +main `meson` binary. + +## Meson and meson configure now accept the same arguments + +Previously Meson required that builtin arguments (like prefix) be +passed as `--prefix` to `meson` and `-Dprefix` to `meson configure`. +`meson` now accepts -D form like `meson configure` has. `meson +configure` also accepts the `--prefix` form, like `meson` has. + +## Recursively extract objects + +The `recursive:` keyword argument has been added to +`extract_all_objects()`. When set to `true` it will also return +objects passed to the `objects:` argument of this target. By default +only objects built for this target are returned to maintain backward +compatibility with previous versions. The default will eventually be +changed to `true` in a future version. 
+
+```meson
+lib1 = static_library('a', 'source.c', objects : 'prebuilt.o')
+lib2 = static_library('b', objects : lib1.extract_all_objects(recursive : true))
+```
+
+## Can override find_program()
+
+It is now possible to override the result of `find_program` to point
+to a custom program you want. The overriding is global and applies to
+every subproject from there on. Here is how you would use it.
+
+In master project
+
+```meson
+subproject('mydep')
+```
+
+In the called subproject:
+
+```meson
+prog = find_program('my_custom_script')
+meson.override_find_program('mycodegen', prog)
+```
+
+In master project (or, in fact, any subproject):
+
+```meson
+genprog = find_program('mycodegen')
+```
+
+Now `genprog` points to the custom script. If the dependency had come
+from the system, then it would point to the system version.
+
+You can also use the return value of `configure_file()` to override
+a program in the same way as above:
+
+```meson
+prog_script = configure_file(input : 'script.sh.in',
+                             output : 'script.sh',
+                             configuration : cdata)
+meson.override_find_program('mycodegen', prog_script)
+```
+
+## New functions: has_link_argument() and friends
+
+A new set of methods has been added to [compiler
+objects](Reference-manual.md#compiler-object) to test if the linker
+supports given arguments.
+
+- `has_link_argument()`
+- `has_multi_link_arguments()`
+- `get_supported_link_arguments()`
+- `first_supported_link_argument()`
+
+## "meson help" now shows command line help
+
+Command line parsing is now less surprising. "meson help" is now
+equivalent to "meson --help" and "meson help <command>" is
+equivalent to "meson <command> --help", instead of creating a build
+directory called "help" in these cases.
+
+## Autogeneration of simple meson.build files
+
+A feature to generate a meson.build file compiling given C/C++ source
+files into a single executable has been added to "meson init". By
+default, it will take all recognizable source files in the current
+directory. You can also specify a list of dependencies with the -d
+flag and automatically invoke a build with the -b flag to check if the
+code builds with those dependencies.
+
+For example,
+
+```console
+meson init -fbd sdl2,gl
+```
+
+will look for C or C++ files in the current directory, generate a
+meson.build for them with the dependencies of sdl2 and gl and
+immediately try to build it, overwriting any previous meson.build and
+build directory.
+
+## install_data() supports `rename:`
+
+The `rename:` keyword argument is used to change names of the installed
+files. Here's how you install and rename the following files:
+
+- `file1.txt` into `share/myapp/dir1/data.txt`
+- `file2.txt` into `share/myapp/dir2/data.txt`
+
+```meson
+install_data(['file1.txt', 'file2.txt'],
+             rename : ['dir1/data.txt', 'dir2/data.txt'],
+             install_dir : 'share/myapp')
+```
+
+## Support for lcc compiler for e2k (Elbrus) architecture
+
+In this version, support for the lcc compiler for Elbrus processors
+based on the [e2k
+microarchitecture](https://en.wikipedia.org/wiki/Elbrus_2000) has been
+added.
+
+Examples of such CPUs:
+* [Elbrus-8S](https://en.wikipedia.org/wiki/Elbrus-8S);
+* Elbrus-4S;
+* [Elbrus-2S+](https://en.wikipedia.org/wiki/Elbrus-2S%2B).
+
+Such compilers have a similar behavior to gcc (basic option compatibility),
+but are not strictly compatible with gcc as of the current version.
+
+Major differences as of version 1.21.22:
+* it does not support LTO and PCH;
+* it suffers from the same dependency file creation error as icc;
+* it has minor differences in output, especially version output;
+* it differently reacts to lchmod() detection;
+* some backend messages are produced in ru_RU.KOI8-R even if LANG=C;
+* its preprocessor treats some characters differently.
+
+So every noted difference is properly handled now in Meson.
+
+## String escape character sequence update
+
+Single-quoted strings in Meson have been taught the same set of escape
+sequences as in Python. It is therefore now possible to use arbitrary
+bytes in strings, like for example `NUL` (`\0`) and other ASCII
+control characters. See the chapter about [*Strings* in
+*Syntax*](Syntax.md#strings) for more details.
+
+Potential backwards compatibility issue: Any valid escape sequence
+according to the new rules will be interpreted as an escape sequence
+instead of the literal characters. Previously only the following
+escape sequences were supported in single-quote strings: `\'`, `\\`
+and `\n`.
+
+Note that the behaviour of triple-quoted (multiline) strings has not
+changed. They behave like raw strings and do not support any escape
+sequences.
+
+## New `forcefallback` wrap mode
+
+A new wrap mode was added, `--wrap-mode=forcefallback`. When this is
+set, dependencies for which a fallback was provided will always use
+it, even if an external dependency exists and satisfies the version
+requirements.
+
+## Relaxing of target name requirements
+
+In earlier versions of Meson you could only have one target of a given
+name for each type. For example you could not have two executables
+named `foo`. This requirement is now relaxed so that you can have
+multiple targets with the same name, as long as they are in different
+subdirectories.
+
+Note that projects that have multiple targets with the same name can
+not be built with the `flat` layout or any backend that writes outputs
+in the same directory.
+
+## Addition of OpenMP dependency
+
+An OpenMP dependency (`openmp`) has been added that encapsulates the
+various flags used by compilers to enable OpenMP and checks for the
+existence of the `omp.h` header. The `language` keyword may be passed
+to force the use of a specific compiler for the checks.
+
+## Added new partial_dependency method to dependencies and libraries
+
+It is now possible to use only part of a dependency in a target. This
+allows, for example, using only a dependency's headers for convenience
+libraries, to avoid linking to the same library multiple times.
+
+```meson
+dep = dependency('xcb')
+
+helper = static_library(
+  'helper',
+  ['helper1.c', 'helper2.c'],
+  dependencies : dep.partial_dependency(includes : true),
+)
+
+final = shared_library(
+  'final',
+  ['final.c'],
+  dependencies : dep,
+)
+```
+
+A partial dependency will have the same name and version as the full
+dependency it is derived from, as well as any values requested.
+
+## Improved generation of pkg-config files for static only libraries.
+
+Previously, pkg-config files generated by the pkgconfig module for
+static libraries with dependencies could only be used in a
+dependency with `static: true`.
+
+Now the generated file contains the needed dependency libraries
+directly within `Requires` and `Libs` for the built static libraries
+passed via the `libraries` keyword argument.
+
+Projects that install both a static and a shared version of a library
+should pass the result of
+[`both_libraries()`](Reference-manual.md#both_libraries) to the
+pkg-config file generator, or use
+[`configure_file()`](Reference-manual.md#configure_file) for more
+complicated setups.
+
+## Improvements to pkgconfig module
+
+A `StaticLibrary` or `SharedLibrary` object can optionally be passed
+as the first positional argument of the `generate()` method. If one is
+provided, default values will be provided for all required fields of
+the pc file:
+- `install_dir` is set to the `pkgconfig` folder in the same location as the provided library.
+- `description` is set to the project's name followed by the library's name.
+- `name` is set to the library's name.
+
+Generating a .pc file is now as simple as:
+
+```meson
+pkgconfig.generate(mylib)
+```
+
+## pkgconfig.generate() `requires:` parameters accept non-string arguments
+
+The `requires:` and `requires_private:` keyword arguments of
+`pkgconfig.generate()` now accept pkg-config dependencies and
+libraries for which pkg-config files were generated.
+
+## Generic python module
+
+Meson now has a revamped and generic (Python 2 and 3) version of
+the python3 module. With [this new interface](Python-module.md),
+projects can now fully specify the version of Python they want to
+build against / install sources to, and can do so against multiple
+major or minor versions in parallel.
+
+## test() now supports the `depends:` keyword argument
+
+Build targets and custom targets can be listed in the `depends:`
+keyword argument of the `test()` function. These targets will be built
+before the test is run, even if they have `build_by_default : false`.
diff --git a/meson/docs/markdown/Release-notes-for-0.47.0.md b/meson/docs/markdown/Release-notes-for-0.47.0.md
new file mode 100644
index 000000000..175126ea9
--- /dev/null
+++ b/meson/docs/markdown/Release-notes-for-0.47.0.md
@@ -0,0 +1,312 @@
+---
+title: Release 0.47
+short-description: Release notes for 0.47
+...
+
+# New features
+
+## Allow early return from a script
+
+Added the function `subdir_done()`. Its invocation exits the current
+script at the point of invocation. All previously invoked build
+targets and commands are built/executed. All following ones are
+ignored. If the current script was invoked via `subdir()` the parent
+script continues normally.
+
+## Concatenate string literals returned from `get_define()`
+
+After obtaining the value of a preprocessor symbol, consecutive string
+literals are merged into a single string literal. For example, a
+preprocessor symbol's value `"ab" "cd"` is returned as `"abcd"`.
+
+## ARM compiler (version 6) for C and CPP
+
+Cross-compilation is now supported for ARM targets using ARM compiler
+version 6 (ARMCLANG). The required ARMCLANG compiler options for
+building a shared library are not included in the current Meson
+implementation for ARMCLANG support, so it cannot build shared
+libraries. This current Meson implementation for ARMCLANG support
+cannot build assembly files with arm syntax (we need to use armasm
+instead of ARMCLANG for the `.s` files with this syntax) and only
+supports GNU syntax.
+
+The default extension of the executable output is `.axf`. The
+environment path should be set properly for the ARM compiler
+executables. The `--target` and `-mcpu` options with the appropriate
+values should be mentioned in the cross file as shown in the snippet
+below.
+ +```ini +[properties] +c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] +cpp_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] +``` + +Note: +- The current changes are tested on Windows only. +- PIC support is not enabled by default for ARM, + if users want to use it, they need to add the required arguments + explicitly from cross-file(`c_args`/`cpp_args`) or some other way. + +## New base build option for LLVM (Apple) bitcode support + +When building with clang on macOS, you can now build your static and +shared binaries with embedded bitcode by enabling the `b_bitcode` +[base option](Builtin-options.md#base-options) by passing +`-Db_bitcode=true` to Meson. + +This is better than passing the options manually in the environment +since Meson will automatically disable conflicting options such as +`b_asneeded`, and will disable bitcode support on targets that don't +support it such as `shared_module()`. + +Since this requires support in the linker, it is currently only +enabled when using Apple ld. In the future it can be extended to clang +on other platforms too. + +## New compiler check: `check_header()` + +The existing compiler check `has_header()` only checks if the header +exists, either with the `__has_include` C++11 builtin, or by running +the pre-processor. + +However, sometimes the header you are looking for is unusable on some +platforms or with some compilers in a way that is only detectable at +compile-time. For such cases, you should use `check_header()` which +will include the header and run a full compile. + +Note that `has_header()` is much faster than `check_header()`, so it +should be used whenever possible. + +## New action `copy:` for `configure_file()` + +In addition to the existing actions `configuration:` and `command:`, +[`configure_file()`](Reference-manual.md#configure_file) now accepts a +keyword argument `copy:` which specifies a new action to copy the file +specified with the `input:` keyword argument to a file in the build +directory with the name specified with the `output:` keyword argument. + +These three keyword arguments are, as before, mutually exclusive. You +can only do one action at a time. + +## New keyword argument `encoding:` for `configure_file()` + +Add a new keyword to +[`configure_file()`](Reference-manual.md#configure_file) that allows +the developer to specify the input and output file encoding. The +default value is the same as before: UTF-8. + +In the past, Meson would not handle non-UTF-8/ASCII files correctly, +and in the worst case would try to coerce it to UTF-8 and mangle the +data. UTF-8 is the standard encoding now, but sometimes it is +necessary to process files that use a different encoding. + +For additional details see [#3135](https://github.com/mesonbuild/meson/pull/3135). + +## New keyword argument `output_format:` for `configure_file()` + +When called without an input file, `configure_file` generates a C +header file by default. A keyword argument was added to allow +specifying the output format, for example for use with nasm or yasm: + +```meson +conf = configuration_data() +conf.set('FOO', 1) + +configure_file('config.asm', + configuration: conf, + output_format: 'nasm') +``` + +## Substitutions in `custom_target(depfile:)` + +The `depfile` keyword argument to `custom_target` now accepts the `@BASENAME@` +and `@PLAINNAME@` substitutions. 
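+
+As a minimal sketch (the generator script and file names here are
+hypothetical), this allows writing the depfile next to the generated
+output without repeating the file name:
+
+```meson
+gen = find_program('mygen.py')
+
+hdr = custom_target('gen-header',
+  input : 'data.in',
+  output : '@BASENAME@.h',
+  depfile : '@BASENAME@.h.d',
+  # The script is expected to write a Makefile-style depfile to @DEPFILE@.
+  command : [gen, '@INPUT@', '-o', '@OUTPUT@', '--depfile', '@DEPFILE@'])
+```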
+ +## Deprecated `build_always:` for custom targets + +Setting `build_always` to `true` for a custom target not only marks +the target to be always considered out of date, but also adds it to +the set of default targets. This option is therefore deprecated and +the new option `build_always_stale` is introduced. + +`build_always_stale` *only* marks the target to be always considered +out of date, but does not add it to the set of default targets. The +old behaviour can be achieved by combining `build_always_stale` with +`build_by_default`. + +The documentation has been updated accordingly. + +## New built-in object type: dictionary + +Meson dictionaries use a syntax similar to python's dictionaries, but +have a narrower scope: they are immutable, keys can only be string +literals, and initializing a dictionary with duplicate keys causes a +fatal error. + +Example usage: + +```meson +d = {'foo': 42, 'bar': 'baz'} + +foo = d.get('foo') +foobar = d.get('foobar', 'fallback-value') + +foreach key, value : d + Do something with key and value +endforeach +``` + +## Array options treat `-Dopt=` and `-Dopt=[]` as equivalent + +Prior to this change passing -Dopt= to an array opt would be +interpreted as `['']` (an array with an empty string), now `-Dopt=` is +the same as `-Dopt=[]`, an empty list. + +## Feature detection based on `meson_version:` in `project()` + +Meson will now print a `WARNING:` message during configuration if you +use a function or a keyword argument that was added in a Meson version +that's newer than the version specified inside `project()`. For +example: + +```meson +project('featurenew', meson_version: '>=0.43') + +cdata = configuration_data() +cdata.set('FOO', 'bar') +message(cdata.get_unquoted('FOO')) +``` + +This will output: + +``` +The Meson build system +Version: 0.47.0.dev1 +Source dir: C:\path\to\srctree +Build dir: C:\path\to\buildtree +Build type: native build +Project name: featurenew +Project version: undefined +Build machine cpu family: x86_64 +Build machine cpu: x86_64 +WARNING: Project targeting '>=0.43' but tried to use feature introduced in '0.44.0': configuration_data.get_unquoted() +Message: bar +Build targets in project: 0 +WARNING: Project specifies a minimum meson_version '>=0.43' which conflicts with: + * 0.44.0: {'configuration_data.get_unquoted()'} +``` + +## New type of build option for features + +A new type of [option called `feature`](Build-options.md#features) can +be defined in `meson_options.txt` for the traditional `enabled / +disabled / auto` tristate. The value of this option can be passed to +the `required` keyword argument of functions `dependency()`, +`find_library()`, `find_program()` and `add_languages()`. + +A new global option `auto_features` has been added to override the +value of all `auto` features. It is intended to be used by packagers +to have full control on which feature must be enabled or disabled. + +## New options to `gnome.gdbus_codegen()` + +You can now pass additional arguments to gdbus-codegen using the +`extra_args` keyword. This is the same for the other gnome function +calls. + +Meson now automatically adds autocleanup support to the generated +code. This can be modified by setting the autocleanup keyword. + +For example: + +```meson +sources += gnome.gdbus_codegen('com.mesonbuild.Test', + 'com.mesonbuild.Test.xml', + autocleanup : 'none', + extra_args : ['--pragma-once']) +``` + +## Made 'install' a top level Meson command + +You can now run `meson install` in your build directory and it will do +the install. 
It has several command line options you can toggle the +behaviour that is not in the default `ninja install` invocation. This +is similar to how `meson test` already works. + +For example, to install only the files that have changed, you can do: + +```console +$ meson install --only-changed +``` + +## `install_mode:` keyword argument extended to all installable targets + +It is now possible to pass an `install_mode` argument to all +installable targets, such as `executable()`, libraries, headers, man +pages and custom/generated targets. + +The `install_mode` argument can be used to specify the file mode in +symbolic format and optionally the owner/uid and group/gid for the +installed files. + +## New built-in option `install_umask` with a default value 022 + +This umask is used to define the default permissions of files and +directories created in the install tree. Files will preserve their +executable mode, but the exact permissions will obey the +`install_umask`. + +The `install_umask` can be overridden in the Meson command-line: + +```console +$ meson --install-umask=027 builddir/ +``` + +A project can also override the default in the `project()` call: + +```meson +project('myproject', 'c', + default_options : ['install_umask=027']) +``` + +To disable the `install_umask`, set it to `preserve`, in which case +permissions are copied from the files in their origin. + +## Octal and binary string literals + +Octal and binary integer literals can now be used in build and option files. + +```meson +int_493 = 0o755 +int_1365 = 0b10101010101 +``` + +## New keyword arguments: 'check' and 'capture' for `run_command()` + +If `check:` is `true`, then the configuration will fail if the command +returns a non-zero exit status. The default value is `false` for +compatibility reasons. + +`run_command()` used to always capture the output and stored it for +use in build files. However, sometimes the stdout is in a binary +format which is meant to be discarded. For that case, you can now set +the `capture:` keyword argument to `false`. + +## Windows resource files dependencies + +The `compile_resources()` function of the `windows` module now takes +the `depend_files:` and `depends:` keywords. + +When using binutils's `windres`, dependencies on files `#include`'d by +the preprocessor are now automatically tracked. + +## Polkit support for privileged installation + +When running `install`, if installation fails with a permission error +and `pkexec` is available, Meson will attempt to use it to spawn a +permission dialog for privileged installation and retry the +installation. + +If `pkexec` is not available, the old behaviour is retained and you +will need to explicitly run the install step with `sudo`. diff --git a/meson/docs/markdown/Release-notes-for-0.48.0.md b/meson/docs/markdown/Release-notes-for-0.48.0.md new file mode 100644 index 000000000..4b68b6d7c --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.48.0.md @@ -0,0 +1,343 @@ +--- +title: Release 0.48 +short-description: Release notes for 0.48 +... + +# New features + +## Toggles for build type, optimization and vcrt type + +Since the very beginning Meson has provided different project types to +use, such as *debug* and *minsize*. There is also a *plain* type that +adds nothing by default but instead makes it the user's responsibility +to add everything by hand. This works but is a bit tedious. + +In this release we have added new new options to manually toggle e.g. 
+optimization levels and debug info so those can be changed +independently of other options. For example by default the debug +buildtype has no optmization enabled at all. If you wish to use GCC's +`-Og` instead, you could set it with the following command: + +``` +meson configure -Doptimization=g +``` + +Similarly we have added a toggle option to select the version of +Visual Studio C runtime to use. By default it uses the debug runtime +DLL debug builds and release DLL for release builds but this can be +manually changed with the new base option `b_vscrt`. + +## Meson warns if two calls to `configure_file()` write to the same file + +If two calls to +[`configure_file()`](Reference-manual.md#configure_file) write to the +same file Meson will print a `WARNING:` message during configuration. +For example: + +```meson +project('configure_file', 'cpp') + +configure_file( + input: 'a.in', + output: 'out', + command: ['./foo.sh'] +) +configure_file( + input: 'a.in', + output: 'out', + command: ['./foo.sh'] +) +``` + +This will output: + +``` +The Meson build system +Version: 0.47.0.dev1 +Source dir: /path/to/srctree +Build dir: /path/to/buildtree +Build type: native build +Project name: configure_file +Project version: undefined +Build machine cpu family: x86_64 +Build machine cpu: x86_64 +Configuring out with command +WARNING: Output file out for configure_file overwritten. First time written in line 3 now in line 8 +Configuring out with command +Build targets in project: 0 +Found ninja-1.8.2 at /usr/bin/ninja +``` + +## New kwarg `console` for `custom_target()` + +This keyword argument conflicts with `capture`, and is meant for +commands that are resource-intensive and take a long time to +finish. With the Ninja backend, setting this will add this target to +[Ninja's `console` +pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool), +which has special properties such as not buffering stdout and +serializing all targets in this pool. + +The primary use-case for this is to be able to run external commands +that take a long time to exeute. Without setting this, the user does +not receive any feedback about what the program is doing. + +## `dependency(version:)` now applies to all dependency types + +Previously, version constraints were only enforced for dependencies +found using the pkg-config dependency provider. These constraints now +apply to dependencies found using any dependency provider. + +Some combinations of dependency, host and method do not currently +support discovery of the version. In these cases, the dependency will +not be found if a version constraint is applied, otherwise the +`version()` method for the dependency object will return `'unknown'`. + +(If discovering the version in one of these combinations is important +to you, and a method exists to determine the version in that case, +please file an issue with as much information as possible.) + +## python3 module is deprecated + +A generic module `python` has been added in Meson `0.46.0` and has a superset of +the features implemented by the previous `python3` module. 
+ +In most cases, it is a simple matter of renaming: +```meson +py3mod = import('python3') +python = py3mod.find_python() +``` + +becomes + +```meson +pymod = import('python') +python = pymod.find_installation() +``` + +## Dictionary addition + +Dictionaries can now be added, values from the second dictionary overrides values +from the first + +```meson +d1 = {'a' : 'b'} +d3 = d1 + {'a' : 'c'} +d3 += {'d' : 'e'} +``` + +## Dist scripts + +You can now specify scripts that are run as part of the `dist` +target. An example usage would go like this: + +```meson +project('foo', 'c') + +# other stuff here + +meson.add_dist_script('dist_cleanup.py') +``` + +## Fatal warnings + +A new command line option has been added: `--fatal-meson-warnings`. +When enabled, any warning message printed by Meson will be fatal and +raise an exception. It is intended to be used by developers and CIs to +easily catch deprecation warnings, or any other potential issues. + +## Helper methods added for checking GNU style attributes: `__attribute__(...)` + +A set of new helpers have been added to the C and C++ compiler objects +for checking GNU style function attributes. These are not just simpler +to use, they may be optimized to return fast on compilers that don't +support these attributes. Currently this is true for MSVC. + +```meson +cc = meson.get_compiler('c') +if cc.has_function_attribute('aligned') + add_project_arguments('-DHAVE_ALIGNED', language : 'c') +endif +``` + +Would replace code like: + +```meson +if cc.compiles('''into foo(void) __attribute__((aligned(32)))''') + add_project_arguments('-DHAVE_ALIGNED', language : 'c') +endif +``` + +Additionally, a multi argument version has been added: + +```meson +foreach s : cc.get_supported_function_attributes(['hidden', 'alias']) + add_project_arguments('-DHAVE_@0@'.format(s.to_upper()), language : 'c') +endforeach +``` + +## `gnome.generate_gir()` now optionally accepts multiple libraries + +The GNOME module can now generate a single gir for multiple libraries, +which is something `g-ir-scanner` supported, but had not been exposed +yet. + +gnome.generate_gir() will now accept multiple positional arguments, if +none of these arguments are an `Executable` instance. + +## Hotdoc module + +A new module has been written to ease generation of +[hotdoc](https://hotdoc.github.io/) based documentation. It supports +complex use cases such as hotdoc subprojects (to create documentation +portals) and makes it straight forward to leverage full capabilities +of hotdoc. + +Simple usage: + +``` meson +hotdoc = import('hotdoc') + +hotdoc.generate_doc( + 'foobar', + c_smart_index: true, + project_version: '0.1', + sitemap: 'sitemap.txt', + index: 'index.md', + c_sources: ['path/to/file.c'], + languages: ['c'], + install: true, +) +``` + +## `i18n.merge_file()` now fully supports variable substitutions defined in `custom_target()` + +Filename substitutions like @BASENAME@ and @PLAINNAME@ were previously +accepted but the name of the build target wasn't altered leading to +colliding target names when using the substitution twice. +i18n.merge_file() now behaves as custom_target() in this regard. + +## Projects args can be set separately for cross and native builds (potentially breaking change) + +It has been a longstanding bug (or let's call it a "delayed bug fix") +that if you do this: + +```meson +add_project_arguments('-DFOO', language : 'c') +``` + +Then the flag is used both in native and cross compilations. This is +very confusing and almost never what you want. 
To fix this a new +keyword `native` has been added to all functions that add arguments, +namely `add_global_arguments`, `add_global_link_arguments`, +`add_project_arguments` and `add_project_link_arguments` that behaves +like the following: + +```meson +# Added to native builds when compiling natively and to cross +# compilations when doing cross compiles. +add_project_arguments(...) + +# Added only to native compilations, not used in cross compilations. +add_project_arguments(..., native : true) + +# Added only to cross compilations, not used in native compilations. +add_project_arguments(..., native : false) +``` + +Also remember that cross compilation is a property of each target. +There can be target that are compiled with the native compiler and +some which are compiled with the cross compiler. + +Unfortunately this change is backwards incompatible and may cause some +projects to fail building. However this should be very rare in +practice. + +## More flexible `override_find_program()`. + +It is now possible to pass an `executable` to +`override_find_program()` if the overridden program is not used during +configure. + +This is particularly useful for fallback dependencies like Protobuf +that also provide a tool like protoc. + +## `shared_library()` now supports setting dylib compatibility and current version + +Now, by default `shared_library()` sets `-compatibility_version` and +`-current_version` of a macOS dylib using the `soversion`. + +This can be overridden by using the `darwin_versions:` kwarg to +[`shared_library()`](Reference-manual.md#shared_library). As usual, +you can also pass this kwarg to `library()` or `build_target()` and it +will be used in the appropriate circumstances. + +## Version comparison + +`dependency(version:)` and other version constraints now handle +versions containing non-numeric characters better, comparing versions +using the rpmvercmp algorithm (as using the `pkg-config` autoconf +macro `PKG_CHECK_MODULES` does). + +This is a breaking change for exact comparison constraints which rely +on the previous comparison behaviour of extending the compared +versions with `'0'` elements, up to the same length of `'.'`-separated +elements. + +For example, a version of `'0.11.0'` would previously match a version +constraint of `'==0.11'`, but no longer does, being instead considered +strictly greater. + +Instead, use a version constraint which exactly compares with the +precise version required, e.g. `'==0.11.0'`. + +## Keyword argument for GNU symbol visibility + +Build targets got a new keyword, `gnu_symbol_visibility` that controls +how symbols are exported from shared libraries. This is most commonly +used to hide implementation symbols like this: + +```meson +shared_library('mylib', ... + gnu_symbol_visibility: 'hidden') +``` + +In this case only symbols explicitly marked as visible in the source +files get exported. + +## Git wraps can now clone submodules automatically + +To enable this, the following needs to be added to the `.wrap` file: + +```ini +clone-recursive=true +``` + +## `subproject()` function now supports the `required:` kwarg + +This allows you to declare an optional subproject. You can now call +`found()` on the return value of the `subproject()` call to see if the +subproject is available before calling `get_variable()` to fetch +information from it. + +## `dependency()` objects now support the `.name()` method + +You can now fetch the name of the dependency that was searched like +so: + +```meson +glib_dep = dependency('glib-2.0') +... 
+message("dependency name is " + glib_dep.name()) +# This outputs `dependency name is glib-2.0` + +qt_dep = dependency('qt5') +... +message("dependency name is " + qt_dep.name()) +# This outputs `dependency name is qt5` + +decl_dep = declare_dependency() +... +message("dependency name is " + decl_dep.name()) +# This outputs `dependency name is internal` +``` diff --git a/meson/docs/markdown/Release-notes-for-0.49.0.md b/meson/docs/markdown/Release-notes-for-0.49.0.md new file mode 100644 index 000000000..6b84af19c --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.49.0.md @@ -0,0 +1,327 @@ +--- +title: Release 0.49 +short-description: Release notes for 0.49 +... + +# New features + +## Libgcrypt dependency now supports libgcrypt-config + +Earlier, `dependency('libgcrypt')` could only detect the library with +pkg-config files. Now, if pkg-config files are not found, Meson will +look for `libgcrypt-config` and if it's found, will use that to find +the library. + +## New `section` key for the buildoptions introspection + +Meson now has a new `section` key in each build option. This allows +IDEs to group these options similar to `meson configure`. + +The possible values for `section` are: + + - core + - backend + - base + - compiler + - directory + - user + - test + +## CC-RX compiler for C and CPP + +Cross-compilation is now supported for Renesas RX targets with the +CC-RX compiler. + +The environment path should be set properly for the CC-RX compiler +executables. The `-cpu` option with the appropriate value should be +mentioned in the cross-file as shown in the snippet below. + +```ini +[properties] +c_args = ['-cpu=rx600'] +cpp_args = ['-cpu=rx600'] +``` + +The default extension of the executable output is `.abs`. Other target +specific arguments to the compiler and linker will need to be added +explicitly from the +cross-file(`c_args`/`c_link_args`/`cpp_args`/`cpp_link_args`) or some +other way. Refer to the CC-RX User's manual for additional compiler +and linker options. + +## CMake `find_package` dependency backend + +Meson can now use the CMake `find_package` ecosystem to detect +dependencies. Both the old-style `_LIBRARIES` variables as well +as imported targets are supported. Meson can automatically guess the +correct CMake target in most cases but it is also possible to manually +specify a target with the `modules` property. + +```meson +# Implicitly uses CMake as a fallback and guesses a target +dep1 = dependency('KF5TextEditor') + +# Manually specify one or more CMake targets to use +dep2 = dependency('ZLIB', method : 'cmake', modules : ['ZLIB::ZLIB']) +``` + +CMake is automatically used after `pkg-config` fails when +no `method` (or `auto`) was provided in the dependency options. + +## New compiler method `get_argument_syntax` + +The compiler object now has `get_argument_syntax` method, which +returns a string value of `gcc`, `msvc`, or an undefined value string +value. This can be used to determine if a compiler uses gcc syntax +(`-Wfoo`), msvc syntax (`/w1234`), or some other kind of arguments. 
+ +```meson +cc = meson.get_compiler('c') + +if cc.get_argument_syntax() == 'msvc' + if cc.has_argument('/w1235') + add_project_arguments('/w1235', language : ['c']) + endif +elif cc.get_argument_syntax() == 'gcc' + if cc.has_argument('-Wfoo') + add_project_arguments('-Wfoo', language : ['c']) + endif +elif cc.get_id() == 'some other compiler' + add_project_arguments('--error-on-foo', language : ['c']) +endif +``` + +## Return `Disabler()` instead of not-found object + +Functions such as `dependency()`, `find_library()`, `find_program()`, +and `python.find_installation()` have a new keyword argument: +`disabler`. When set to `true` those functions return `Disabler()` +objects instead of not-found objects. + +## `introspect --projectinfo` can now be used without configured build directory + +This allows IDE integration to get information about the project +before the user has configured a build directory. + +Before you could use `meson.py introspect --projectinfo +build-directory`. Now you also can use `meson.py introspect +--projectinfo project-dir/meson.build`. + +The output is similar to the output with a build directory but +additionally also includes information from `introspect +--buildsystem-files`. + +For example `meson.py introspect --projectinfo test\ cases/common/47\ +subproject\ options/meson.build` This outputs (pretty printed for +readability): + +``` +{ + "buildsystem_files": [ + "meson_options.txt", + "meson.build" + ], + "name": "suboptions", + "version": null, + "descriptive_name": "suboptions", + "subprojects": [ + { + "buildsystem_files": [ + "subprojects/subproject/meson_options.txt", + "subprojects/subproject/meson.build" + ], + "name": "subproject", + "version": "undefined", + "descriptive_name": "subproject" + } + ] +} +``` + +Both usages now include a new `descriptive_name` property which always +shows the name set in the project. + +## Can specify keyword arguments with a dictionary + +You can now specify keyword arguments for any function and method call +with the `kwargs` keyword argument. This is perhaps best described +with an example: + +```meson +options = {'include_directories': include_directories('inc')} + +... + +executable(... + kwargs: options) +``` + +The above code is identical to this: + +```meson +executable(... + include_directories: include_directories('inc')) +``` + +That is, Meson will expand the dictionary given to `kwargs` as if the +entries in it had been given as keyword arguments directly. + +Note that any individual argument can be specified either directly or +with the `kwarg` dict but not both. If a key is specified twice, it +is a hard error. + +## Manpages are no longer compressed implicitly + +Earlier, the `install_man` command has automatically compressed +installed manpages into `.gz` format. This collided with manpage +compression hooks already used by various distributions. Now, manpages +are installed uncompressed and distributors are expected to handle +compressing them according to their own compression preferences. + +## Native config files + +Native files (`--native-file`) are the counterpart to cross files +(`--cross-file`), and allow specifying information about the build +machine, both when cross compiling and when not. + +Currently the native files only allow specifying the names of +binaries, similar to the cross file, for example: + +```ini +[binaries] +llvm-config = "/opt/llvm-custom/bin/llvm-config" +``` + +Will override the llvm-config used for *native* binaries. 
Targets for +the host machine will continue to use the cross file. + +## Foreach `break` and `continue` + +`break` and `continue` keywords can be used inside foreach loops. + +```meson +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach +# result is ['a', 'b'] +``` + +You can check if an array contains an element like this: +```meson +my_array = [1, 2] +if 1 in my_array +# This condition is true +endif +if 1 not in my_array +# This condition is false +endif +``` + +You can check if a dictionary contains a key like this: +```meson +my_dict = {'foo': 42, 'foo': 43} +if 'foo' in my_dict +# This condition is true +endif +if 42 in my_dict +# This condition is false +endif +if 'foo' not in my_dict +# This condition is false +endif +``` + +## Joining paths with / + +For clarity and conciseness, we recommend using the `/` operator to separate +path elements: + +```meson +joined = 'foo' / 'bar' +``` + +Before Meson 0.49, joining path elements was done with the legacy +`join_paths` function, but the `/` syntax above is now recommended. + +```meson +joined = join_paths('foo', 'bar') +``` + +This only works for strings. + +## Position-independent executables + +When `b_pie` option, or `executable()`'s `pie` keyword argument is set +to `true`, position-independent executables are built. All their +objects are built with `-fPIE` and the executable is linked with +`-pie`. Any static library they link must be built with `pic` set to +`true` (see `b_staticpic` option). + +## Deprecation warning in pkg-config generator + +All libraries passed to the `libraries` keyword argument of the +`generate()` method used to be associated with that generated +pkg-config file. That means that any subsequent call to `generate()` +where those libraries appear would add the filebase of the +`generate()` that first contained them into `Requires:` or +`Requires.private:` field instead of adding an `-l` to `Libs:` or +`Libs.private:`. + +This behaviour is now deprecated. The library that should be +associated with the generated pkg-config file should be passed as +first positional argument instead of in the `libraries` keyword +argument. The previous behaviour is maintained but prints a +deprecation warning and support for this will be removed in a future +Meson release. If you can not create the needed pkg-config file +without this warning, please file an issue with as much details as +possible about the situation. + +For example this sample will write `Requires: liba` into `libb.pc` but +print a deprecation warning: + +```meson +liba = library(...) +pkg.generate(libraries : liba) + +libb = library(...) +pkg.generate(libraries : [liba, libb]) +``` + +It can be fixed by passing `liba` as first positional argument:: +```meson +liba = library(...) +pkg.generate(liba) + +libb = library(...) +pkg.generate(libb, libraries : [liba]) +``` + +## Subprojects download, checkout, update command-line + +New command-line tool has been added to manage subprojects: + +- `meson subprojects download` to download all subprojects that have a wrap file. +- `meson subprojects update` to update all subprojects to latest version. +- `meson subprojects checkout` to checkout or create a branch in all git subprojects. 
+ +## New keyword argument `is_default` to `add_test_setup()` + +The keyword argument `is_default` may be used to set whether the test +setup should be used by default whenever `meson test` is run without +the `--setup` option. + +```meson +add_test_setup('default', is_default: true, env: 'G_SLICE=debug-blocks') +add_test_setup('valgrind', env: 'G_SLICE=always-malloc', ...) +test('mytest', exe) +``` + +For the example above, running `meson test` and `meson test +--setup=default` is now equivalent. diff --git a/meson/docs/markdown/Release-notes-for-0.50.0.md b/meson/docs/markdown/Release-notes-for-0.50.0.md new file mode 100644 index 000000000..0f7dbb878 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.50.0.md @@ -0,0 +1,350 @@ +--- +title: Release 0.50.0 +short-description: Release notes for 0.50.0 +... + +# New features + +## Added `cmake_module_path` and `cmake_args` to dependency + +The CMake dependency backend can now make use of existing +`Find.cmake` files by setting the `CMAKE_MODULE_PATH` with the +new `dependency()` property `cmake_module_path`. The paths given to +`cmake_module_path` should be relative to the project source +directory. + +Furthermore the property `cmake_args` was added to give CMake +additional parameters. + +## Added PGI compiler support + +Nvidia / PGI C, C++ and Fortran +[no-cost](https://www.pgroup.com/products/community.htm) compilers are +now supported. They have been tested on Linux so far. + + + +## Fortran Coarray + +Fortran 2008 / 2018 coarray support was added via `dependency('coarray')` + +## Libdir defaults to `lib` when cross compiling + +Previously `libdir` defaulted to the value of the build machine such +as `lib/x86_64-linux-gnu`, which is almost always incorrect when cross +compiling. It now defaults to plain `lib` when cross compiling. Native +builds remain unchanged and will point to the current system's library +dir. + +## Native and Cross File Paths and Directories + +A new `[paths]` section has been added to native and cross files. This +can be used to set paths such a prefix and libdir in a persistent way. + +## Add warning_level 0 option + +Adds support for a warning level 0 which does not enable any static +analysis checks from the compiler + +## A builtin target to run clang-format + +If you have `clang-format` installed and there is a `.clang-format` +file in the root of your master project, Meson will generate a run +target called `clang-format` so you can reformat all files with one +command: + +```meson +ninja clang-format +``` + + +## Added `.path()` method to object returned by `python.find_installation()` + +`ExternalProgram` objects as well as the object returned by the +`python3` module provide this method, but the new `python` module did +not. + +## Fix ninja console log from generators with multiple output nodes + +This resolves [issue +#4760](https://github.com/mesonbuild/meson/issues/4760) where a +generator with multiple output nodes printed an empty string to the +console + +## `introspect --buildoptions` can now be used without configured build directory + +It is now possible to run `meson introspect --buildoptions /path/to/meson.build` +without a configured build directory. + +Running `--buildoptions` without a build directory produces the same +output as running it with a freshly configured build directory. + +However, this behavior is not guaranteed if subprojects are +present. Due to internal limitations all subprojects are processed +even if they are never used in a real Meson run. 
Because of this +options for the subprojects can differ. + +## `include_directories` accepts a string + +The `include_directories` keyword argument now accepts plain strings +rather than an include directory object. Meson will transparently +expand it so that a declaration like this: + +```meson +executable(..., include_directories: 'foo') +``` + +Is equivalent to this: + +```meson +foo_inc = include_directories('foo') +executable(..., include_directories: foo_inc) +``` + +## Fortran submodule support + +Initial support for Fortran `submodule` was added, where the submodule +is in the same or different file than the parent `module`. The +submodule hierarchy specified in the source Fortran code `submodule` +statements are used by Meson to resolve source file dependencies. For +example: + +```fortran +submodule (ancestor:parent) child +``` + + +## Add `subproject_dir` to `--projectinfo` introspection output + +This allows applications interfacing with Meson (such as IDEs) to know +about an overridden subproject directory. + +## Find library with its headers + +The `find_library()` method can now also verify if the library's +headers are found in a single call, using the `has_header()` method +internally. + +```meson +# Aborts if the 'z' library is found but not its header file +zlib = find_library('z', has_headers : 'zlib.h') +# Returns not-found if the 'z' library is found but not its header file +zlib = find_library('z', has_headers : 'zlib.h', required : false) +``` + +Any keyword argument with the `header_` prefix passed to +`find_library()` will be passed to the `has_header()` method with the +prefix removed. + +```meson +libfoo = find_library('foo', + has_headers : ['foo.h', 'bar.h'], + header_prefix : '#include ', + header_include_directories : include_directories('.')) +``` + +## NetCDF + +NetCDF support for C, C++ and Fortran is added via pkg-config. + +## Added the Flang compiler + +[Flang](https://github.com/flang-compiler/flang/releases) Fortran +compiler support was added. As with other Fortran compilers, flang is +specified using `FC=flang meson ..` or similar. + +## New `not_found_message` for `dependency()` + +You can now specify a `not_found_message` that will be printed if the +specified dependency was not found. The point is to convert constructs +that look like this: + +```meson +d = dependency('something', required: false) +if not d.found() + message('Will not be able to do something.') +endif +``` + +Into this: + +```meson +d = dependency('something', + required: false, + not_found_message: 'Will not be able to do something.') +``` + +Or constructs like this: + +```meson +d = dependency('something', required: false) +if not d.found() + error('Install something by doing XYZ.') +endif +``` + +into this: + +```meson +d = dependency('something', + not_found_message: 'Install something by doing XYZ.') +``` + +Which works, because the default value of `required` is `true`. + +## Cuda support + +Compiling Cuda source code is now supported, though only with the +Ninja backend. This has been tested only on Linux for now. + +Because NVidia's Cuda compiler does not produce `.d` dependency files, +dependency tracking does not work. 
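+
+A minimal sketch of a CUDA project (file names hypothetical) simply
+lists `cuda` among the project languages and compiles `.cu` sources:
+
+```meson
+project('cudatest', 'cuda', version : '0.1')
+
+exe = executable('prog', 'main.cu')
+test('cuda smoke test', exe)
+```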
+ +## `run_command()` accepts `env` kwarg + +You can pass [`environment`](Reference-manual.md#environment-object) +object to [`run_command`](Reference-manual.md#run-command), just +like to `test`: + +```meson +env = environment() +env.set('FOO', 'bar') +run_command('command', 'arg1', 'arg2', env: env) +``` + +## `extract_objects:` accepts `File` arguments + +The `extract_objects` function now supports File objects to tell it +what to extract. Previously, file paths could only be passed as strings. + +## Changed the JSON format of the introspection + +All paths used in the Meson introspection JSON format are now +absolute. This affects the `filename` key in the targets introspection +and the output of `--buildsystem-files`. + +Furthermore, the `filename` and `install_filename` keys in the targets +introspection are now lists of strings with identical length. + +The `--target-files` option is now deprecated, since the same information +can be acquired from the `--tragets` introspection API. + +## Meson file rewriter + +This release adds the functionality to perform some basic modification +on the `meson.build` files from the command line. The currently +supported operations are: + +- For build targets: + - Add/Remove source files + - Add/Remove targets + - Modify a select set of kwargs + - Print some JSON information +- For dependencies: + - Modify a select set of kwargs +- For the project function: + - Modify a select set of kwargs + - Modify the default options list + +For more information see the rewriter documentation. + +## `introspect --scan-dependencies` can now be used to scan for dependencies used in a project + +It is now possible to run `meson introspect --scan-dependencies +/path/to/meson.build` without a configured build directory to scan for +dependencies. + +The output format is as follows: + +```json +[ + { + "name": "The name of the dependency", + "required": true, + "conditional": false, + "has_fallback": false + } +] +``` + +The `required` keyword specifies whether the dependency is marked as +required in the `meson.build` (all dependencies are required by +default). The `conditional` key indicates whether the `dependency()` +function was called inside a conditional block. In a real Meson run +these dependencies might not be used, thus they _may_ not be required, +even if the `required` key is set. The `has_fallback` key just +indicates whether a fallback was directly set in the `dependency()` +function. + +## `introspect --targets` can now be used without configured build directory + +It is now possible to run `meson introspect --targets +/path/to/meson.build` without a configured build directory. + +The generated output is similar to running the introspection with a +build directory. However, there are some key differences: + +- The paths in `filename` now are _relative_ to the future build directory +- The `install_filename` key is completely missing +- There is only one entry in `target_sources`: + - With the language set to `unknown` + - Empty lists for `compiler` and `parameters` and `generated_sources` + - The `sources` list _should_ contain all sources of the target + +There is no guarantee that the sources list in `target_sources` is +correct. There might be differences, due to internal limitations. It +is also not guaranteed that all targets will be listed in the output. +It might even be possible that targets are listed, which won't exist +when Meson is run normally. This can happen if a target is defined +inside an if statement. Use this feature with care. 
+ +## Added option to introspect multiple parameters at once + +Meson introspect can now print the results of multiple introspection +commands in a single call. The results are then printed as a single +JSON object. + +The format for a single command was not changed to keep backward +compatibility. + +Furthermore the option `-a,--all`, `-i,--indent` and +`-f,--force-object-output` were added to print all introspection +information in one go, format the JSON output (the default is still +compact JSON) and force use the new output format, even if only one +introspection command was given. + +A complete introspection dump is also stored in the `meson-info` +directory. This dump will be (re)generated each time meson updates the +configuration of the build directory. + +Additionlly the format of `meson introspect target` was changed: + + - New: the `sources` key. It stores the source files of a target and their compiler parameters. + - New: the `defined_in` key. It stores the Meson file where a target is defined + - New: the `subproject` key. It stores the name of the subproject where a target is defined. + - Added new target types (`jar`, `shared module`). + +## `meson configure` can now print the default options of an unconfigured project + +With this release, it is also possible to get a list of all build +options by invoking `meson configure` with the project source +directory or the path to the root `meson.build`. In this case, Meson +will print the default values of all options. + +## HDF5 + +HDF5 support is added via pkg-config. + +## Added the `meson-info.json` introspection file + +Meson now generates a `meson-info.json` file in the `meson-info` +directory to provide introspection information about the latest Meson +run. This file is updated when the build configuration is changed and +the build files are (re)generated. + +## New kwarg `install:` for `configure_file()` + +Previously when using `configure_file()`, you could install the +outputted file by setting the `install_dir:` keyword argument. Now, +there is an explicit kwarg `install:` to enable/disable it. Omitting +it will maintain the old behaviour. diff --git a/meson/docs/markdown/Release-notes-for-0.51.0.md b/meson/docs/markdown/Release-notes-for-0.51.0.md new file mode 100644 index 000000000..cd94f6aff --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.51.0.md @@ -0,0 +1,337 @@ +--- +title: Release 0.51.0 +short-description: Release notes for 0.51.0 +... + +# New features + +## (C) Preprocessor flag handling + +Meson previously stored `CPPFLAGS` and per-language compilation flags +separately. (That latter would come from `CFLAGS`, `CXXFLAGS`, etc., +along with `_args` options whether specified no the command-line +interface (`-D..`), `meson.build` (`default_options`), or cross file +(`[properties]`).) This was mostly unobservable, except for certain +preprocessor-only checks like `check_header` would only use the +preprocessor flags, leading to confusion if some `-isystem` was in +`CFLAGS` but not `CPPFLAGS`. Now, they are lumped together, and +`CPPFLAGS`, for the languages which are deemed to care to about, is +just another source of compilation flags along with the others already +listed. + +## Sanity checking compilers with user flags + +Sanity checks previously only used user-specified flags for cross +compilers, but now do in all cases. + +All compilers Meson might decide to use for the build are "sanity +checked" before other tests are run. 
This usually involves building +simple executable and trying to run it. Previously user flags +(compilation and/or linking flags) were used for sanity checking cross +compilers, but not native compilers. This is because such flags might +be essential for a cross binary to succeed, but usually aren't for a +native compiler. + +In recent releases, there has been an effort to minimize the +special-casing of cross or native builds so as to make building more +predictable in less-tested cases. Since this the user flags are +necessary for cross, but not harmful for native, it makes more sense +to use them in all sanity checks than use them in no sanity checks, so +this is what we now do. + +## New `sourceset` module + +A new module, `sourceset`, was added to help building many binaries +from the same source files. Source sets associate source files and +dependencies to keys in a `configuration_data` object or a dictionary; +they then take multiple `configuration_data` objects or dictionaries, +and compute the set of source files and dependencies for each of those +configurations. + +## n_debug=if-release and buildtype=plain means no asserts + +Previously if this combination was used then assertions were enabled, +which is fairly surprising behavior. + +## `target_type` in `build_targets` accepts the value 'shared_module' + +The `target_type` keyword argument in `build_target()` now accepts the +value `'shared_module'`. + +The statement + +```meson +build_target(..., target_type: 'shared_module') +``` + +is equivalent to this: + +```meson +shared_module(...) +``` + +## New modules kwarg for python.find_installation + +This mirrors the modules argument that some kinds of dependencies +(such as qt, llvm, and cmake based dependencies) take, allowing you to +check that a particular module is available when getting a python +version. + +```meson +py = import('python').find_installation('python3', modules : ['numpy']) +``` + +## Support for the Intel Compiler on Windows (ICL) + +Support has been added for ICL.EXE and ifort on windows. The support +should be on part with ICC support on Linux/MacOS. The ICL C/C++ +compiler behaves like Microsoft's CL.EXE rather than GCC/Clang like +ICC does, and has a different id, `intel-cl` to differentiate it. + +```meson +cc = meson.get_compiler('c') +if cc.get_id == 'intel-cl' + add_project_argument('/Qfoobar:yes', language : 'c') +endif +``` + +## Added basic support for the Xtensa CPU toolchain + +You can now use `xt-xcc`, `xt-xc++`, `xt-nm`, etc... on your cross +compilation file and Meson won't complain about an unknown toolchain. + + +## Dependency objects now have a get_variable method + +This is a generic replacement for type specific variable getters such as +`ConfigToolDependency.get_configtool_variable` and +`PkgConfigDependency.get_pkgconfig_variable`, and is the only way to query +such variables from cmake dependencies. + +This method allows you to get variables without knowing the kind of +dependency you have. + +```meson +dep = dependency('could_be_cmake_or_pkgconfig') +# cmake returns 'YES', pkg-config returns 'ON' +if ['YES', 'ON'].contains(dep.get_variable(pkgconfig : 'var-name', cmake : 'COP_VAR_NAME', default_value : 'NO')) + error('Cannot build your project when dep is built with var-name support') +endif +``` + +## CMake prefix path overrides + +When using pkg-config as a dependency resolver we can pass +`-Dpkg_config_path=$somepath` to extend or overwrite where pkg-config +will search for dependencies. 
Now cmake can do the same, as long as +the dependency uses a ${Name}Config.cmake file (not a +Find{$Name}.cmake file), by passing +`-Dcmake_prefix_path=list,of,paths`. It is important that point this +at the prefix that the dependency is installed into, not the cmake +path. + +If you have installed something to `/tmp/dep`, which has a layout like: +``` +/tmp/dep/lib/cmake +/tmp/dep/bin +``` + +then invoke Meson as `meson builddir/ -Dcmake_prefix_path=/tmp/dep` + +## Tests that should fail but did not are now errors + +You can tag a test as needing to fail like this: + +```meson +test('shoulfail', exe, should_fail: true) +``` + +If the test passes the problem is reported in the error logs but due +to a bug it was not reported in the test runner's exit code. Starting +from this release the unexpected passes are properly reported in the +test runner's exit code. This means that test runs that were passing +in earlier versions of Meson will report failures with the current +version. This is a good thing, though, since it reveals an error in +your test suite that has, until now, gone unnoticed. + +## New target keyword argument: `link_language` + +There may be situations for which the user wishes to manually specify +the linking language. For example, a C++ target may link C, Fortran, +etc. and perhaps the automatic detection in Meson does not pick the +desired compiler. The user can manually choose the linker by language +per-target like this example of a target where one wishes to link with +the Fortran compiler: + +```meson +executable(..., link_language : 'fortran') +``` + +A specific case this option fixes is where for example the main +program is Fortran that calls C and/or C++ code. The automatic +language detection of Meson prioritizes C/C++, and so an compile-time +error results like `undefined reference to main`, because the linker +is C or C++ instead of Fortran, which is fixed by this per-target +override. + +## New module to parse kconfig output files + +The new module `unstable-kconfig` adds the ability to parse and use +kconfig output files from `meson.build`. + + +## Add new `meson subprojects foreach` command + +`meson subprojects` has learned a new `foreach` command which accepts +a command with arguments and executes it in each subproject directory. + +For example this can be useful to check the status of subprojects +(e.g. with `git status` or `git diff`) before performing other actions +on them. + + +## Added c17 and c18 as c_std values for recent GCC and Clang Versions + +For gcc version 8.0 and later, the values c17, c18, gnu17, and gnu18 +were added to the accepted values for built-in compiler option c_std. + +For Clang version 10.0 and later on Apple OSX (Darwin), and for +version 7.0 and later on other platforms, the values c17 and gnu17 +were added as c_std values. + +## gpgme dependency now supports gpgme-config + +Previously, we could only detect GPGME with custom invocations of +`gpgme-config` or when the GPGME version was recent enough (>=1.13.0) +to install pkg-config files. Now we added support to Meson allowing us +to use `dependency('gpgme')` and fall back on `gpgme-config` parsing. + +## Can link against custom targets + +The output of `custom_target` and `custom_target[i]` can be used in +`link_with` and `link_whole` keyword arguments. This is useful for +integrating custom code generator steps, but note that there are many +limitations: + + - Meson can not know about link dependencies of the custom target. 
If + the target requires further link libraries, you need to add them manually + + - The user is responsible for ensuring that the code produced by + different toolchains are compatible. + + - `custom_target` may only be used when it has a single output file. + Use `custom_target[i]` when dealing with multiple output files. + + - The output file must have the correct file name extension. + + +## Removed the deprecated `--target-files` API + +The `--target-files` introspection API is now no longer available. The same +information can be queried with the `--targets` API introduced in 0.50.0. + +## Generators have a new `depends` keyword argument + +Generators can now specify extra dependencies with the `depends` +keyword argument. It matches the behaviour of the same argument in +other functions and specifies that the given targets must be built +before the generator can be run. This is used in cases such as this +one where you need to tell a generator to indirectly invoke a +different program. + +```meson +exe = executable(...) +cg = generator(program_runner, + output: ['@BASENAME@.c'], + arguments: ['--use-tool=' + exe.full_path(), '@INPUT@', '@OUTPUT@'], + depends: exe) +``` + +## Specifying options per mer machine + +Previously, no cross builds were controllable from the command line. +Machine-specific options like the pkg-config path and compiler options +only affected native targets, that is to say all targets in native +builds, and `native: true` targets in cross builds. Now, prefix the +option with `build.` to affect build machine targets, and leave it +unprefixed to affect host machine targets. + +For those trying to ensure native and cross builds to the same +platform produced the same result, the old way was frustrating because +very different invocations were needed to affect the same targets, if +it was possible at all. Now, the same command line arguments affect +the same targets everywhere --- Meson is closer to ignoring whether +the "overall" build is native or cross, and just caring about whether +individual targets are for the build or host machines. + + +## subproject.get_variable() now accepts a `fallback` argument + +Similar to `get_variable`, a fallback argument can now be passed to +`subproject.get_variable()`, it will be returned if the requested +variable name did not exist. + +``` meson +var = subproject.get_variable('does-not-exist', 'fallback-value') +``` + +## Add keyword `static` to `find_library` + +`find_library` has learned the `static` keyword. They keyword must be +a boolean, where `true` only searches for static libraries and `false` +only searches for dynamic/shared. Leaving the keyword unset will keep +the old behavior of first searching for dynamic and then falling back +to static. + +## Fortran `include` statements recursively parsed + +While non-standard and generally not recommended, some legacy Fortran +programs use `include` directives to inject code inline. Since v0.51, +Meson can handle Fortran `include` directives recursively. + +DO NOT list `include` files as sources for a target, as in general +their syntax is not correct as a standalone target. In general +`include` files are meant to be injected inline as if they were copy +and pasted into the source file. + +`include` was never standard and was superceded by Fortran 90 `module`. + +The `include` file is only recognized by Meson if it has a Fortran +file suffix, such as `.f` `.F` `.f90` `.F90` or similar. 
This is to +avoid deeply nested scanning of large external legacy C libraries that +only interface to Fortran by `include biglib.h` or similar. + +## CMake subprojects + +Meson can now directly consume CMake based subprojects with the +CMake module. + +Using CMake subprojects is similar to using the "normal" Meson +subprojects. They also have to be located in the `subprojects` +directory. + +Example: + +```cmake +add_library(cm_lib SHARED ${SOURCES}) +``` + +```meson +cmake = import('cmake') + +# Configure the CMake project +sub_proj = cmake.subproject('libsimple_cmake') + +# Fetch the dependency object +cm_lib = sub_proj.dependency('cm_lib') + +executable(exe1, ['sources'], dependencies: [cm_lib]) +``` + +It should be noted that not all projects are guaranteed to work. The +safest approach would still be to create a `meson.build` for the +subprojects in question. + +## Multiple cross files can be specified + +`--cross-file` can be passed multiple times, with the configuration files overlaying the same way as `--native-file`. diff --git a/meson/docs/markdown/Release-notes-for-0.52.0.md b/meson/docs/markdown/Release-notes-for-0.52.0.md new file mode 100644 index 000000000..05e2dc27e --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.52.0.md @@ -0,0 +1,252 @@ +--- +title: Release 0.52.0 +short-description: Release notes for 0.52.0 +... + +# New features + +## Gettext targets are ignored if `gettext` is not installed + +Previously the `i18n` module has errored out when `gettext` tools are +not installed on the system. Starting with this version they will +become no-ops instead. This makes it easier to build projects on +minimal environments (such as when bootstrapping) that do not have +translation tools installed. + +## Support taking environment values from a dictionary + +`environment()` now accepts a dictionary as first argument. If +provided, each key/value pair is added into the `environment_object` +as if `set()` method was called for each of them. + +On the various functions that take an `env:` keyword argument, you may +now give a dictionary. + +## alias_target + +``` meson +runtarget alias_target(target_name, dep1, ...) +``` + +This function creates a new top-level target. Like all top-level +targets, this integrates with the selected backend. For instance, with +Ninja you can run it as `ninja target_name`. This is a dummy target +that does not execute any command, but ensures that all dependencies +are built. Dependencies can be any build target (e.g. return value of +executable(), custom_target(), etc) + + +## Enhancements to the pkg_config_path argument + +Setting sys_root in the [properties] section of your cross file will +now set PKG_CONFIG_SYSROOT_DIR automatically for host system +dependencies when cross compiling. + +## The Meson test program now accepts an additional "--gdb-path" argument to specify the GDB binary + +`meson test --gdb testname` invokes GDB with the specific test case. However, sometimes GDB is not in the path or a GDB replacement is wanted. +Therefore, a `--gdb-path` argument was added to specify which binary is executed (per default `gdb`): + +```console +$ meson test --gdb --gdb-path /my/special/location/for/gdb testname +$ meson test --gdb --gdb-path cgdb testname +``` + +## Better support for illumos and Solaris + +illumos (and hopefully Solaris) support has been dramatically +improved, and one can reasonably expect projects to compile. 
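+
+As a brief, hedged illustration of what the improved support enables, ordinary
+platform checks can now be relied upon on those systems; the sketch below
+assumes `host_machine.system()` reports `'sunos'` on illumos and Solaris and
+uses made-up file and target names:
+
+```meson
+# Hypothetical example: select a platform-specific source file.
+if host_machine.system() == 'sunos'
+  platform_src = 'backend_eventports.c'   # assumed illumos/Solaris implementation
+else
+  platform_src = 'backend_poll.c'         # assumed portable fallback
+endif
+executable('server', 'main.c', platform_src)
+```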
+
+## Splitting of Compiler.get_function_attribute('visibility')
+
+On macOS there is no `protected` visibility, which results in the
+visibility check always failing. 0.52.0 introduces two changes to
+improve this situation:
+
+1. the "visibility" check no longer includes "protected"
+2. a new set of "split" checks is introduced which check for a single
+   attribute instead of all attributes.
+
+These new attributes are:
+* visibility:default
+* visibility:hidden
+* visibility:internal
+* visibility:protected
+
+## Clang-tidy target
+
+If `clang-tidy` is installed and the project's source root contains a
+`.clang-tidy` (or `_clang-tidy`) file, Meson will automatically define
+a `clang-tidy` target that runs Clang-Tidy on all source files.
+
+If you have defined your own `clang-tidy` target, Meson will not
+generate its own target.
+
+## Add blocks dependency
+
+Add `dependency('blocks')` to use the Clang blocks extension.
+
+## Meson's builtin b_lundef is now supported on macOS
+
+This has always been possible, but there are some additional
+restrictions on macOS (mainly due to Apple-only features). With the
+internal linker re-architecture this has become possible.
+
+## Compiler and dynamic linker representation split
+
+0.52.0 includes a massive refactor of the representations of compilers to
+tease apart the representations of compilers and dynamic linkers (ld). This
+fixes a number of compiler/linker combinations. In particular this fixes
+using GCC and vanilla clang on macOS.
+
+## Add `depth` option to `wrap-git`
+
+To allow shallow cloning, an option `depth` has been added to `wrap-git`.
+This applies recursively to submodules when `clone-recursive` is set to `true`.
+
+Note that the git server may have to be configured to support shallow cloning
+not only for branches but also for tags.
+
+## Enhancements to the source_set module
+
+`SourceSet` objects now provide the `all_dependencies()` method, which
+complements the existing `all_sources()` method.
+
+## Added `--only test(s)` option to run_project_tests.py
+
+Individual tests or a list of tests from run_project_tests.py can be selected like:
+```
+python run_project_tests.py --only fortran
+
+python run_project_tests.py --only fortran python3
+```
+
+This assists Meson development by only running the tests for the
+portion of Meson being worked on during local development.
+
+## Experimental Webassembly support via Emscripten
+
+Meson now supports compiling code to Webassembly using the Emscripten
+compiler. As with most things regarding Webassembly, this support is
+subject to change.
+
+## Version check in `find_program()`
+
+A new `version` keyword argument has been added to `find_program` to
+specify the required version. See [`dependency()`](#dependency) for
+the argument format. The version of the program is determined by running
+the `program_name --version` command. If stdout is empty it falls back to
+stderr. If the output contains more text than simply a version number,
+only the first occurrence of numbers separated by dots is kept. If the
+output is more complicated than that, the version checking will have
+to be done manually using [`run_command()`](#run_command).
+
+## Added `vs_module_defs` to `shared_module()`
+
+Like `shared_library()`, `shared_module()` now accepts a
+`vs_module_defs` argument for controlling symbol exports, etc.
+
+## Improved support for static libraries
+
+Static libraries had numerous shortcomings in the past, especially
+when using uninstalled static libraries.
This release brings many +internal changes in the way they are handled, including: + +- `link_whole:` of static libraries. In the example below, lib2 used to miss + symbols from lib1 and was unusable. +```meson +lib1 = static_library(sources) +lib2 = static_library(other_sources, link_whole : lib1, install : true) +``` +- `link_with:` of a static library with an uninstalled static library. In the +example below, lib2 now implicitly promote `link_with:` to `link_whole:` because +the installed lib2 would oterhwise be unusable. +```meson +lib1 = static_library(sources, install : false) +lib2 = static_library(sources, link_with : lib1, install : true) +``` +- pkg-config generator do not include uninstalled static libraries. In the example + below, the generated `.pc` file used to be unusable because it contained + `Libs.private: -llib1` and `lib1.a` is not installed. `lib1` is now omitted + from the `.pc` file because the `link_with:` has been promoted to + `link_whole:` (see above) and thus lib1 is not needed to use lib2. +```meson +lib1 = static_library(sources, install : false) +lib2 = both_libraries(sources, link_with : lib1, install : true) +pkg.generate(lib2) +``` + +Many projects have been using `extract_all_objects()` to work around +those issues, and hopefully those hacks could now be removed. Since +this is a pretty large change, please double check if your static +libraries behave correctly, and report any regression. + +## Enhancements to the kconfig module + +`kconfig.load()` may now accept a `configure_file()` as input file. + +## Added `include_type` kwarg to `dependency` + +The `dependency()` function now has a `include_type` kwarg. It can take the +values `'preserve'`, `'system'` and `'non-system'`. If it is set to `'system'`, +all include directories of the dependency are marked as system dependencies. + +The default value of `include_type` is `'preserve'`. + +Additionally, it is also possible to check and change the +`include_type` state of an existing dependency object with the new +`include_type()` and `as_system()` methods. + +## Enhancements to `configure_file()` + +`input:` now accepts multiple input file names for `command:`-configured file. + +`depfile:` keyword argument is now accepted. The dependency file can +list all the additional files the configure target depends on. + +## Projects args can be set separately for build and host machines (potentially breaking change) + +Simplify `native` flag behavior in `add_global_arguments`, +`add_global_link_arguments`, `add_project_arguments` and +`add_project_link_arguments`. The rules are now very simple: + + - `native: true` affects `native: true` targets + + - `native: false` affects `native: false` targets + + - No native flag is the same as `native: false` + +This further simplifies behavior to match the "build vs host" decision +done in last release with `c_args` vs `build_c_args`. The underlying +motivation in both cases is to execute the same commands whether the +overall build is native or cross. + +## Allow checking if a variable is a disabler + +Added the function `is_disabler(var)`. Returns true if a variable is a disabler +and false otherwise. + + +## gtkdoc-check support + +`gnome.gtkdoc()` now has a `check` keyword argument. If `true` runs it +will run `gtkdoc-check` when running unit tests. Note that this has +the downside of rebuilding the doc for each build, which is often very +slow. It usually should be enabled only in CI. 
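+
+For illustration, the new keyword could be tied to a project option so that
+the check only runs in CI; the `gtk_doc_check` option name is hypothetical,
+not something Meson defines:
+
+```meson
+gnome = import('gnome')
+gnome.gtkdoc('mylib',
+  main_xml : 'mylib-docs.xml',
+  src_dir : include_directories('src'),
+  # Run gtkdoc-check as part of the unit tests only when the CI option is set,
+  # since it forces a slow documentation rebuild on every build.
+  check : get_option('gtk_doc_check'),
+)
+```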
+ +## `gnome.gtkdoc()` returns target object + +`gnome.gtkdoc()` now returns a target object that can be passed as +dependency to other targets using generated doc files (e.g. in +`content_files` of another doc). + +## Dist is now a top level command + +Previously creating a source archive could only be done with `ninja +dist`. Starting with this release Meson provides a top level `dist` +that can be invoked directly. It also has a command line option to +determine which kinds of archives to create: + +```meson +meson dist --formats=xztar,zip +``` diff --git a/meson/docs/markdown/Release-notes-for-0.53.0.md b/meson/docs/markdown/Release-notes-for-0.53.0.md new file mode 100644 index 000000000..73a71db57 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.53.0.md @@ -0,0 +1,219 @@ +--- +title: Release 0.53.0 +short-description: Release notes for 0.53.0 +... + +# New features + +## A new module for filesystem operations + +The new `fs` module can be used to examine the contents of the current +file system. + +```meson +fs = import('fs') +assert(fs.exists('important_file'), + 'The important file is missing.') +``` + +## meson dist --include-subprojects + +`meson dist` command line now gained `--include-subprojects` command +line option. When enabled, the source tree of all subprojects used by +the current build will also be included in the final tarball. This is +useful to distribute self contained tarball that can be built offline +(i.e. `--wrap-mode=nodownload`). + +## Added new Meson templates for `Dlang`, `Rust`, `Objective-C` + +Meson now ships with predefined project templates for `Dlang`, +`Fortran`, `Rust`, `Objective-C`, and by passing the associated flags `d`, +`fortran`, `rust`, `objc` to `meson init --language`. + +## Add a new summary() function + +A new function [`summary()`](Reference-manual.md#summary) has been +added to summarize build configuration at the end of the build +process. + +Example: +```meson +project('My Project', version : '1.0') +summary({'bindir': get_option('bindir'), + 'libdir': get_option('libdir'), + 'datadir': get_option('datadir'), + }, section: 'Directories') +summary({'Some boolean': false, + 'Another boolean': true, + 'Some string': 'Hello World', + 'A list': ['string', 1, true], + }, section: 'Configuration') +``` + +Output: +``` +My Project 1.0 + + Directories + prefix: /opt/gnome + bindir: bin + libdir: lib/x86_64-linux-gnu + datadir: share + + Configuration + Some boolean: False + Another boolean: True + Some string: Hello World + A list: string + 1 + True +``` + +## Generic Overrider for Dynamic Linker selection + +Previous to Meson 0.52.0 you set the dynamic linker using compiler +specific flags passed via language flags and hoped things worked out. +In version 0.52.0 Meson started detecting the linker and making +intelligent decisions about using it. Unfortunately this broke +choosing a non-default linker. + +Now there is a generic mechanism for doing this. In 0.53.0, you can +use the `LD` environment variable. **In 0.53.1** this was changed to +`_LD`, such as `CC_LD`, `CXX_LD`, `D_LD`, etc due +to regressions. The usual Meson [environment variable +rules](https://mesonbuild.com/Running-Meson.html#environment-variables) +apply. Alternatively, you can add the following to a cross or native +file: + +In 0.53.0: + +```ini +[binaries] +ld = 'gold' +``` + +**In 0.53.1 or newer**: + +```ini +[binaries] +c = 'gcc' +c_ld = 'gold' +``` + +```ini +[binaries] +c = 'clang' +c_ld = 'lld' +``` + +And Meson will select the linker if possible. 
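+
+For completeness, the 0.53.1-style environment variables described above can
+also be set directly on the command line; this is only a usage sketch and
+assumes `gold` and `lld` are installed and usable with the chosen compilers:
+
+```console
+$ CC=gcc CC_LD=gold meson builddir-gcc/
+$ CC=clang CC_LD=lld meson builddir-clang/
+```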
+ +## `fortran_std` option + +**new in 0.53.0** Akin to the `c_std` and `cpp_std` options, the +`fortran_std` option sets Fortran compilers to warn or error on +non-Fortran standard code. Only the Gfortran and Intel Fortran +compilers have support for this option. Other Fortran compilers ignore +the `fortran_std` option. + +Supported values for `fortran_std` include: + +* `legacy` for non-conforming code--this is especially important for Gfortran, which by default errors on old non-compliant Fortran code +* `f95` for Fortran 95 compliant code. +* `f2003` for Fortran 2003 compliant code. +* `f2008` for Fortran 2008 compliant code. +* `f2018` for Fortran 2018 compliant code. + +## python.dependency() embed kwarg + +Added the `embed` kwarg to the python module dependency function to +select the python library that can be used to embed python into an +application. + +## Scalapack + +added in **0.53.0**: + +```meson +scalapack = dependency('scalapack') +``` + +Historically and through today, typical Scalapack setups have broken +and incomplete pkg-config or FindScalapack.cmake. Meson handles +finding Scalapack on setups including: + +* Linux: Intel MKL or OpenMPI + Netlib +* MacOS: Intel MKL or OpenMPI + Netlib +* Windows: Intel MKL (OpenMPI not available on Windows) + +## Search directories for `find_program()` + +It is now possible to give a list of absolute paths where `find_program()` should +also search, using the `dirs` keyword argument. + +For example on Linux `/sbin` and `/usr/sbin` are not always in the `$PATH`: +```meson +prog = find_program('mytool', dirs : ['/usr/sbin', '/sbin']) +``` + +## Source tags targets + +When the respective tools are available, 'ctags', 'TAGS' and 'cscope' +targets will be generated by Meson, unless you have defined your own. + +## Dictionary entry using string variable as key + +Keys can now be any expression evaluating to a string value, not limited +to string literals any more. +```meson +d = {'a' + 'b' : 42} +k = 'cd' +d += {k : 43} +``` + +## Improved CMake subprojects support + +With this release even more CMake projects are supported via [CMake +subprojects](CMake-module.md#cmake-subprojects) due to these internal +improvements: + +- Use the CMake file API for CMake >=3.14 +- Handle the explicit dependencies via `add_dependency` +- Basic support for `add_custom_target` +- Improved `add_custom_command` support +- Object library support on Windows + +## compiler.get_linker_id() + +since 0.53.0, `compiler.get_linker_id()` allows retrieving a lowercase +name for the linker. Since each compiler family can typically use a +variety of linkers depending on operating system, this helps users +define logic for corner cases not otherwise easily handled. + +## CUDA dependency + +Native support for compiling and linking against the CUDA Toolkit +using the `dependency` function: + +```meson +project('CUDA test', 'cpp', meson_version: '>= 0.53.0') +exe = executable('prog', 'prog.cc', dependencies: dependency('cuda')) +``` + +See [the CUDA dependency](Dependencies.md#cuda) for more information. + +## Added global option to disable C++ RTTI + +The new boolean option is called `cpp_rtti`. 
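+
+As a short usage sketch, it behaves like any other built-in boolean option and
+can be set at configure time or changed later:
+
+```console
+$ meson builddir/ -Dcpp_rtti=false
+$ meson configure builddir/ -Dcpp_rtti=true
+```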
+ +## Introspection API changes + +dependencies (--dependencies, intro-dependencies.json): +- added the `version` key + +scanning dependencies (--scan-dependencies): +- added the `version` key containing the required dependency version + +tests and benchmarks (--tests, --benchmarks, intro-tests.json, +intro-benchmarks.json): +- added the `protocol` key diff --git a/meson/docs/markdown/Release-notes-for-0.54.0.md b/meson/docs/markdown/Release-notes-for-0.54.0.md new file mode 100644 index 000000000..7e9fbf225 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.54.0.md @@ -0,0 +1,394 @@ +--- +title: Release 0.54.0 +short-description: Release notes for 0.54.0 +... + +# New features + +## Emscripten (emcc) now supports threads + +In addition to properly setting the compile and linker arguments, a +new Meson builtin has been added to control the PTHREAD_POOL_SIZE +option, `-D_thread_count`, which may be set to any integer value +greater than 0. If it set to 0 then the PTHREAD_POOL_SIZE option will +not be passed. + +## Introduce dataonly for the pkgconfig module + +This allows users to disable writing out the inbuilt variables to the +pkg-config file as they might actually not be required. + +One reason to have this is for architecture-independent pkg-config +files in projects which also have architecture-dependent outputs. + +``` +pkgg.generate( + name : 'libhello_nolib', + description : 'A minimalistic pkgconfig file.', + version : libver, + dataonly: true +) +``` + +## Consistently report file locations relative to cwd + +The paths for filenames in error and warning locations are now +consistently reported relative to the current working directory (when +possible), or as absolute paths (when a relative path does not exist, +e.g. a Windows path starting with a different drive letter to the +current working directory). + +(The previous behaviour was to report a path relative to the source +root for all warnings and most errors, and relative to cwd for certain +parser errors) + +## `dependency()` consistency + +The first time a dependency is found, using `dependency('foo', ...)`, +the return value is now cached. Any subsequent call will return the +same value as long as version requested match, otherwise not-found +dependency is returned. This means that if a system dependency is +first found, it won't fallback to a subproject in a subsequent call +any more and will rather return not-found instead if the system +version does not match. Similarly, if the first call returns the +subproject fallback dependency, it will also return the subproject +dependency in a subsequent call even if no fallback is provided. + +For example, if the system has `foo` version 1.0: +```meson +# d2 is set to foo_dep and not the system dependency, even without fallback argument. +d1 = dependency('foo', version : '>=2.0', required : false, + fallback : ['foo', 'foo_dep']) +d2 = dependency('foo', version : '>=1.0', required : false) +``` +```meson +# d2 is not-found because the first call returned the system dependency, but its version is too old for 2nd call. +d1 = dependency('foo', version : '>=1.0', required : false) +d2 = dependency('foo', version : '>=2.0', required : false, + fallback : ['foo', 'foo_dep']) +``` + +## Override `dependency()` + +It is now possible to override the result of `dependency()` to point +to any dependency object you want. The overriding is global and +applies to every subproject from there on. 
+ +For example, this subproject provides 2 libraries with version 2.0: + +```meson +project(..., version : '2.0') + +libfoo = library('foo', ...) +foo_dep = declare_dependency(link_with : libfoo) +meson.override_dependency('foo', foo_dep) + +libbar = library('bar', ...) +bar_dep = declare_dependency(link_with : libbar) +meson.override_dependency('bar', bar_dep) +``` + +Assuming the system has `foo` and `bar` 1.0 installed, and master project does this: +```meson +foo_dep = dependency('foo', version : '>=2.0', fallback : ['foo', 'foo_dep']) +bar_dep = dependency('bar') +``` + +This used to mix system 1.0 version and subproject 2.0 dependencies, +but thanks to the override `bar_dep` is now set to the subproject's +version instead. + +Another case this can be useful is to force a subproject to use a +specific dependency. If the subproject does `dependency('foo')` but +the main project wants to provide its own implementation of `foo`, it +can for example call `meson.override_dependency('foo', +declare_dependency(...))` before configuring the subproject. + +## Simplified `dependency()` fallback + +In the case a subproject `foo` calls +`meson.override_dependency('foo-2.0', foo_dep)`, the parent project +can omit the dependency variable name in fallback keyword argument: +`dependency('foo-2.0', fallback : 'foo')`. + +## Backend agnostic compile command + +A new `meson compile` command has been added to support backend +agnostic compilation. It accepts two arguments, `-j` and `-l`, which +are used if possible (`-l` does nothing with msbuild). A `-j` or `-l` +value < 1 lets the backend decide how many threads to use. For msbuild +this means `-m`, for ninja it means passing no arguments. + +```console +meson builddir --backend vs +meson compile -C builddir -j0 # this is the same as `msbuild builddir/my.sln -m` +``` + +```console +meson builddir +meson compile -C builddir -j3 # this is the same as `ninja -C builddir -j3` +``` + +Additionally `meson compile` provides a `--clean` switch to clean the +project. + +A complete list of arguments is always documented via `meson compile --help` + +## Native (build machine) compilers not always required + +`add_languages()` gained a `native:` keyword, indicating if a native or cross +compiler is to be used. + +For the benefit of existing simple build definitions which don't +contain any `native: true` targets, without breaking backwards +compatibility for build definitions which assume that the native +compiler is available after `add_languages()`, if the `native:` +keyword is absent the languages may be used for either the build or +host machine, but are never required for the build machine. + +This changes the behaviour of the following Meson fragment (when +cross-compiling but a native compiler is not available) from reporting +an error at `add_language` to reporting an error at `executable`. + +``` +add_language('c') +executable('main', 'main.c', native: true) +``` + +## Summary improvements + +A new `list_sep` keyword argument has been added to `summary()` +function. If defined and the value is a list, elements will be +separated by the provided string instead of being aligned on a new +line. + +The automatic `subprojects` section now also print the number of +warnings encountered during that subproject configuration, or the +error message if the configuration failed. 
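+
+A minimal sketch of the new keyword (the section name and values are invented
+for illustration):
+
+```meson
+summary({'Enabled backends' : ['alsa', 'pulse', 'jack']},
+        section : 'Audio',
+        list_sep : ', ')
+```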
+ +## Add a system type dependency for zlib + +This allows zlib to be detected on macOS and FreeBSD without the use +of pkg-config or cmake, neither of which are part of the base install +on those OSes (but zlib is). + +A side effect of this change is that `dependency('zlib')` also works +with cmake instead of requiring `dependency('ZLIB')`. + +## Added 'name' method + +Build target objects (as returned by executable(), library(), ...) now +have a name() method. + +## New option `--quiet` to `meson install` + +Now you can run `meson install --quiet` and Meson will not verbosely +print every file as it is being installed. As before, the full log is +always available inside the builddir in `meson-logs/install-log.txt`. + +When this option is passed, install scripts will have the environment +variable `MESON_INSTALL_QUIET` set. + +Numerous speed-ups were also made for the install step, especially on +Windows where it is now 300% to 1200% faster than before depending on +your workload. + +## Property support emscripten's wasm-ld + +Before 0.54.0 we treated emscripten as both compiler and linker, which +isn't really true. It does have a linker, called wasm-ld (Meson's name +is ld.wasm). This is a special version of clang's lld. This will now +be detected properly. + +## Skip sanity tests when cross compiling + +For certain cross compilation environments it is not possible to +compile a sanity check application. This can now be disabled by adding +the following entry to your cross file's `properties` section: + +``` +skip_sanity_check = true +``` + +## Support for overiding the linker with ldc and gdc + +LDC (the llvm D compiler) and GDC (The Gnu D Compiler) now honor D_LD +linker variable (or d_ld in the cross file) and is able to pick +different linkers. + +GDC supports all of the same values as GCC, LDC supports ld.bfd, +ld.gold, ld.lld, ld64, link, and lld-link. + +## Native file properties + +As of Meson 0.54.0, the `--native-file nativefile.ini` can contain: + +* binaries +* paths +* properties + +which are defined and used the same way as in cross files. The +`properties` are new for Meson 0.54.0, and are read like: + +```meson +x = meson.get_external_property('foobar', 'foo') +``` + +where `foobar` is the property name, and the optional `foo` is the +fallback string value. + +For cross-compiled projects, `get_external_property()` reads the +cross-file unless `native: true` is specified. + +## Changed the signal used to terminate a test process (group) + +A test process (group) is now terminated via SIGTERM instead of +SIGKILL allowing the signal to be handled. However, it is now the +responsibility of the custom signal handler (if any) to ensure that +any process spawned by the top-level test processes is correctly +killed. + +## Dynamic Linker environment variables actually match docs + +The docs have always claimed that the Dynamic Linker environment +variable should be `${COMPILER_VAR}_LD`, but that's only the case for +about half of the variables. The other half are different. In 0.54.0 +the variables match. The old variables are still supported, but are +deprecated and raise a deprecation warning. + +## Per subproject `default_library` and `werror` options + +The `default_library` and `werror` built-in options can now be defined +per subproject. This is useful for example when building shared +libraries in the main project, but static link a subproject, or when +the main project must build with no warnings but some subprojects +cannot. 
+ +Most of the time this would be used either by the parent project by +setting subproject's default_options (e.g. `subproject('foo', +default_options: 'default_library=static')`), or by the user using the +command line `-Dfoo:default_library=static`. + +The value is overridden in this order: +- Value from parent project +- Value from subproject's default_options if set +- Value from subproject() default_options if set +- Value from command line if set + +## Environment Variables with Cross Builds + +Previously in Meson, variables like `CC` effected both the host and +build platforms for native builds, but the just the build platform for +cross builds. Now `CC_FOR_BUILD` is used for the build platform in +cross builds. + +This old behavior is inconsistent with the way Autotools works, which +undermines the purpose of distro-integration that is the only reason +environment variables are supported at all in Meson. The new behavior +is not quite the same, but doesn't conflict: Meson doesn't always +repond to an environment when Autoconf would, but when it does it +interprets it as Autotools would. + +## Added 'pkg_config_libdir' property + +Allows to define a list of folders used by pkg-config for a cross +build and avoid a system directories use. + +## More new sample Meson templates for (`Java`, `Cuda`, and more) + +Meson now ships with predefined project templates for `Java`, `Cuda`, +`Objective-C++`, and `C#`, we provided with associated values for +corresponding languages, available for both library, and executable. + +## Ninja version requirement bumped to 1.7 + +Meson now uses the [Implicit +outputs](https://ninja-build.org/manual.html#ref_outputs) feature of +Ninja for some types of targets that have multiple outputs which may +not be listed on the command-line. This feature requires Ninja 1.7+. + +Note that the latest version of [Ninja available in Ubuntu +16.04](https://packages.ubuntu.com/search?keywords=ninja-build&searchon=names&suite=xenial-backports§ion=all) +(the oldest Ubuntu LTS at the time of writing) is 1.7.1. If your +distro does not ship with a new-enough Ninja, you can download the +latest release from Ninja's GitHub page: +https://github.com/ninja-build/ninja/releases + +## Added `-C` argument to `meson init` command + +The Meson init assumes that it is run inside the project root +directory. If this isn't the case, you can now use `-C` to specify the +actual project source directory. + +## More than one argument to `message()` and `warning()` + +Arguments passed to `message()` and `warning()` will be printed +separated by space. + +## Added `has_tools` method to qt module + +It should be used to compile optional Qt code: +```meson +qt5 = import('qt5') +if qt5.has_tools(required: get_option('qt_feature')) + moc_files = qt5.preprocess(...) + ... +endif +``` + +## The MSI installer is only available in 64 bit version + +Microsoft ended support for Windows 7, so only 64 bit Windows OSs are +officially supported. Thus only a 64 bit MSI installer will be +provided going forward. People needing a 32 bit version can build +their own with the `msi/createmsi.py` script in Meson's source +repository. + +## Uninstalled pkg-config files + +**Note**: the functionality of this module is governed by [Meson's + rules on mixing build systems](Mixing-build-systems.md). + +The `pkgconfig` module now generates uninstalled pc files as well. For +any generated `foo.pc` file, an extra `foo-uninstalled.pc` file is +placed into `/meson-uninstalled`. 
They can be used to build +applications against libraries built by Meson without installing them, +by pointing `PKG_CONFIG_PATH` to that directory. This is an +experimental feature provided on a best-effort basis, it might not +work in all use-cases. + +## CMake find_package COMPONENTS support + +It is now possible to pass components to the CMake dependency backend +via the new `components` kwarg in the `dependency` function. + +## Added Microchip XC16 C compiler support + +Make sure compiler executables are setup correctly in your path +Compiler is available from the Microchip website for free + + +## Added Texas Instruments C2000 C/C++ compiler support + +Make sure compiler executables are setup correctly in your path +Compiler is available from Texas Instruments website for free + +## Unity file block size is configurable + +Traditionally the unity files that Meson autogenerates contain all +source files that belong to a single target. This is the most +efficient setting for full builds but makes incremental builds slow. +This release adds a new option `unity_size` which specifies how many +source files should be put in each unity file. + +The default value for block size is 4. This means that if you have a +target that has eight source files, Meson will generate two unity +files each of which includes four source files. The old behaviour can +be replicated by setting `unity_size` to a large value, such as 10000. + +## Verbose mode for `meson compile` + +The new option `--verbose` has been added to `meson compile` that will +enable more verbose compilation logs. Note that for VS backend it +means that logs will be less verbose by default (without `--verbose` +option). diff --git a/meson/docs/markdown/Release-notes-for-0.55.0.md b/meson/docs/markdown/Release-notes-for-0.55.0.md new file mode 100644 index 000000000..110dd1d71 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.55.0.md @@ -0,0 +1,332 @@ +--- +title: Release 0.55.0 +short-description: Release notes for 0.55.0 +... + +# New features + +## rpath removal now more careful + +On Linux-like systems, Meson adds rpath entries to allow running apps +in the build tree, and then removes those build-time-only rpath +entries when installing. Rpath entries may also come in via LDFLAGS +and via .pc files. Meson used to remove those latter rpath entries by +accident, but is now more careful. + +## Added ability to specify targets in `meson compile` + +It's now possible to specify targets in `meson compile`, which will +result in building only the requested targets. + +Usage: `meson compile [TARGET [TARGET...]]` +`TARGET` has the following syntax: `[PATH/]NAME[:TYPE]`. +`NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`). +`PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`. +`TYPE`: type of the target (e.g. `shared_library`, `executable` and etc) + +`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`. + +For example targets from the following code: +```meson +shared_library('foo', ...) +static_library('foo', ...) +executable('bar', ...) +``` +can be invoked with `meson compile foo:shared_library foo:static_library bar`. + +## Test protocol for gtest + +Due to the popularity of Gtest (google test) among C and C++ +developers Meson now supports a special protocol for gtest. 
With this +protocol Meson injects arguments to gtests to output JUnit, reads that +JUnit, and adds the output to the JUnit it generates. + +## meson.add_*_script methods accept new types + +All three (`add_install_script`, `add_dist_script`, and +`add_postconf_script`) now accept ExternalPrograms (as returned by +`find_program`), Files, and the output of `configure_file`. The dist and +postconf methods cannot accept other types because of when they are run. +While dist could, in theory, take other dependencies, it would require more +extensive changes, particularly to the backend. + +```meson +meson.add_install_script(find_program('foo'), files('bar')) +meson.add_dist_script(find_program('foo'), files('bar')) +meson.add_postconf_script(find_program('foo'), files('bar')) +``` + +The install script variant is also able to accept custom_targets, +custom_target indexes, and build targets (executables, libraries), and +can use built executables a the script to run + +```meson +installer = executable('installer', ...) +meson.add_install_script(installer, ...) +meson.add_install_script('foo.py', installer) +``` + +## Machine file constants + +Native and cross files now support string and list concatenation using +the `+` operator, and joining paths using the `/` operator. Entries +defined in the `[constants]` section can be used in any other section. +An entry defined in any other section can be used only within that +same section and only after it has been defined. + +```ini +[constants] +toolchain = '/toolchain' +common_flags = ['--sysroot=' + toolchain + '/sysroot'] + +[properties] +c_args = common_flags + ['-DSOMETHING'] +cpp_args = c_args + ['-DSOMETHING_ELSE'] + +[binaries] +c = toolchain + '/gcc' +``` + +## Configure CMake subprojects with Meson.subproject_options + +Meson now supports passing configuration options to CMake and +overriding certain build details extracted from the CMake subproject. + +The new CMake configuration options object is very similar to the +[configuration data +object](Reference-manual.md#configuration-data-object) object returned +by [`configuration_data`](Reference-manual.md#configuration_data). It +is generated by the `subproject_options` function + +All configuration options have to be set *before* the subproject is +configured and must be passed to the `subproject` method via the +`options` key. Altering the configuration object won't have any effect +on previous `cmake.subproject` calls. + +**Note:** The `cmake_options` kwarg for the `subproject` function is +now deprecated since it is replaced by the new `options` system. + +## find_program: Fixes when the program has been overridden by executable + +When a program has been overridden by an executable, the returned +object of find_program() had some issues: + +```meson +# In a subproject: +exe = executable('foo', ...) +meson.override_find_program('foo', exe) + +# In main project: +# The version check was crashing Meson. +prog = find_program('foo', version : '>=1.0') + +# This was crashing Meson. +message(prog.path()) + +# New method to be consistent with built objects. +message(prog.full_path()) +``` + +## Response files enabled on Linux, reined in on Windows + +Meson used to always use response files on Windows, +but never on Linux. + +It now strikes a happier balance, using them on both platforms, +but only when needed to avoid command line length limits. + +## `unstable-kconfig` module renamed to `unstable-keyval` + +The `unstable-kconfig` module is now renamed to `unstable-keyval`. 
We +expect this module to become stable once it has some usage experience, +specifically in the next or the following release + + +## Fatal warnings in `gnome.generate_gir()` + +`gnome.generate_gir()` now has `fatal_warnings` keyword argument to +abort when a warning is produced. This is useful for example in CI +environment where it's important to catch potential issues. + +## b_ndebug support for D language compilers + +D Language compilers will now set -release/--release/-frelease (depending on +the compiler) when the b_ndebug flag is set. + +## Meson test now produces JUnit xml from results + +Meson will now generate a JUnit compatible XML file from test results. +it will be in the `meson-logs` directory and is called +`testlog.junit.xml`. + +## Config tool based dependencies no longer search PATH for cross compiling + +Before 0.55.0 config tool based dependencies (llvm-config, +cups-config, etc), would search system $PATH if they weren't defined +in the cross file. This has been a source of bugs and has been +deprecated. It is now removed, config tool binaries must be specified +in the cross file now or the dependency will not be found. + +## Rename has_exe_wrapper -> can_run_host_binaries + +The old name was confusing as it didn't really match the behavior of +the function. The old name remains as an alias (the behavior hasn't +changed), but is now deprecated. + +## String concatenation in meson_options.txt + +It is now possible to use string concatenation (with the `+` +opperator) in the `meson_options.txt` file. This allows splitting long +option descriptions. + +```meson +option( + 'testoption', + type : 'string', + value : 'optval', + description : 'An option with a very long description' + + 'that does something in a specific context' +) +``` + +## Wrap fallback URL + +Wrap files can now define `source_fallback_url` and +`patch_fallback_url` to be used in case the main server is temporaly +down. + +## Clang coverage support + +llvm-cov is now used to generate coverage information when clang is +used as the compiler. + +## Local wrap source and patch files + +It is now possible to use the `patch_filename` and `source_filename` +value in a `.wrap` file without `*_url` to specify a local source / +patch file. All local files must be located in the +`subprojects/packagefiles` directory. The `*_hash` entries are +optional with this setup. + +## Local wrap patch directory + +Wrap files can now specify `patch_directory` instead of +`patch_filename` in the case overlay files are local. Every files in +that directory, and subdirectories, will be copied to the subproject +directory. This can be used for example to add `meson.build` files to +a project not using Meson build system upstream. The patch directory +must be placed in `subprojects/packagefiles` directory. + +## Patch on all wrap types + +`patch_*` keys are not limited to `wrap-file` any more, they can be +specified for all wrap types. + +## link_language argument added to all targets + +Previously the `link_language` argument was only supposed to be +allowed in executables, because the linker used needs to be the linker +for the language that implements the main function. Unfortunately it +didn't work in that case, and, even worse, if it had been implemented +properly it would have worked for *all* targets. In 0.55.0 this +restriction has been removed, and the bug fixed. It now is valid for +`executable` and all derivative of `library`. 
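+
+As a sketch of what this now allows (target and file names are hypothetical),
+a mixed-language library can request the Fortran linker explicitly:
+
+```meson
+mixed = shared_library('mixed', ['solver.f90', 'c_api.c'],
+  link_language : 'fortran')
+```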
+
+## meson dist --no-tests
+
+`meson dist` has a new option `--no-tests` to skip build and tests of
+generated packages. It can be used to not waste time for example when
+done in CI that already does its own testing.
+
+## Force fallback for
+
+A newly-added `--force-fallback-for` command line option can now be
+used to force fallback for specific subprojects.
+
+Example:
+
+```
+meson builddir/ --force-fallback-for=foo,bar
+```
+
+## Implicit dependency fallback
+
+`dependency('foo')` now automatically falls back if the dependency is
+not found on the system but a subproject wrap file or directory exists
+with the same name.
+
+That means that simply adding `subprojects/foo.wrap` is enough to add
+fallback to any `dependency('foo')` call. It however requires that
+the subproject calls `meson.override_dependency('foo', foo_dep)` to
+specify which dependency object should be used for `foo`.
+
+## Wrap file `provide` section
+
+Wrap files can define the dependencies they provide in the `[provide]`
+section. When `foo.wrap` provides the dependency `foo-1.0`, any call to
+`dependency('foo-1.0')` will automatically fall back to that subproject
+even if no `fallback` keyword argument is given. See [Wrap
+documentation](Wrap-dependency-system-manual.md#provide_section).
+
+## `find_program()` fallback
+
+When a program cannot be found on the system but a wrap file has its
+name in the `[provide]` section, that subproject will be used as a
+fallback.
+
+## Test scripts are given the exe wrapper if needed
+
+Meson will now set `MESON_EXE_WRAPPER` to the properly wrapped and
+joined representation. For Unix-like OSes this means Python's
+shlex.join; on Windows an implementation that attempts to properly
+quote Windows arguments is used. This allows wrapper scripts to run test
+binaries, instead of just skipping them.
+
+For example, if the wrapper is `['emulator', '--script']`, it will be passed
+as `MESON_EXE_WRAPPER="emulator --script"`.
+
+## Added ability to specify backend arguments in `meson compile`
+
+It's now possible to specify backend-specific arguments in `meson compile`.
+
+Usage: `meson compile [--vs-args=args] [--ninja-args=args]`
+
+```
+  --ninja-args NINJA_ARGS  Arguments to pass to `ninja` (applied only on `ninja` backend).
+  --vs-args VS_ARGS        Arguments to pass to `msbuild` (applied only on `vs` backend).
+```
+
+These arguments use the following syntax:
+
+If you only pass a single string, then it is considered to have all
+values separated by commas. Thus invoking the following command:
+
+```
+$ meson compile --ninja-args=-n,-d,explain
+```
+
+would add `-n`, `-d` and `explain` arguments to the ninja invocation.
+
+If you need to have commas or spaces in your string values, then you
+need to pass the value with proper shell quoting like this:
+
+```
+$ meson compile "--ninja-args=['a,b', 'c d']"
+```
+
+## Introspection API changes
+
+Dumping the AST (--ast): **new in 0.55.0**
+- prints the AST of a meson.build as JSON
+
+## `--backend=vs` now matches `-Db_vscrt=from_buildtype` behaviour in the Ninja backend
+
+When `--buildtype=debugoptimized` is used with the Ninja backend, the
+VS CRT option used is `/MD`, which is the [behaviour documented for
+all
+backends](https://mesonbuild.com/Builtin-options.html#b_vscrt-from_buildtype).
+However, the Visual Studio backend was passing `/MT` in that case, which
+is inconsistent.
+ +If you need to use the MultiThreaded CRT, you should explicitly pass +`-Db_vscrt=mt` diff --git a/meson/docs/markdown/Release-notes-for-0.56.0.md b/meson/docs/markdown/Release-notes-for-0.56.0.md new file mode 100644 index 000000000..c196fded9 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.56.0.md @@ -0,0 +1,375 @@ +--- +title: Release 0.56.0 +short-description: Release notes for 0.56.0 +... + +# New features + +## Python 3.5 support will be dropped in the next release + +The final [Python 3.5 release was 3.5.10 in +September](https://www.python.org/dev/peps/pep-0478/#id4). This +release series is now End-of-Life (EOL). The only LTS distribution +that still only ships Python 3.5 is Ubuntu 16.04, which will be [EOL +in April 2021](https://ubuntu.com/about/release-cycle). + +Python 3.6 has numerous features that we find useful such as improved +support for the `typing` module, f-string support, and better +integration with the `pathlib` module. + +As a result, we will begin requiring Python 3.6 or newer in Meson +0.57, which is the next release. Starting with Meson 0.56, we now +print a `NOTICE:` when a `meson` command is run on Python 3.5 to +inform users about this. This notice has also been backported into the +0.55.2 stable release. + +## `meson test` can now filter tests by subproject + +You could always specify a list of tests to run by passing the names +as arguments to `meson test`. If there were multiple tests with that +name (in the same project or different subprojects), all of them would +be run. Now you can: + +1. Run all tests with the specified name from a specific subproject: `meson test subprojname:testname` +1. Run all tests defined in a specific subproject: `meson test subprojectname:` + +As before, these can all be specified multiple times and mixed: + +```sh +# Run: +# * All tests called 'name1' or 'name2' and +# * All tests called 'name3' in subproject 'bar' and +# * All tests in subproject 'foo' +$ meson test name1 name2 bar:name3 foo: +``` + +## Native (build machine) compilers not always required by `project()` + +When cross-compiling, native (build machine) compilers for the +languages specified in `project()` are not required, if no targets use +them. + +## New `extra_files` key in target introspection + +The target introspection (`meson introspect --targets`, +`intro-targets.json`) now has the new `extra_files` key which lists +all files specified via the `extra_files` kwarg of a build target (see +`executable()`, etc.) + + +## Preliminary AIX support + +AIX is now supported when compiling with gcc. A number of features are +not supported yet. For example, only gcc is supported (not xlC). +Archives with both 32-bit and 64-bit dynamic libraries are not +generated automatically. The rpath includes both the build and install +rpath, no attempt is made to change the rpath at install time. Most +advanced features (eg. link\_whole) are not supported yet. + +## Wraps from subprojects are automatically promoted + +It is not required to promote wrap files for subprojects into the main +project any more. When configuring a subproject, Meson will look for +any wrap file or directory in the subproject's `subprojects/` +directory and add them into the global list of available subprojects, +to be used by any future `subproject()` call or `dependency()` +fallback. If a subproject with the same name already exists, the new +wrap file or directory is ignored. 
That means that the main project +can always override any subproject's wrap files by providing their +own, it also means the ordering in which subprojects are configured +matters, if 2 subprojects provide foo.wrap only the one from the first +subproject to be configured will be used. + +This new behavior can be disabled by passing `--wrap-mode=nopromote`. + +## `meson.build_root()` and `meson.source_root()` are deprecated + +Those function are common source of issue when used in a subproject +because they point to the parent project root which is rarely what is +expected and is a violation of subproject isolation. + +`meson.current_source_dir()` and `meson.current_build_dir()` should be +used instead and have been available in all Meson versions. New +functions `meson.project_source_root()` and +`meson.project_build_root()` have been added in Meson 0.56.0 to get +the root of the current (sub)project. + +## `dep.as_link_whole()` + +Dependencies created with `declare_dependency()` now has new method +`as_link_whole()`. It returns a copy of the dependency object with all +link_with arguments changed to link_whole. This is useful for example +for fallback dependency from a subproject built with +`default_library=static`. + +```meson +somelib = static_library('somelib', ...) +dep = declare_dependency(..., link_with: somelib) +library('someotherlib', ..., dependencies: dep.as_link_whole()) +``` + +## Add support for all Windows subsystem types + +It is now possible to build things like Windows kernel drivers with +the new `win_subsystem` keyword argument. This replaces the old +`gui_app` keyword argument, which is now deprecated. You should update +your project to use the new style like this: + +```meson +# Old way +executable(..., gui_app: 'true') +# New way +executable(..., win_subsystem: 'windows') +``` + +The argument supports versioning [as described on MSDN +documentation](https://docs.microsoft.com/en-us/cpp/build/reference/subsystem-specify-subsystem). +Thus to build a Windows kernel driver with a specific version you'd +write something like this: + +```meson +executable(..., win_subsystem: 'native,6.02') +``` + +## Added NVidia HPC SDK compilers + +Added support for `nvidia_hpc` NVidia HPC SDK compilers, which are currently in public beta testing. + +## Project and built-in options can be set in native or cross files + +A new set of sections has been added to the cross and native files, +`[project options]` and `[:project options]`, where +`subproject_name` is the name of a subproject. Any options that are +allowed in the project can be set from this section. They have the +lowest precedent, and will be overwritten by command line arguments. + + +```meson +option('foo', type : 'string', value : 'foo') +``` + +```ini +[project options] +foo = 'other val' +``` + +```console +meson builddir/ --native-file my.ini +``` + +Will result in the option foo having the value `other val`, + +```console +meson builddir/ --native-file my.ini -Dfoo='different val' +``` + +Will result in the option foo having the value `different val`, + + +Subproject options are assigned like this: + +```ini +[zlib:project options] +foo = 'some val' +``` + +Additionally Meson level options can be set in the same way, using the +`[built-in options]` section. 
+ +```ini +[built-in options] +c_std = 'c99' +``` + +These options can also be set on a per-subproject basis, although only +`default_library` and `werror` can currently be set: +```ini +[zlib:built-in options] +default_library = 'static' +``` + +## `unstable-keyval` is now stable `keyval` + +The `unstable-keyval` has been renamed to `keyval` and now promises stability +guarantees. + +Meson will print a warning when you load an `unstable-` module that has been +stabilised (so `unstable-keyval` is still accepted for example). + +## CMake subproject cross compilation support + +Meson now supports cross compilation for CMake subprojects. Meson will +try to automatically guess most of the required CMake toolchain +variables from existing entries in the cross and native files. These +variables will be stored in an automatically generate CMake toolchain +file in the build directory. The remaining variables that can't be +guessed can be added by the user in the new `[cmake]` cross/native +file section. + +## Machine file keys are stored case sensitive + +Previous the keys were always lowered, which worked fine for the +values that were allowed in the machine files. With the addition of +per-project options we need to make these sensitive to case, as the +options in meson_options.txt are sensitive to case already. + +## Consistency between `declare_dependency()` and `pkgconfig.generate()` variables + +The `variables` keyword argument in `declare_dependency()` used to +only support dictionary and `pkgconfig.generate()` only list of +strings. They now both support dictionary and list of strings in the +format `'name=value'`. This makes easier to share a common set of +variables for both: + +```meson +vars = {'foo': 'bar'} +dep = declare_dependency(..., variables: vars) +pkg.generate(..., variables: vars) +``` + +## Qt5 compile_translations now supports qresource preprocessing + +When using qtmod.preprocess() in combination with +qtmod.compile_translations() to embed translations using rcc, it is no +longer required to do this: + +```meson +ts_files = ['list', 'of', 'files'] +qtmod.compile_translations(ts_files) +# lang.qrc also contains the duplicated list of files +lang_cpp = qtmod.preprocess(qresources: 'lang.qrc') +``` + +Instead, use: +```meson +lang_cpp = qtmod.compile_translations(qresource: 'lang.qrc') +``` + +which will automatically detect and generate the needed +compile_translations targets. + +## Controlling subproject dependencies with `dependency(allow_fallback: ...)` + +As an alternative to the `fallback` keyword argument to `dependency`, +you may use `allow_fallback`, which accepts a boolean value. If `true` +and the dependency is not found on the system, Meson will fallback to +a subproject that provides this dependency, even if the dependency is +optional. If `false`, Meson will not fallback even if a subproject +provides this dependency. + +## Custom standard library + +- It is not limited to cross builds any more, `_stdlib` property can be + set in native files. +- The variable name parameter is no longer required as long as the subproject + calls `meson.override_dependency('c_stdlib', mylibc_dep)`. + +## Improvements for the builtin curses dependency + +This method has been extended to use config-tools, and a fallback to +find_library for lookup as well as pkg-config. + +## HDF5 dependency improvements + +HDF5 has been improved so that the internal representations have been +split. This allows selecting pkg-config and config-tool dependencies +separately. 
Both work as proper dependencies of their type, so +`get_variable` and similar now work correctly. + +It has also been fixed to use the selected compiler for the build instead of +the default compiler. + +## External projects + +A new experimental module `unstable_external_project` has been added +to build code using other build systems than Meson. Currently only +supporting projects with a configure script that generates Makefiles. + +```meson +project('My Autotools Project', 'c', + meson_version : '>=0.56.0', +) + +mod = import('unstable_external_project') + +p = mod.add_project('configure', + configure_options : ['--prefix=@PREFIX@', + '--libdir=@LIBDIR@', + '--incdir=@INCLUDEDIR@', + '--enable-foo', + ], +) + +mylib_dep = p.dependency('mylib') +``` + + +## Per subproject `warning_level` option + +`warning_level` can now be defined per subproject, in the same way as +`default_library` and `werror`. + +## `meson subprojects` command + +A new `--types` argument has been added to all subcommands to run the +command only on wraps with the specified types. For example this +command will only print `Hello` for each git subproject: `meson +subprojects foreach --types git echo "Hello"`. Multiple types can be +set as comma separated list e.g. `--types git,file`. + +Subprojects with no wrap file are now taken into account as well. This +happens for example for subprojects configured as git submodule, or +downloaded manually by the user and placed into the `subprojects/` +directory. + +The `checkout` subcommand now always stash any pending changes before +switching branch. Note that `update` subcommand was already stashing +changes before updating the branch. + +If the command fails on any subproject the execution continues with +other subprojects, but at the end an error code is now returned. + +The `update` subcommand has been reworked: +- In the case the URL of `origin` is different as the `url` set in wrap file, + the subproject will not be updated unless `--reset` is specified (see below). +- In the case a subproject directory exists and is not a git repository but has + a `[wrap-git]`, Meson used to run git commands that would wrongly apply to the + main project. It now skip the subproject unless `--reset` is specified (see below). +- The `--rebase` behaviour is now the default for consistency: it was + already rebasing when current branch and revision are the same, it is + less confusing to rebase when they are different too. +- Add `--reset` mode that checkout the new branch and hard reset that + branch to remote commit. This new mode guarantees that every + subproject are exactly at the wrap's revision. In addition the URL of `origin` + is updated in case it changed in the wrap file. If the subproject directory is + not a git repository but has a `[wrap-git]` the directory is deleted and the + new repository is cloned. +- Local changes are always stashed first to avoid any data loss. In the + worst case scenario the user can always check reflog and stash list to + rollback. + +## Added CompCert C compiler + +Added experimental support for the [CompCert formally-verified C +compiler](https://github.com/AbsInt/CompCert). The current state of +the implementation is good enough to build the [picolibc +project](https://github.com/picolibc/picolibc) with CompCert, but +might still need additional adjustments for other projects. 
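+
+As a hedged sketch, selecting CompCert works through the usual machine-file
+mechanism; this assumes the compiler driver is installed as `ccomp`:
+
+```ini
+[binaries]
+c = 'ccomp'
+```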
+ +## Dependencies listed in test and benchmark introspection + +The introspection data for tests and benchmarks now includes the +target ids for executables and built files that are needed by the +test. IDEs can use this feature to update the build more quickly +before running a test. + +## `include_type` support for the CMake subproject object dependency method + +The `dependency()` method of the CMake subproject object now also +supports the `include_type` kwarg which is similar to the sane kwarg +in the `dependency()` function. + +## Deprecate Dependency.get_pkgconfig_variable and Dependency.get_configtool_variable + +These have been replaced with the more versatile `get_variable()` method +already, and shouldn't be used anymore. diff --git a/meson/docs/markdown/Release-notes-for-0.57.0.md b/meson/docs/markdown/Release-notes-for-0.57.0.md new file mode 100644 index 000000000..595ebec01 --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.57.0.md @@ -0,0 +1,360 @@ +--- +title: Release 0.57.0 +short-description: Release notes for 0.57.0 +... + +# New features + +## Project version can be specified with a file + +Meson can be instructed to load a project's version string from an +external file like this: + +```meson +project('foo', 'c', version: files('VERSION')) +``` + +The version file must contain exactly one line of text which will +be used as the project's version. If the line ends in a newline +character, it is removed. + +## Support for reading files at configuration time with the `fs` module + +Reading text files during configuration is now supported. This can be done at +any time after `project` has been called + +```meson +project('myproject', 'c') +license_text = run_command( + find_program('python3'), '-c', 'print(open("COPYING").read())' +).stdout().strip() +about_header = configuration_data() +about_header.add('COPYRIGHT', license_text) +about_header.add('ABOUT_STRING', meson.project_name()) +... +``` + +There are several problems with the above approach: +1. It's ugly and confusing +2. If `COPYING` changes after configuration, Meson won't correctly rebuild when + configuration data is based on the data in COPYING +3. It has extra overhead + +`fs.read` replaces the above idiom thus: +```meson +project('myproject', 'c') +fs = import('fs') +license_text = fs.read('COPYING').strip() +about_header = configuration_data() +about_header.add('COPYRIGHT', license_text) +about_header.add('ABOUT_STRING', meson.project_name()) +... +``` + +They are not equivalent, though. Files read with `fs.read` create a +configuration dependency on the file, and so if the `COPYING` file is modified, +Meson will automatically reconfigure, guaranteeing the build is consistent. It +can be used for any properly encoded text files. It supports specification of +non utf-8 encodings too, so if you're stuck with text files in a different +encoding, it can be passed as an argument. See the [`meson` +object](Reference-manual.md#meson-object) documentation for details. + +## meson install --dry-run + +New option to meson install command that does not actually install files, but +only prints messages. + +## Experimental support for C++ modules in Visual Studio + +Modules are a new C++ 20 feature for organising source code aiming to +increase compilation speed and reliability. This support is +experimental and may change in future releases. It only works with the +latest preview release of Visual Studio. + +## Qt6 module + +A module for Qt6 is now available with the same functionality as the Qt5 +module. 
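+
+For illustration, a minimal sketch of using the module (the header, source
+and Qt component names here are hypothetical):
+
+```meson
+qt6 = import('qt6')
+
+# Hypothetical sources; usage mirrors the existing qt5 module.
+qt6_dep = dependency('qt6', modules : ['Core', 'Widgets'], method : 'qmake')
+moc_files = qt6.preprocess(moc_headers : 'mainwindow.h',
+                           dependencies : qt6_dep)
+
+executable('myapp', 'main.cpp', moc_files, dependencies : qt6_dep)
+```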
+ +Currently finding Qt6 is only available via `qmake` as pkg-config files aren't +generated (see [QTBUG-86080](https://bugreports.qt.io/browse/QTBUG-86080)) and +CMake support is not available for this module yet. + +## Unstable Rust module + +A new unstable module has been added to make using Rust with Meson easier. +Currently, it adds a single function to ease defining Rust tests, as well as a +wrapper around bindgen, making it easier to use. + +## Meson test() now accepts `protocol : 'rust'` + +This allows native Rust tests to be run and parsed by Meson; simply set the +protocol to `rust` and Meson takes care of the rest. + +## MSVC/Clang-Cl Argument Changes/Cleanup + +* "Disable Debug" (`/Od`) is no longer manually specified for optimization levels {`0`,`g`} (it is already the default for MSVC). +* "Run Time Checking" (`/RTC1`) removed from `debug` buildtype by default +* Clang-CL `debug` buildtype arguments now match MSVC arguments +* There is now no difference between `buildtype` flags and `debug` + `optimization` flags + +The /Od flag has been removed, as it is already the default in the MSVC compilers, and conflicts with other user options. + +/RTC1 conflicts with other RTC argument types as there are many different options, and has been removed by default. +Run Time Checking can be enabled by manually adding `/RTC1` or other RTC flags of your choice. + +The `debug` buildtype for clang-cl added additional arguments compared to MSVC, which had more to do with optimization than debug. The arguments removed are `/Ob0`, `/Od`, `/RTC1`. (`/Zi` was also removed, but it is already added by default when debug is enabled.) + +If these are important issues for you and would like builtin toggle options, +please file an issue in the Meson bug tracker. + +## Buildtype remains even if dependent options are changed + +Setting the `buildtype` option to a value sets the `debug` and +`optimization` options to predefined values. Traditionally setting the +options to other values would then change the buildtype to `custom`. +This is confusing and means that you can't use, for example, debug +level `g` in `debug` buildtype even though it would make sense under +many circumstances. + +Starting with this release, the buildtype is only changed when the user +explicitly sets it; setting the build type modifies the `debug` and +`optimization` options as before. + +## Passing internal dependencies to the compiler object + +Methods on the compiler object (such as `compiles`, `links`, `has_header`) +can be passed dependencies returned by `declare_dependency`, as long as they +only specify compiler/linker arguments or other dependencies that satisfy +the same requirements. + +## `unstable_external_project` improvements + +- Default arguments are added to `add_project()` in case some tags are not found + in `configure_options`: `'--prefix=@PREFIX@'`, `'--libdir=@PREFIX@/@LIBDIR@'`, + and `'--includedir=@PREFIX@/@INCLUDEDIR@'`. It was previously considered a fatal + error to not specify them. + +- When the `verbose` keyword argument is not specified, or is false, command outputs + are written on file in `/meson-logs/`. + +- The `LD` environment variable is not passed any more when running the configure + script. It caused issues because Meson sets `LD` to the `CC` linker wrapper but + autotools expects it to be a real linker (e.g. `/usr/bin/ld`). + +## `gnome.post_install()` + +Post-install update of various system wide caches. 
Each script will be executed +only once even if `gnome.post_install()` is called multiple times from multiple +subprojects. If `DESTDIR` is specified during installation all scripts will be +skipped. + +Currently supports `glib-compile-schemas`, `gio-querymodules`, and +`gtk-update-icon-cache`. + +## "Edit and continue" (/ZI) is no longer used by default for Visual Studio + +Meson was adding the `/ZI` compiler argument as an argument for Visual Studio +in debug mode. This enables the `edit-and-continue` debugging in +Visual Studio IDE's. + +Unfortunately, it is also extremely expensive and breaks certain use cases such +as link time code generation. Edit and continue can be enabled by manually by +adding `/ZI` to compiler arguments. + +The `/ZI` argument has now been replaced by the `/Zi` argument for debug builds. + +If this is an important issue for you and would like a builtin toggle option, +please file an issue in the Meson bug tracker. + +## Minimum required Python version updated to 3.6 + +Meson now requires at least Python version 3.6 to run as Python 3.5 +reaches EOL on September 2020. In practice this should only affect +people developing on Ubuntu Xenial, which will similarly reach EOL in +April 2021. + +## Packaging a subproject + +The `meson dist` command can now create a distribution tarball for a subproject +in the same git repository as the main project. This can be useful if parts of +the project (e.g. libraries) can be built and distributed separately. In that +case they can be moved into `subprojects/mysub` and running `meson dist` in that +directory will now create a tarball containing only the source code from that +subdir and not the rest of the main project or other subprojects. + +For example: +```sh +git clone https://github.com/myproject +cd myproject/subprojects/mysubproject +meson builddir +meson dist -C builddir +``` + +## `custom_target()` and `run_target()` now accepts an `env` keyword argument + +Environment variables can now be passed to the `custom_target()` command. + +```meson +env = environment() +env.append('PATH', '/foo') +custom_target(..., env: env) +custom_target(..., env: {'MY_ENV': 'value'}) +custom_target(..., env: ['MY_ENV=value']) +``` + +## `summary()` accepts external programs or dependencies + +External program objects and dependency objects can be passed to +`summary()` as the value to be printed. + +## CMake `find_package` version support + +It is now possible to specify a requested package version for the CMake +dependency backend via the new `cmake_package_version` kwarg in the +`dependency` function. + +## `meson test` only rebuilds test dependencies + +Until now, `meson test` rebuilt the whole project independent of the +requested tests and their dependencies. With this release, `meson test` +will only rebuild what is needed for the tests or suites that will be run. +This feature can be used, for example, to speed up bisecting regressions +using commands like the following: + + git bisect start + git bisect run meson test + +This would find the broken commit automatically while at each step +rebuilding only those pieces of code needed to run the test. + +However, this change could cause failures when upgrading to 0.57, if the +dependencies are not specified correctly in `meson.build`. 
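+
+For illustration, a test that reads a generated file at runtime should now
+declare it with the `depends:` keyword argument so `meson test` still rebuilds
+it before running; the file names and generator command below are hypothetical:
+
+```meson
+prog = executable('prog_test', 'prog_test.c')
+
+# Hypothetical data file produced at build time and read by the test at runtime.
+testdata = custom_target('testdata',
+  output : 'data.txt',
+  command : [find_program('python3'), '-c', 'open("@OUTPUT@", "w").write("42")'],
+)
+
+# Without `depends`, `meson test` has no way of knowing the data file is needed.
+test('prog', prog, depends : testdata)
+```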
+ +## The `add_*_script` methods now accept a File as the first argument + +Meson now accepts `file` objects, including those produced by +`configure_file`, as the first parameter of the various +`add_*_script` methods + +```meson +install_script = configure_file( + configuration : conf, + input : 'myscript.py.in', + output : 'myscript.py', +) + +meson.add_install_script(install_script, other, params) +``` + +## Unity build with Vala disabled + +The approach that meson has used for Vala unity builds is incorrect, we +combine the generated C files like we would any other C file. This is very +fragile however, as the Vala compiler generates helper functions and macros +which work fine when each file is a separate translation unit, but fail when +they are combined. + +## New logging format for `meson test` + +The console output format for `meson test` has changed in several ways. +The major changes are: + +* if stdout is a tty, `meson` includes a progress report. + +* if `--print-errorlogs` is specified, the logs are printed as tests run +rather than afterwards. All the error logs are printed rather than only +the first ten. + +* if `--verbose` is specified and `--num-processes` specifies more than +one concurrent test, test output is buffered and printed after the +test finishes. + +* the console logs include a reproducer command. If `--verbose` is +specified, the command is printed for all tests at the time they start; +otherwise, it is printed for failing tests at the time the test finishes. + +* for TAP and Rust tests, Meson is able to report individual subtests. If +`--verbose` is specified, all tests are reported. If `--print-errorlogs` +is specified, only failures are. + +In addition, if `--verbose` was specified, Meson used not to generate +logs. This limitation has now been removed. + +These changes make the default `ninja test` output more readable, while +`--verbose` output provides detailed, human-readable logs that +are well suited to CI environments. + +## Specify DESTDIR on command line + +`meson install` command now has a `--destdir` argument that overrides `DESTDIR` +from environment. + +## Skip install scripts if DESTDIR is set + +`meson.add_install_script()` now has `skip_if_destdir` keyword argument. If set +to `true` the script won't be run if `DESTDIR` is set during installation. This is +useful in the case the script updates system wide caches, or performs other tasks +that are only needed when copying files into final destination. + +## Add support for prelinked static libraries + +The static library gains a new `prelink` keyword argument that can be +used to prelink object files in that target. This is currently only +supported for the GNU toolchain, patches to add it to other compilers +are most welcome. + +## Rust now has an `std` option + +Rust calls these `editions`, however, Meson generally refers to such language +versions as "standards", or `std` for short. Therefore, Meson's Rust support +uses `std` for consistency with other languages. + +## Ctrl-C behavior in `meson test` + +Starting from this version, sending a `SIGINT` signal (or pressing `Ctrl-C`) +to `meson test` will interrupt the longest running test. Pressing `Ctrl-C` +three times within a second will exit `meson test`. + +## Support added for LLVM's thinLTO + +A new `b_lto_mode` option has been added, which may be set to `default` or +`thin`. Thin only works for clang, and only with gnu gold, lld variants, or +ld64. 
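+
+A minimal sketch of enabling it project-wide, assuming a clang toolchain with a
+supported linker (the same options can also be passed on the `meson setup`
+command line):
+
+```meson
+# Hypothetical project; b_lto must be enabled for b_lto_mode to take effect.
+project('thinlto demo', 'c',
+  default_options : ['b_lto=true', 'b_lto_mode=thin'])
+
+executable('app', 'main.c')
+```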
+ +## `test()` timeout and timeout_multiplier value <= 0 + +`test(..., timeout: 0)`, or negative value, used to abort the test immediately +but now instead allow infinite duration. Note that omitting the `timeout` +keyword argument still defaults to 30s timeout. + +Likewise, `add_test_setup(..., timeout_multiplier: 0)`, or +`meson test --timeout-multiplier 0`, or negative value, disable tests timeout. + + +## Knob to control LTO thread + +Both the gnu linker and lld support using threads for speeding up LTO, meson +now provides a knob for this: `-Db_lto_threads`. Currently this is only +supported for clang and gcc. Any positive integer is supported, `0` means +`auto`. If the compiler or linker implements it's on `auto` we use that, +otherwise the number of threads on the machine is used. + +## `summary()` now uses left alignment for both keys and values + +Previously it aligned keys toward the center, but this was deemed harder +to read than having everything left aligned. + +## `//` is now allowed as a function id for `meson rewrite`. + +msys bash may expand `/` to a path, breaking +`meson rewrite kwargs set project / ...`. Passing `//` will be converted to +`/` by msys bash but in order to keep usage shell-agnostic, this release +also allows `//` as the id. This way, `meson rewrite kwargs set project +// ...` will work in both msys bash and other shells. + +## Get keys of configuration data object + +All keys of the `configuration_data` object can be obtained with the `keys()` +method as an alphabetically sorted array. + diff --git a/meson/docs/markdown/Release-notes-for-0.58.0.md b/meson/docs/markdown/Release-notes-for-0.58.0.md new file mode 100644 index 000000000..9a23abb7f --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.58.0.md @@ -0,0 +1,357 @@ +--- +title: Release 0.58.0 +short-description: Release notes for 0.58.0 +... + +# New features + +## New `meson.global_build_root()` and `meson.global_source_root()` methods + +Returns the root source and build directory of the main project. + +Those are direct replacement for `meson.build_root()` and `meson.source_root()` +that have been deprecated since 0.56.0. In some rare occasions they could not be +replaced by `meson.project_source_root()` or `meson.current_source_dir()`, in +which case the new methods can now be used instead. Old methods are still +deprecated because their names are not explicit enough and created many issues +when a project is being used as a subproject. + +## Developer environment + +New method `meson.add_devenv()` adds an [`environment()`](#environment) object +to the list of environments that will be applied when using `meson devenv` +command line. This is useful for developpers who wish to use the project without +installing it, it is often needed to set for example the path to plugins +directory, etc. Alternatively, a list or dictionary can be passed as first +argument. + +``` meson +devenv = environment() +devenv.set('PLUGINS_PATH', meson.current_build_dir()) +... +meson.add_devenv(devenv) +``` + +New command line has been added: `meson devenv -C builddir []`. +It runs a command, or open interactive shell if no command is provided, with +environment setup to run project from the build directory, without installation. + +These variables are set in environment in addition to those set using `meson.add_devenv()`: +- `MESON_DEVENV` is defined to `'1'`. +- `MESON_PROJECT_NAME` is defined to the main project's name. 
+- `PKG_CONFIG_PATH` includes the directory where Meson generates `-uninstalled.pc` + files. +- `PATH` includes every directory where there is an executable that would be + installed into `bindir`. On windows it also includes every directory where there + is a DLL needed to run those executables. +- `LD_LIBRARY_PATH` includes every directory where there is a shared library that + would be installed into `libdir`. This allows to run system application using + custom build of some libraries. For example running system GEdit when building + GTK from git. On OSX the environment variable is `DYLD_LIBRARY_PATH` and + `PATH` on Windows. +- `GI_TYPELIB_PATH` includes every directory where a GObject Introspection + typelib is built. This is automatically set when using `gnome.generate_gir()`. + +## `-pipe` no longer used by default + +Meson used to add the `-pipe` command line argument to all compilers +that supported it, but no longer does. If you need this, then you can +add it manually. However note that you should not do this unless you +have actually measured that it provides performance improvements. In +our tests we could not find a case where adding `-pipe` made +compilation faster and using `-pipe` [can cause sporadic build +failures in certain +cases](https://github.com/mesonbuild/meson/issues/8508). + +## `meson.add_dist_script()` allowd in subprojects + +`meson.add_dist_script()` can now be invoked from a subproject, it was a hard +error in earlier versions. Subproject dist scripts will only be executed +when running `meson dist --include-subprojects`. `MESON_PROJECT_SOURCE_ROOT`, +`MESON_PROJECT_BUILD_ROOT` and `MESON_PROJECT_DIST_ROOT` environment variables +are set when dist scripts are run. They are identical to `MESON_SOURCE_ROOT`, +`MESON_BUILD_ROOT` and `MESON_DIST_ROOT` for main project scripts, but for +subproject scripts they have the path to the root of the subproject appended, +usually `subprojects/`. + +Note that existing dist scripts likely need to be modified to use those new +environment variables instead of `MESON_DIST_ROOT` to work properly when used +from a subproject. + +## Do not add custom target dir to header path if `implicit_include_directories` is `false` + +If you do the following: + +```meson +# in some subdirectory +gen_h = custom_target(...) +# in some other directory +executable('foo', 'foo.c', gen_h) +``` + +then the output directory of the custom target is automatically added +to the header search path. This is convenient, but sometimes it can +lead to problems. Starting with this version, the directory will no +longer be put in the search path if the target has +`implicit_include_directories: false`. In these cases you need to set +up the path manually with `include_directories`. + +## Multiple append() and prepend() in `environment()` object + +`append()` and `prepend()` methods can now be called multiple times +on the same `varname`. Earlier Meson versions would warn and only the last +opperation was taking effect. + +```meson +env = environment() + +# MY_PATH will be '0:1:2:3' +env.set('MY_PATH', '1') +env.append('MY_PATH', '2') +env.append('MY_PATH', '3') +env.prepend('MY_PATH', '0') +``` + + +## `dep.get_variable(varname)` + +`dep.get_variable()` now has `varname` as first positional argument. +It is used as default value for `cmake`, `pkgconfig`, `configtool` and `internal` +keyword arguments. 
It is useful in the common case where `pkgconfig` and `internal` +use the same variable name, in which case it's easier to write `dep.get_variable('foo')` +instead of `dep.get_variable(pkgconfig: 'foo', internal: 'foo')`. + + +## clang-format include and ignore lists + +When clang-format is installed and a `.clang-format` file is found at the main +project's root source directory, Meson automatically adds a `clang-format` target +that reformat all C and C++ files. + +It is now possible to restrict files to be reformatted with optional +`.clang-format-include` and `.clang-format-ignore` files. + +The file `.clang-format-include` contains a list of patterns matching the files +that will be reformatted. The `**` pattern matches this directory and all +subdirectories recursively. Empty lines and lines starting with `#` are ignored. +If `.clang-format-include` is not found, the pattern defaults to `**/*` which +means all files recursively in the source directory but has the disadvantage to +walk the whole source tree which could be slow in the case it contains lots of +files. + +Example of `.clang-format-include` file: +``` +# All files in src/ and its subdirectories +src/**/* + +# All files in include/ but not its subdirectories +include/* +``` + +The file `.clang-format-ignore` contains a list of patterns matching the files +that will be excluded. Files matching the include list (see above) that match +one of the ignore pattern will not be reformatted. Unlike include patters, ignore +patterns does not support `**` and a single `*` match any characters including +path separators. Empty lines and lines starting with `#` are ignored. + +The build directory and file without a well known C or C++ suffix are always +ignored. + +Example of `.clang-format-ignore` file: +``` +# Skip C++ files in src/ directory +src/*.cpp +``` + +A new target `clang-format-check` has been added. It returns an error code if +any file needs to be reformatted. This is intended to be used by CI. + +## Introducing format strings to the Meson language + +In addition to the conventional `'A string @0@ to be formatted @1@'.format(n, m)` +method of formatting strings in the Meson language, there's now the additional +`f'A string @n@ to be formatted @m@'` notation that provides a non-positional +and clearer alternative. Meson's format strings are currently restricted to +identity-expressions, meaning `f'format @'m' + 'e'@'` will not parse. + +## Skip subprojects installation + +It is now possible to skip installation of some or all subprojects. This is +useful when subprojects are internal dependencies static linked into the main +project. + +By default all subprojects are still installed. +- `meson install -C builddir --skip-subprojects` installs only the main project. +- `meson install -C builddir --skip-subprojects foo,bar` installs the main project + and all subprojects except for subprojects `foo` and `bar` if they are used. + +## String `.replace()` + +String objects now have a method called replace for replacing all instances of a +substring in a string with another. + +```meson +s = 'aaabbb' +s = s.replace('aaa', 'bbb') +# 's' is now 'bbbbbb' +``` + +## `meson.get_cross_property()` has been deprecated + +It's a pure subset of `meson.get_external_property`, and works strangely in +host == build configurations, since it would be more accurately described as +`get_host_property`. 
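+
+The replacement is a drop-in change; the property name and fallback below are
+placeholders:
+
+```meson
+# Before (deprecated):
+plugin_dir = meson.get_cross_property('plugin_dir', 'plugins')
+
+# After; behaves the same when cross compiling and also reads properties
+# defined in native files:
+plugin_dir = meson.get_external_property('plugin_dir', 'plugins')
+```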
+
+## New `range()` function
+
+``` meson
+    rangeobject range(stop)
+    rangeobject range(start, stop[, step])
+```
+
+Returns an opaque object that can only be used in `foreach` statements.
+- `start` must be an integer greater than or equal to 0. Defaults to 0.
+- `stop` must be an integer greater than or equal to `start`.
+- `step` must be an integer greater than or equal to 1. Defaults to 1.
+
+It causes the `foreach` loop to be called with values from `start` (included)
+to `stop` (excluded), incrementing by `step` after each iteration.
+
+```meson
+# Loop 15 times with i from 0 to 14 included.
+foreach i : range(15)
+    ...
+endforeach
+```
+
+The range object can also be assigned to a variable and indexed.
+```meson
+r = range(5, 10, 2)
+assert(r[2] == 9)
+```
+
+
+## Xcode improvements
+
+The Xcode backend has been much improved and should now be usable
+enough for day-to-day development.
+
+## Use fallback from wrap file when force fallback
+
+An optional dependency like the one below will now fall back to the subproject
+defined in the wrap file when `wrap_mode` is set to `forcefallback`
+or `force_fallback_for` contains the subproject.
+
+```meson
+# required is false because we could fall back to cc.find_library(), but in the
+# forcefallback case this now configures the subproject.
+dep = dependency('foo-1.0', required: false)
+if not dep.found()
+  dep = cc.find_library('foo', has_headers: 'foo.h')
+endif
+```
+
+```ini
+[wrap-file]
+...
+[provide]
+dependency_names = foo-1.0
+```
+
+## `error()` with multiple arguments
+
+Just like `warning()` and `message()`, `error()` can now take more than one
+argument; the arguments are separated by a space.
+
+## Specify man page locale during installation
+
+Locale directories can now be passed to `install_man`:
+
+```meson
+# instead of
+# install_data('foo.fr.1', install_dir: join_paths(get_option('mandir'), 'fr', 'man1'), rename: 'foo.1')
+install_man('foo.fr.1', locale: 'fr')
+```
+
+## Passing `custom_target()` output to `pkg.generate()`
+
+It is now allowed to pass libraries generated by a `custom_target()` to the
+pkg-config file generator. The output filename must have a known library extension
+such as `.a`, `.so`, etc.
+
+## JDK System Dependency
+
+When building projects such as those interacting with the JNI, you need access
+to a few header files located in a Java installation. This system dependency
+will add the correct include paths to your target. It assumes that either
+`JAVA_HOME` is set to a valid Java installation, or the default `javac` on
+your system is located in the `bin` directory of a Java installation. Note:
+symlinks are resolved.
+
+```meson
+jdk = dependency('jdk', version : '>=1.8')
+```
+
+Currently this system dependency only works on `linux`, `win32`, and `darwin`.
+This can easily be extended given the correct information about your compiler
+and platform in an issue.
+
+## `meson subprojects update --reset` now re-extracts tarballs
+
+When using the `--reset` option, the source tree of `[wrap-file]` subprojects is now
+deleted and re-extracted from cached tarballs, or re-downloaded. This is because
+Meson has no way to know if the source tree or the wrap file has been modified,
+and `--reset` should guarantee that the latest code is used on the next reconfigure.
+
+Use `--reset` with caution if you make local changes to non-git subprojects.
+
+## Allow using generator with CustomTarget or Index of CustomTarget.
+
+Calling `generator.process()` with either a CustomTarget or an Index of a
+CustomTarget as files is now permitted.
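+
+For illustration, a sketch with a stand-in generator command (a real code
+generator would normally be used in place of the copy step):
+
+```meson
+python = find_program('python3')
+
+gen = generator(python,
+  output : '@BASENAME@.copy',
+  arguments : ['-c', 'import shutil, sys; shutil.copy(sys.argv[1], sys.argv[2])',
+               '@INPUT@', '@OUTPUT@'],
+)
+
+# Hypothetical custom target producing an input for the generator.
+ct = custom_target('gen-input',
+  output : 'generated.txt',
+  command : [python, '-c', 'open("@OUTPUT@", "w").write("data")'],
+)
+
+# Both the whole custom target and one of its indexed outputs can now be
+# passed to process().
+processed = gen.process(ct)
+also_processed = gen.process(ct[0])
+```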
+ +## Qt Dependency uses a Factory + +This separates the Pkg-config and QMake based discovery methods into two +distinct classes in the backend. This allows using +`dependency.get_variable()` and `dependency.get_pkg_config_variable()`, as +well as being a cleaner implementation. + +## Purge subprojects folder + +It is now possible to purge a subprojects folder of artifacts created +from wrap-based subprojects including anything in `packagecache`. This is useful +when you want to return to a completely clean source tree or busting caches with +stale patch directories or caches. By default the command will only print out +what it is removing. You need to pass `--confirm` to the command for actual +artifacts to be purged. + +By default all wrap-based subprojects will be purged. + +- `meson subprojects purge` prints non-cache wrap artifacts which will be +purged. +- `meson subprojects purge --confirm` purges non-cache wrap artifacts. +- `meson subprojects purge --confirm --include-cache` also removes the cache +artifacts. +- `meson subprojects purge --confirm subproj1 subproj2` removes non-cache wrap +artifacts associated with the listed subprojects. + +## Check if native or cross-file properties exist + +It is now possible to check whether a native property or a cross-file property +exists with `meson.has_external_property('foo')`. This is useful if the +property in question is a boolean and one wants to distinguish between +"set" and "not provided" which can't be done the usual way by passing a +fallback parameter to `meson.get_external_property()` in this particular case. + +## `summary()` accepts features + +Build feature options can be passed to `summary()` as the value to be printed. + +## Address sanitizer support for Visual Studio + +The `b_sanitize` option for enabling Address sanitizer now works with +the Visual Studio compilers. This requires [a sufficiently new version +of Visual +Studio](https://devblogs.microsoft.com/cppblog/address-sanitizer-for-msvc-now-generally-available/). + diff --git a/meson/docs/markdown/Release-notes-for-0.59.0.md b/meson/docs/markdown/Release-notes-for-0.59.0.md new file mode 100644 index 000000000..b6ce6541c --- /dev/null +++ b/meson/docs/markdown/Release-notes-for-0.59.0.md @@ -0,0 +1,235 @@ +--- +title: Release 0.59.0 +short-description: Release notes for 0.59.0 +... + +# New features + +## Unescaped variables in pkgconfig files + +Spaces in variable values are escaped with `\`, this is required in the case the +value is a path that and is used in `cflags` or `libs` arguments. This was an +undocumented behaviour that caused issues in the case the variable is a space +separated list of items. + +For backward compatibility reasons this behaviour could not be changed, new +keyword arguments have thus been added: `unescaped_variables` and +`unescaped_uninstalled_variables`. + +```meson +pkg = import('pkgconfig') +... +pkg.generate(lib, + variables: { + 'mypath': '/path/with spaces/are/escaped', + }, + unescaped_variables: { + 'mylist': 'Hello World Is Not Escaped', + }, +) +``` + +## The custom_target() function now accepts a feed argument + +It is now possible to provide a `feed: true` argument to `custom_target()` to +pipe the target's input file to the program's standard input. + +## Separate functions for qt preprocess + +`qt.preprocess` is a large, complicated function that does a lot of things, +a new set of `compile_*` functions have been provided as well. These are +conceptually simpler, as they do a single thing. 
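+
+For illustration, a minimal sketch using the separate functions (the file and
+Qt component names here are hypothetical):
+
+```meson
+qt5 = import('qt5')
+
+# Each call does exactly one thing, unlike the all-in-one qt.preprocess().
+moc_files = qt5.compile_moc(headers : 'mywidget.h')
+ui_files = qt5.compile_ui(sources : 'mywidget.ui')
+res_files = qt5.compile_resources(sources : 'resources.qrc')
+
+executable('myapp', 'main.cpp', moc_files, ui_files, res_files,
+  dependencies : dependency('qt5', modules : ['Core', 'Widgets']))
+```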
+ +## Cython as as first class language + +Meson now supports Cython as a first class language. This means you can write: + +```meson +project('my project', 'cython') + +py = import('python').find_installation() +dep_py = py.dependency() + +py.extension_module( + 'foo', + 'foo.pyx', + dependencies : dep_py, +) +``` + +And avoid the step through a generator that was previously required. + +## Support for the Wine Resource Compiler + +Users can now choose `wrc` as the `windres` binary in their cross files and +`windows.compile_resources` will handle it correctly. Together with `winegcc` +patches in Wine 6.12 this enables basic support for compiling projects as a +winelib by specifying `winegcc`/`wineg++` as the compiler and `wrc` as the +resource compiler in a cross file. + +## New `vs2012` and `vs2013` backend options + +Adds the ability to generate Visual Studio 2012 and 2013 projects. This is an +extension to the existing Visual Studio 2010 projects so that it is no longer +required to manually upgrade the generated Visual Studio 2010 projects. + +Generating Visual Studio 2010 projects has also been fixed since its developer +command prompt does not provide a `%VisualStudioVersion%` envvar. + +## Developer environment + +Expand the support for the `link_whole:` project option for pre-Visual Studio 2015 +Update 2, where previously Visual Studio 2015 Update 2 or later was required for +this, for the Ninja backend as well as the vs2010 (as well as the newly-added +vs2012 and vs2013 backends). + +## Fs Module now accepts files objects + +It is now possible to define a `files()` object and run most Fs module +functions on the file, rather than passing a string and hoping it is in the +same directory. + + +## Compiler argument checking for `get_supported_arguments` + +The compiler method `get_supported_arguments` now supports +a new keyword argument named `checked` that can be set to +one of `warn`, `require` or `off` (defaults to `off`) to +enforce argument checks. + +## New custom dependency for libintl + +Meson can now find the library needed for translating messages via gettext. +This works both on systems where libc provides gettext, such as GNU or musl, +and on systems where the gettext project's standalone intl support library is +required, such as macOS. + +Rather than doing something such as: + +``` +intl_dep = dependency('', required: false) + +if cc.has_function('ngettext') + intl_found = true +else + intl_dep = cc.find_library('intl', required: false) + intl_found = intl_dep.found() +endif + +if intl_found + # build options that need gettext + conf.set('ENABLE_NLS', 1) +endif +``` + +one may simply use: + +``` +intl_dep = dependency('intl') + +if intl_dep.found() + # build options that need gettext + conf.set('ENABLE_NLS', 1) +endif +``` + +## Parallelized `meson subprojects` commands + +All `meson subprojects` commands are now run on each subproject in parallel by +default. The number of processes can be controlled with `--num-processes` +argument. + +This speeds up considerably IO-bound operations such as downloads and git fetch. + +## Using Vala no longer requires C in the project languages + +Meson will now add C automatically. Since the use of C is an implementation +detail of Vala, Meson shouldn't require users to add it. + +## The `import()` function gains `required` and `disabler` arguments + +In addition, modules now have a `found()` method, like programs and +dependencies. 
This allows them to be conditionally required, and used in most +places that an object with a `found()` method can be. + +## Objective C/C++ standard versions + +Objective C and C++ compilations will from now on use the language +versions set in `c_std` and `cpp_std`, respectively. It is not +possible to set the language version separately for Objective C and +plain C. + +## Qt.preprocess source arguments deprecated + +The `qt.preprocess` method currently has this signature: +`qt.preprocess(name: str | None, *srcs: str)`, this is not a nice signature +because it's confusing, and there's a `sources` keyword argument as well. +Both of these pass sources through unmodified, this is a bit of a historical +accident, and not the way that any other module works. These have been +deprecated, so instead of: +```meson +sources = qt.preprocess( + name, + list, of, sources, + sources : [more, sources], + ... # things to process, +) + +executable( + 'foo', + sources, +) +``` +use +```meson +processed = qt.preprocess( + name, + ... # thins to process +) + +executable( + 'foo', + 'list', 'of', 'sources', 'more', 'sources', processed, +) +``` + +## New `build target` methods + +The [`build target` object](Reference-manual.md#build-target-object) now supports +the following two functions, to ensure feature compatebility with +[`external program` objects](Reference-manual.html#external-program-object): + +- `found()`: Always returns `true`. This function is meant + to make executables objects feature compatible with + `external program` objects. This simplifies + use-cases where an executable is used instead of an external program. + +- `path()`: **(deprecated)** does the exact same as `full_path()`. + **NOTE:** This function is solely kept for compatebility + with `external program` objects. It will be + removed once the, also deprecated, corresponding `path()` function in the + `external program` object is removed. + +## Automatically set up Visual Studio environment + +When Meson is run on Windows it will automatically set up the +environment to use Visual Studio if no other compiler toolchain +can be detected. This means that you can run Meson commands from +any command prompt or directly from any IDE. This sets up the +64 bit native environment. If you need any other, then you +need to set it up manually as before. + +## `gnome.compile_schemas()` sets `GSETTINGS_SCHEMA_DIR` into devenv + +When using `gnome.compile_schemas()` the location of the compiled schema is +added to `GSETTINGS_SCHEMA_DIR` environment variable when using +[`meson devenv`](Commands.md#devenv) command. + +## `update_desktop_database` added to `gnome.post_install()` + +Applications that install a `.desktop` file containing a `MimeType` need to update +the cache upon installation. Most applications do that using a custom script, +but it can now be done by Meson directly. + +See [`gnome.post_install()`](Gnome-module.md#gnomepost_install). + diff --git a/meson/docs/markdown/Release-notes.md b/meson/docs/markdown/Release-notes.md new file mode 100644 index 000000000..d7de275fc --- /dev/null +++ b/meson/docs/markdown/Release-notes.md @@ -0,0 +1 @@ +# Release notes diff --git a/meson/docs/markdown/Release-procedure.md b/meson/docs/markdown/Release-procedure.md new file mode 100644 index 000000000..a7ef689c6 --- /dev/null +++ b/meson/docs/markdown/Release-procedure.md @@ -0,0 +1,69 @@ +# Release procedure + +**This page is WIP. 
The following procedure is not yet approved for use** + +# Trunk + +Meson operates under the principle that trunk should (in theory) be +always good enough for release. That is, all code merged in trunk must +pass all unit tests. Any broken code should either be fixed or +reverted immediately. + +People who are willing to tolerate the occasional glitch should be +able to use Meson trunk for their day to day development if they so +choose. + +# Major releases + +Major releases are currently in the form 0.X.0, where X is an +increasing number. We aim to do a major release roughly once a month, +though the schedule is not set in stone. + +Before a major release is made a stable branch will be made, and +0.X.0-rc1 release candidate will be made. A new milestone for 0.X.0 +will be made, and all bugs effecting the RC will be assigned to this +milestone. Patches fixing bugs in the milestone will be picked to the +stable branch, and normal development will continue on the master +branch. Every week after after this a new release candidate will be +made until all bugs are resolved in that milestone. When all of the +bugs are fixed the 0.X.0 release will be made. + +# Bugfix releases + +Bugfix releases contain only minor fixes to major releases and are +designated by incrementing the last digit of the version number. The +criteria for a bug fix release is one of the following: + + - release has a major regression compared to the previous release (making + existing projects unbuildable) + - the release has a serious bug causing data loss or equivalent + - other unforeseen major issue + +In these cases a bug fix release can be made. It shall contain _only_ +the fix for the issue (or issues) in question and other minor bug +fixes. Only changes that have already landed in trunk will be +considered for inclusion. No new functionality shall be added. + +# Requesting a bug fix release + +The process for requesting that a bug fix release be made goes roughly +as follows: + + - file a bug about the core issue + - file a patch fixing it if possible + - contact the development team and request a bug fix release (IRC is the + preferred contact medium) + +The request should contain the following information: + + - the issue in question + - whether it has already caused problems for real projects + - an estimate of how many people and projects will be affected + +There is no need to write a long and complicated request report. +Something like the following is sufficient: + +> The latest release has a regression where trying to do Foo using Bar +breaks. This breaks all projects that use both, which includes at +least [list of affected projects]. This causes problems for X amount +of people and because of this we should do a bugfix release. diff --git a/meson/docs/markdown/Reproducible-builds.md b/meson/docs/markdown/Reproducible-builds.md new file mode 100644 index 000000000..1e00feae8 --- /dev/null +++ b/meson/docs/markdown/Reproducible-builds.md @@ -0,0 +1,20 @@ +# Reproducible builds + +A reproducible build means the following (as quoted from [the +reproducible builds project site](https://reproducible-builds.org/)): + +> Reproducible builds are a set of software development practices that + create a verifiable path from human readable source code to the + binary code used by computers. + +Roughly what this means is that if two different people compile the +project from source, their outputs are bitwise identical to each +other. 
This allows people to verify that binaries downloadable from +the net actually come from the corresponding sources and have not, for +example, had malware added to them. + +Meson aims to support reproducible builds out of the box with zero +additional work (assuming the rest of the build environment is set up +for reproducibility). If you ever find a case where this is not +happening, it is a bug. Please file an issue with as much information +as possible and we'll get it fixed. diff --git a/meson/docs/markdown/Rewriter.md b/meson/docs/markdown/Rewriter.md new file mode 100644 index 000000000..018434f2b --- /dev/null +++ b/meson/docs/markdown/Rewriter.md @@ -0,0 +1,247 @@ +--- +short-description: Automatic modification of the build system files +... + +# Meson file rewriter + +Since version 0.50.0, Meson has the functionality to perform some +basic modification on the `meson.build` files from the command line. +The currently supported operations are: + +- For build targets: + - Add/Remove source files + - Add/Remove targets + - Modify a select set of kwargs + - Print some JSON information +- For dependencies: + - Modify a select set of kwargs +- For the project function: + - Modify a select set of kwargs + - Modify the default options list + +The rewriter has both, a normal command line interface and a "script +mode". The normal CLI is mostly designed for everyday use. The "script +mode", on the other hand, is meant to be used by external programs +(IDEs, graphical frontends, etc.) + +The rewriter itself is considered stable, however the user interface +and the "script mode" API might change in the future. These changes +may also break backwards comaptibility to older releases. + +We are also open to suggestions for API improvements. + +## Using the rewriter + +All rewriter functions are accessed via `meson rewrite`. The Meson +rewriter assumes that it is run inside the project root directory. If +this isn't the case, use `--sourcedir` to specify the actual project +source directory. + +### Adding and removing sources + +The most common operations will probably be the adding and removing of source +files to a build target. This can be easily done with: + +```bash +meson rewrite target {add/rm} [list of sources] +``` + +For instance, given the following example + +```meson +src = ['main.cpp', 'fileA.cpp'] + +exe1 = executable('testExe', src) +``` + +the source `fileB.cpp` can be added with: + +```bash +meson rewrite target testExe add fileB.cpp +``` + +After executing this command, the new `meson.build` will look like this: + +```meson +src = ['main.cpp', 'fileA.cpp', 'fileB.cpp'] + +exe1 = executable('testExe', src) +``` + +In this case, `exe1` could also have been used for the target name. +This is possible because the rewriter also searches for assignments +and unique Meson IDs, which can be acquired with introspection. If +there are multiple targets with the same name, Meson will do nothing +and print an error message. + +For more information see the help output of the rewriter target +command. + +### Setting the project version + +It is also possible to set kwargs of specific functions with the +rewriter. The general command for setting or removing kwargs is: + +```bash +meson rewrite kwargs {set/delete} ... 
+``` + +For instance, setting the project version can be achieved with this command: + +```bash +meson rewrite kwargs set project / version 1.0.0 +``` + +Currently, only the following function types are supported: + +- dependency +- target (any build target, the function ID is the target name/ID) +- project (the function ID must be `/` since project() can only be called once) + +For more information see the help output of the rewrite kwargs command. + +Note msys bash may expand `/` to a path. Passing `//` will be +converted to `/` by msys bash but in order to keep usage +shell-agnostic, the rewrite command also allows `//` as the function +ID such that it will work in both msys bash and other shells. + +### Setting the project default options + +For setting and deleting default options, use the following command: + +```bash +meson rewrite default-options {set/delete} ... +``` + +## Limitations + +Rewriting a Meson file is not guaranteed to keep the indentation of +the modified functions. Additionally, comments inside a modified +statement will be removed. Furthermore, all source files will be +sorted alphabetically. + +For instance adding `e.c` to srcs in the following code + +```meson +# Important comment + +srcs = [ +'a.c', 'c.c', 'f.c', +# something important about b + 'b.c', 'd.c', 'g.c' +] + +# COMMENT +``` + +would result in the following code: + +```meson +# Important comment + +srcs = [ + 'a.c', + 'b.c', + 'c.c', + 'd.c', + 'e.c', + 'f.c', + 'g.c' +] + +# COMMENT +``` + +## Using the "script mode" + +The "script mode" should be the preferred API for third party +programs, since it offers more flexibility and higher API stability. +The "scripts" are stored in JSON format and executed with `meson +rewrite command `. + +The JSON format is defined as follows: + +```json +[ + { + "type": "function to execute", + ... + }, { + "type": "other function", + ... + }, + ... +] +``` + +Each object in the main array must have a `type` entry which specifies which +function should be executed. + +Currently, the following functions are supported: + +- target +- kwargs +- default_options + +### Target modification format + +The format for the type `target` is defined as follows: + +```json +{ + "type": "target", + "target": "target ID/name/assignment variable", + "operation": "one of ['src_add', 'src_rm', 'target_rm', 'target_add', 'info']", + "sources": ["list", "of", "source", "files", "to", "add, remove"], + "subdir": "subdir where the new target should be added (only has an effect for operation 'tgt_add')", + "target_type": "function name of the new target -- same as in the CLI (only has an effect for operation 'tgt_add')" +} +``` + +The keys `sources`, `subdir` and `target_type` are optional. + +### kwargs modification format + +The format for the type `target` is defined as follows: + +```json +{ + "type": "kwargs", + "function": "one of ['dependency', 'target', 'project']", + "id": "function ID", + "operation": "one of ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']", + "kwargs": { + "key1": "value1", + "key2": "value2", + ... + } +} +``` + +### Default options modification format + +The format for the type `default_options` is defined as follows: + +```json +{ + "type": "default_options", + "operation": "one of ['set', 'delete']", + "options": { + "opt1": "value1", + "opt2": "value2", + ... 
+ } +} +``` + +For operation `delete`, the values of the `options` can be anything +(including `null`) + +## Extracting information + +The rewriter also offers operation `info` for the types `target` and +`kwargs`. When this operation is used, Meson will print a JSON dump to +stderr, containing all available information to the rewriter about the +build target / function kwargs in question. + +The output format is currently experimental and may change in the future. diff --git a/meson/docs/markdown/Run-targets.md b/meson/docs/markdown/Run-targets.md new file mode 100644 index 000000000..04959ab3b --- /dev/null +++ b/meson/docs/markdown/Run-targets.md @@ -0,0 +1,55 @@ +--- +short-description: Targets to run external commands +... + +# Run targets + +Sometimes you need to have a target that just runs an external +command. As an example you might have a build target that reformats +your source code, runs `cppcheck` or something similar. In Meson this +is accomplished with a so called *run target*. + +The recommended way of doing this is writing the command(s) you want +to run to a script file. Here's an example script. + +```bash +#!/bin/sh + +cd "${MESON_SOURCE_ROOT}" +inspector_command -o "${MESON_BUILD_ROOT}/inspection_result.txt" +``` + +Note the two environment variables `MESON_SOURCE_ROOT` and +`MESON_BUILD_ROOT`. These are absolute paths to your project's source +and build directories and they are automatically set up by Meson. In +addition to these Meson also sets up the variable `MESON_SUBDIR`, +which points to the subdirectory where the run command was specified. +Most commands don't need to set up this. + +Note how the script starts by cd'ing into the source dir. Meson does +not guarantee that the script is run in any specific directory. +Whether you need to do the same depends on what your custom target +wants to do. + +To make this a run target we write it to a script file called +`scripts/inspect.sh` and specify it in the top level Meson file like +this. + +```meson +run_target('inspector', + command : 'scripts/inspect.sh') +``` + +Run targets are not run by default. To run it run the following command. + +```console +$ meson compile inspector +``` + +All additional entries in `run_target`'s `command` array are passed +unchanged to the inspector script, so you can do things like this: + +```meson +run_target('inspector', + command : ['scripts/inspect.sh', '--exclude', 'tests']) +``` diff --git a/meson/docs/markdown/Running-Meson.md b/meson/docs/markdown/Running-Meson.md new file mode 100644 index 000000000..2873cbc1b --- /dev/null +++ b/meson/docs/markdown/Running-Meson.md @@ -0,0 +1,210 @@ +--- +short-description: Building a project with Meson +... + +# Running Meson + +There are two different ways of invoking Meson. First, you can run it +directly from the source tree with the command +`/path/to/source/meson.py`. Second, Meson may also be installed in which case +the command is simply `meson`. In this manual we only use the latter +format for simplicity. + +At the time of writing only a command line version of Meson is +available. This means that Meson must be invoked using the terminal. +If you wish to use the MSVC compiler, you need to run Meson under +"Visual Studio command prompt". + +All available Meson commands are listed on the [commands reference +page](Commands.md). + +## Configuring the build directory + +Let us assume that we have a source tree that has a Meson build +system. This means that at the topmost directory has a file called +`meson.build`. 
We run the following commands to get the build started. + +```sh +cd /path/to/source/root +meson setup builddir +``` + +We invoke Meson with the `setup` command, giving it the location of the build +directory. Meson uses [out of source +builds](http://voices.canonical.com/jussi.pakkanen/2013/04/16/why-you-should-consider-using-separate-build-directories/). + +Hint: The syntax of Meson is `meson [command] [arguments] [options]`. +The `setup` command takes a `builddir` and a `srcdir` argument. If no +`srcdir` is given Meson will deduce the `srcdir` based on `pwd` and +the location of `meson.build`. + +Meson then loads the build configuration file and writes the +corresponding build backend in the build directory. By default Meson +generates a *debug build*, which turns on basic warnings and debug +information and disables compiler optimizations. + +Additionally, the invocation can pass options to Meson. The list of +options is documented [here](Builtin-options.md). + +You can specify a different type of build with the `--buildtype` command line +argument. It can have one of the following values. + +| value | meaning | +| ------ | -------- | +| `plain` | no extra build flags are used, even for compiler warnings, useful for distro packagers and other cases where you need to specify all arguments by yourself | +| `debug` | debug info is generated but the result is not optimized, this is the default | +| `debugoptimized` | debug info is generated and the code is optimized (on most compilers this means `-g -O2`) | +| `release` | full optimization, no debug info | + +The build directory is mandatory. The reason for this is that it +simplifies the build process immensely. Meson will not, under any +circumstances, write files inside the source directory (if it does, it +is a bug and should be fixed). This means that the user does not need +to add a bunch of files to their revision control's ignore list. It +also means that you can create arbitrarily many build directories for +any given source tree. + +For example, if we wanted to test building the source code with the +Clang compiler instead of the system default, we could just type the +following commands: + +```sh +cd /path/to/source/root +CC=clang CXX=clang++ meson setup buildclang +``` + +This separation is even more powerful if your code has multiple +configuration options (such as multiple data backends). You can create +a separate subdirectory for each of them. You can also have build +directories for optimized builds, code coverage, static analysis and +so on. They are all neatly separated and use the same source tree. +Changing between different configurations is just a question of +changing to the corresponding directory. + +Unless otherwise mentioned, all following command line invocations are +meant to be run in the source directory. + +By default, Meson will use the Ninja backend to build your project. If +you wish to use any of the other backends, you need to pass the +corresponding argument during configuration time. As an example, here +is how you would use Meson to generate a Visual Studio solution. + +```sh +meson setup --backend=vs +``` + +You can then open the generated solution with Visual Studio and +compile it in the usual way. A list of backends can be obtained with +`meson setup --help`. + +## Environment variables + +Sometimes you want to add extra compiler flags, this can be done by +passing them in environment variables when calling Meson. 
See [the +reference +tables](Reference-tables.md#compiler-and-linker-flag-environment-variables) +for a list of all the environment variables. Be aware however these +environment variables are only used for the native compiler and will +not affect the compiler used for cross-compiling, where the flags +specified in the cross file will be used. + +Furthermore it is possible to stop Meson from adding flags itself by +using the `--buildtype=plain` option, in this case you must provide +the full compiler and linker arguments needed. + +## Building from the source + +To start the build, simply type the following command. + +```sh +meson compile -C builddir +``` + +See [`meson compile` description](Commands.md#compile) for more info. + +### Building directly with ninja + +By default Meson uses the [Ninja build +system](https://ninja-build.org/) to actually build the code. To start +the build, simply type the following command. + +```sh +ninja -C builddir +``` + +The main usability difference between Ninja and Make is that Ninja +will automatically detect the number of CPUs in your computer and +parallelize itself accordingly. You can override the amount of +parallel processes used with the command line argument `-j `. + +It should be noted that after the initial configure step `ninja` is +the only command you ever need to type to compile. No matter how you +alter your source tree (short of moving it to a completely new +location), Meson will detect the changes and regenerate itself +accordingly. This is especially handy if you have multiple build +directories. Often one of them is used for development (the "debug" +build) and others only every now and then (such as a "static analysis" +build). Any configuration can be built just by `cd`'ing to the +corresponding directory and running Ninja. + +## Running tests + +Meson provides native support for running tests. The command to do +that is simple. + +```sh +meson test -C builddir +``` + +See [`meson test` description](Commands.md#test) for more info. + +Meson does not force the use of any particular testing framework. You +are free to use GTest, Boost Test, Check or even custom executables. + +Note: it can be also invoked directly with ninja with the following command: +```sh +ninja -C builddir test +``` + +## Installing + +Installing the built software is just as simple. + +```sh +meson install -C builddir +``` + +See [`meson install` description](Commands.md#install) for more info. + +Note that Meson will only install build targets explicitly tagged as +installable, as detailed in the [installing targets +documentation](Installing.md). + +By default Meson installs to `/usr/local`. This can be changed by +passing the command line argument `--prefix /your/prefix` to Meson +during configure time. Meson also supports the `DESTDIR` variable used +in e.g. building packages. It is used like this: + +```sh +DESTDIR=/path/to/staging meson install -C builddir +``` + +Note: it can be also invoked directly with ninja with the following +command: + +```sh +ninja -C builddir install +``` + +## Command line help + +Meson has a standard command line help feature. It can be accessed +with the following command. + + meson --help + +## Exit status + +Meson exits with status 0 if successful, 1 for problems with the +command line or meson.build file, and 2 for internal errors. 
diff --git a/meson/docs/markdown/Rust-module.md b/meson/docs/markdown/Rust-module.md
new file mode 100644
index 000000000..b89c052c5
--- /dev/null
+++ b/meson/docs/markdown/Rust-module.md
@@ -0,0 +1,83 @@
+---
+short-description: Rust language integration module
+authors:
+    - name: Dylan Baker
+      email: dylan@pnwbakers.com
+      years: [2020, 2021]
+...
+
+# Unstable Rust module
+
+*(new in 0.57.0)*
+
+**Note** Unstable modules make no backwards compatible API guarantees.
+
+The rust module provides helpers to integrate Rust code into Meson. The
+goal is to make using Rust in Meson more pleasant, while still
+remaining mesonic; this means that it attempts to make Rust work more
+like Meson, rather than Meson work more like Rust.
+
+## Functions
+
+### test(name: string, target: library | executable, dependencies: []Dependency)
+
+This function creates a new Rust unit test target from an existing Rust
+based target, which may be a library or executable. It does this by
+copying the sources and arguments passed to the original target and
+adding the `--test` argument to the compilation, then creates a new
+test target which calls that executable, using the Rust test protocol.
+
+This accepts all of the keyword arguments of the
+[`test`](Reference-manual.md#test) function except `protocol`; it will set
+that automatically.
+
+Additional test-only dependencies may be passed via the dependencies
+argument.
+
+### bindgen(*, input: string | BuildTarget | []string | []BuildTarget, output: string, include_directories: []include_directories, c_args: []string, args: []string)
+
+This function wraps bindgen to simplify creating Rust bindings around C
+libraries. This has two advantages over hand-rolling one's own with a
+`generator` or `custom_target`:
+
+- It handles `include_directories`, so one doesn't have to manually convert them to `-I...`
+- It automatically sets up a depfile, making the results more reliable
+
+
+It takes the following keyword arguments:
+
+- input — A list of Files, Strings, or CustomTargets. The first element is
+  the header bindgen will parse, additional elements are dependencies.
+- output — the name of the output Rust file
+- include_directories — A list of `include_directories` objects, these are
+  passed to clang as `-I` arguments
+- c_args — A list of string arguments to pass to clang untouched
+- args — A list of string arguments to pass to `bindgen` untouched.
+
+```meson
+rust = import('unstable-rust')
+
+inc = include_directories('..', '../../foo')
+
+generated = rust.bindgen(
+    'myheader.h',
+    'generated.rs',
+    include_directories : [inc, include_directories('foo')],
+    args : ['--no-rustfmt-bindings'],
+    c_args : ['-DFOO=1'],
+)
+```
+
+If the header depends on generated headers, those headers must be passed to
+`bindgen` as well to ensure proper dependency ordering; static headers do not
+need to be passed, as a proper depfile is generated:
+
+```meson
+h1 = custom_target(...)
+h2 = custom_target(...)
+
+r1 = rust.bindgen(
+    [h1, h2],  # h1 includes h2,
+    'out.rs',
+)
+```
diff --git a/meson/docs/markdown/Shipping-prebuilt-binaries-as-wraps.md b/meson/docs/markdown/Shipping-prebuilt-binaries-as-wraps.md
new file mode 100644
index 000000000..73cf48b08
--- /dev/null
+++ b/meson/docs/markdown/Shipping-prebuilt-binaries-as-wraps.md
@@ -0,0 +1,36 @@
+# Shipping prebuilt binaries as wraps
+
+A common dependency case, especially on Windows, is the need to
+provide dependencies as prebuilt binaries rather than Meson projects
+that you build from scratch. 
Common reasons include not having access +to source code, not having the time and effort to rewrite a legacy +system's build definitions to Meson or just the fact that compiling +the dependency projects takes too long. + +Packaging a project is straightforward. As an example let's look at a +case where the project consists of one static library called `bob` and +some headers. To create a binary dependency project we put the static +library at the top level and headers in a subdirectory called +`include`. The Meson build definition would look like the following. + +```meson +project('binary dep', 'c') + +cc = meson.get_compiler('c') +bin_dep = declare_dependency( + dependencies : cc.find_library('bob', dirs : meson.current_source_dir()), + include_directories : include_directories('include')) +``` + +Now you can use this subproject as if it was a Meson project: + +```meson +project('using dep', 'c') +bob_dep = subproject('bob').get_variable('bin_dep') +executable('prog', 'prog.c', dependencies : bob_dep) +``` + +Note that often libraries compiled with different compilers (or even +compiler flags) might not be compatible. If you do this, then you are +responsible for verifying that your libraries are compatible, Meson +will not check things for you. diff --git a/meson/docs/markdown/Simd-module.md b/meson/docs/markdown/Simd-module.md new file mode 100644 index 000000000..29f3e952d --- /dev/null +++ b/meson/docs/markdown/Simd-module.md @@ -0,0 +1,72 @@ +# Unstable SIMD module + +This module provides helper functionality to build code with SIMD instructions. +Available since 0.42.0. + +**Note**: this module is unstable. It is only provided as a technology +preview. Its API may change in arbitrary ways between releases or it +might be removed from Meson altogether. + +## Usage + +This module is designed for the use case where you have an algorithm +with one or more SIMD implementation and you choose which one to use +at runtime. + +The module provides one method, `check`, which is used like this: + + rval = simd.check('mysimds', + mmx : 'simd_mmx.c', + sse : 'simd_sse.c', + sse2 : 'simd_sse2.c', + sse3 : 'simd_sse3.c', + ssse3 : 'simd_ssse3.c', + sse41 : 'simd_sse41.c', + sse42 : 'simd_sse42.c', + avx : 'simd_avx.c', + avx2 : 'simd_avx2.c', + neon : 'simd_neon.c', + compiler : cc) + +Here the individual files contain the accelerated versions of the +functions in question. The `compiler` keyword argument takes the +compiler you are going to use to compile them. The function returns an +array with two values. The first value is a bunch of libraries that +contain the compiled code. Any SIMD code that the compiler can't +compile (for example, Neon instructions on an x86 machine) are +ignored. You should pass this value to the desired target using +`link_with`. The second value is a `configuration_data` object that +contains true for all the values that were supported. For example if +the compiler did support sse2 instructions, then the object would have +`HAVE_SSE2` set to 1. + +Generating code to detect the proper instruction set at runtime is +straightforward. First you create a header with the configuration +object and then a chooser function that looks like this: + + void (*fptr)(type_of_function_here) = NULL; + + #if HAVE_NEON + if(fptr == NULL && neon_available()) { + fptr = neon_accelerated_function; + } + #endif + #if HAVE_AVX2 + if(fptr == NULL && avx2_available()) { + fptr = avx_accelerated_function; + } + #endif + + ... 
+
+    if(fptr == NULL) {
+        fptr = default_function;
+    }
+
+Each source file provides two functions, the `xxx_available` function
+to query whether the CPU currently in use supports the instruction set
+and `xxx_accelerated_function` that is the corresponding accelerated
+implementation.
+
+At the end of this function the function pointer points to the fastest
+available implementation and can be invoked to do the computation.
diff --git a/meson/docs/markdown/Simple-comparison.md b/meson/docs/markdown/Simple-comparison.md
new file mode 100644
index 000000000..a8ce17bd8
--- /dev/null
+++ b/meson/docs/markdown/Simple-comparison.md
@@ -0,0 +1,88 @@
+# A simple comparison
+
+In this experiment we generated one thousand C files with contents
+that looked like this.
+
+```c
+#include<stdio.h>
+#include"header.h"
+
+int func23() { return 0; }
+```
+
+The function number was different in each file. In addition there was
+a main C file that just called each function in turn. We then
+generated build system files for *Meson*, *CMake*, *SCons*, *Premake*
+and *Autotools* that compiled these files into a single executable.
+
+With this we measured three different things. The first is
+configuration time, that is, the time the build system takes to
+generate necessary build files. This is usually called the *configure
+step*. The time was measured in seconds.
+
+The second thing to measure was the build time. This should be limited
+by the compiler and in the optimal case should be the same for every
+build system. Four parallel processes were used in this test.
+
+The third thing we measured was the empty build time. This measures
+how much time the build system takes to check the states of all source
+files, because any of them could potentially cause a rebuild.
+
+Since CMake has two different backends, Make and Ninja, we ran the
+tests on both of them. All tests were run on a 2011 era MacBook Pro
+running Ubuntu 13.04. The tests were run multiple times and we always
+took the fastest time.
+
+Here are the results for configuration time.
+
+![Configuration times](images/conftime.png)
+
+The reason SCons got zero seconds on this test is because you cannot
+separate configure and build steps. They run as one unit. Autotools is
+the clear loser of this test as it is over an order of magnitude
+slower than the second slowest one. This configuration time includes
+both autogen and configure. All other systems take less than one
+second to do this setup, which is fast enough for a human being to
+interpret as instantaneous.
+
+![Build times](https://raw.githubusercontent.com/wiki/jpakkane/meson/buildtime.png)
+
+Build times are a bit more even. SCons is the slowest, being almost
+ten seconds slower than the second slowest. Some of it is work from
+the configure step but it still has the worst performance. Premake is
+the fastest Make-based build system, narrowly beating out Autotools.
+Both Ninja-based build systems are faster than all non-Ninja ones, with
+Meson being slightly faster. In practice the difference is minimal.
+The advantages of Ninja can be seen by comparing CMake's times when
+using Make or Ninja. It is possible to shave off 3.5 seconds (over
+20%) of the total build time just by changing the backend. The
+project's CMake configuration files don't need any changes.
+
+![No-op time](https://raw.githubusercontent.com/wiki/jpakkane/meson/emptytime.png)
+
+Empty build times reflect the performance of regular build times.
+SCons is again the slowest taking over three seconds compared to +Meson, which takes only 0.03 seconds, a difference of two orders of +magnitude. Even Autotools, the fastest Make-based system, is almost +one order of magnitude slower. Ninja holds the top spots just like in +the previous test. + +Conclusions +----- + +Build system performance matters. Even with this extremely simple +example we can find differences between various popular build systems. +As the project size increases, these differences grow even larger. +(The author has witnessed no-op build times of 30 seconds for Make +versus less than one second for Ninja when compiling the Clang +compiler.) Keeping incremental build times low is one of the major +keys of programmer productivity as it allows developers to iterate +faster and stay in the creative zone. + +Original scripts +----- + +Those who want to run these experiments themselves can download the scripts here: + +* [Generator script](https://raw.githubusercontent.com/wiki/jpakkane/meson/gen_src.py) +* [Measurement script](https://raw.githubusercontent.com/wiki/jpakkane/meson/measure.py) diff --git a/meson/docs/markdown/SimpleStart.md b/meson/docs/markdown/SimpleStart.md new file mode 100644 index 000000000..bf0d7aefb --- /dev/null +++ b/meson/docs/markdown/SimpleStart.md @@ -0,0 +1,150 @@ +--- +short-description: Simple getting started guide +... + +# The Absolute Beginner's Guide to Installing and Using Meson + +This page is meant for people who are new to using Meson and possibly +even to compiling C and/or C++ code in general. It is meant to contain +one simple way of getting your build environment up and running. If +you are more experienced and have your own preferred way of installing +and using development software, feel free to use that instead. This +guide only deals with Linux, Windows and macOS platforms. If you use +some other platform, such as one of the BSDs, you probably already +know how to install development tools on it (probably better than we +do, even). + +There are three phases to getting a development environment running. + +1. Installing a compiler toolchain +2. Installing Meson +3. Creating a project and building it + +## Installing a compiler toolchain + +### Linux + +All Linux distributions provide easy access to development tools. +Typically you need to open a terminal and execute one command, which +depends on your distro. + + - Debian, Ubuntu and derivatives: `sudo apt install build-essential` + - Fedora, Centos, RHEL and derivatives: `sudo dnf install gcc-c++` + - Arch: `sudo pacman -S gcc` + +### Windows + +The most common development toolchain on Windows is Visual Studio, +which can be downloaded from [the Visual Studio web +site](https://visualstudio.microsoft.com/). Select the Community +version unless you have bought a license. + +![Download page of Visual Studio](images/win_dlvs.png) + +Download the installer and run it. When you are given a list of things +to install, select *Desktop development with C++*. This installs both +a C and a C++ compiler. + +![Installing the Visual Studio compilers](images/win_installvs.png) + +Once the installer finishes the compiler toolchain is ready to use. + +### macOS + +On macOS the development toolchain must be installed via the Mac app +store. Search for an app called XCode and install it. + +![App store page for XCode](images/osx_xcode.png) + +*Note:* Installing XCode is not sufficient by itself. You also need to +start XCode' GUI application once. 
This will make XCode download and +install more files that are needed for compilation. + +## Installing Meson + +### Linux + +Installing Meson is just as simple as installing the compiler toolchain. + + - Debian, Ubuntu and derivatives: `sudo apt install meson ninja-build` + - Fedora, Centos, RHEL and derivatives: `sudo dnf install meson ninja-build` + - Arch: `sudo pacman -S meson` + +### Windows + +Meson provides a standard Windows `.msi` installer that can be +downloaded from [the Releases +page](https://github.com/mesonbuild/meson/releases). + +![Meson installed download](images/win_downloadmeson.png) + +Download and run it to install all the necessary bits. You can verify +that your installation is working by running the Visual Studio +developer tools command prompt that can be found in the start menu. + +![Devtool prompt](images/win_vstoolsprompt.png) + +You should be able to run both `meson` and `ninja` and query their +versions. + +![A working Windows install](images/win_working.png) + +### macOS + +Start by downloading the installation package from [the Releases +page](https://github.com/mesonbuild/meson/releases). + +![Downloading the macOS installer](images/meson_mac1.png) + +Double click the downloaded installer to start. If you are running a +new macOS version, it might refuse to run the application with the +following error message: + +![A macOS signing warning](images/meson_mac2.png) + +This can be fixed by starting System Preferences, selecting the +Security and Privacy entry. At the bottom it shows the blocked app and +you can enable it by clicking on the "Open anyway" button. + +![Security settings](images/meson_mac3.png) + +Now the installer should start. If not, double click on the downloaded +installer file again. + +![Installer running](images/meson_mac4.png) + +Once the installation is finished you can verify that the system is working via the terminal app. + +![A successful install](images/meson_mac5.png) + +## Running Meson + +Start a terminal application. On Windows you have to use the Visual +Studio Developer Tools Command Prompt as discussed above, because the +compilers are only available in that terminal. You also need to change +into your home directory (Linux and macOS terminals start in the home +directory by default). + + cd \users\username + +Create a new directory to hold your project. + + mkdir testproject + cd testproject + +Use Meson to create and build a new sample project. + + meson init --name testproject --build + +This will create a project skeleton for you and compile it. The result +is put in the `build` subdirectory and can be run directly from there. + + build/testproject + +![All finished and ready to go](images/linux_alldone.png) + +The project is now ready for development. You can edit the code with +any editor and it is rebuilt by going in the `build` subdirectory and +executing the `meson compile` command. If your version of Meson is too +old, you can compile the project by running the command `ninja` +instead. diff --git a/meson/docs/markdown/SourceSet-module.md b/meson/docs/markdown/SourceSet-module.md new file mode 100644 index 000000000..26c199552 --- /dev/null +++ b/meson/docs/markdown/SourceSet-module.md @@ -0,0 +1,212 @@ +--- +short-description: Source set module +authors: + - name: Paolo Bonzini + email: pbonzini@redhat.com + years: [2019] +... 
+ +# Source set module + +This module provides support for building many targets against a +single set of files; the choice of which files to include in each +target depends on the contents of a dictionary or a +`configuration_data` object. The module can be loaded with: + +```meson +ssmod = import('sourceset') +``` + +A simple example of using the module looks like this: + +```meson +ss = ssmod.source_set() +# Include main.c unconditionally +ss.add(files('main.c')) +# Include a.c if configuration key FEATURE1 is true +ss.add(when: 'FEATURE1', if_true: files('a.c')) +# Include zlib.c if the zlib dependency was found, and link zlib +# in the executable +ss.add(when: zlib, if_true: files('zlib.c')) +# many more rules here... +ssconfig = ss.apply(config) +executable('exe', sources: ssconfig.sources(), + dependencies: ssconfig.dependencies()) +``` + +and it would be equivalent to + +```meson +sources = files('main.c') +dependencies = [] +if config['FEATURE1'] then + sources += [files('a.c')] +endif +if zlib.found() then + sources += [files('zlib.c')] + dependencies += [zlib] +endif +# many more "if"s here... +executable('exe', sources: sources, dependencies: dependencies()) +``` + +Sourcesets can be used with a single invocation of the `apply` method, +similar to the example above, but the module is especially useful when +multiple executables are generated by applying the same rules to many +different configurations. + +*Added 0.51.0* + +## Functions + +### `source_set()` + +```meson +ssmod.source_set() +``` + +Create and return a new source set object. + +**Returns**: a [source set][`source_set` object] + +## `source_set` object + +The `source_set` object provides methods to add files to a source set +and to query it. The source set becomes immutable after any method but +`add` is called. + +### Methods + +#### `add()` + +```meson +source_set.add([when: varnames_and_deps], + [if_true: sources_and_deps], + [if_false: list_of_alt_sources]) +source_set.add(sources_and_deps) +``` + +Add a *rule* to a source set. A rule determines the conditions under +which some source files or dependency objects are included in a build +configuration. All source files must be present in the source tree or +they can be created in the build tree via `configure_file`, +`custom_target` or `generator`. + +`varnames_and_deps` is a list of conditions for the rule, which can be +either strings or dependency objects (a dependency object is anything +that has a `found()` method). If *all* the strings evaluate to true +and all dependencies are found, the rule will evaluate to true; +`apply()` will then include the contents of the `if_true` keyword +argument in its result. Otherwise, that is if any of the strings in +the positional arguments evaluate to false or any dependency is not +found, `apply()` will instead use the contents of the `if_false` +keyword argument. + +Dependencies can also appear in `sources_and_deps`. In this case, a +missing dependency will simply be ignored and will *not* disable the +rule, similar to how the `dependencies` keyword argument works in +build targets. + +**Note**: It is generally better to avoid mixing source sets and +disablers. This is because disablers will cause the rule to be dropped +altogether, and the `list_of_alt_sources` would not be taken into +account anymore. + +#### `add_all()` + +```meson +source_set.add_all(when: varnames_and_deps, + if_true: [source_set1, source_set2, ...]) +source_set.add_all(source_set1, source_set2, ...) +``` + +Add one or more source sets to another. 
+ +For each source set listed in the arguments, `apply()` will consider +their rules only if the conditions in `varnames_and_deps` are +evaluated positively. For example, the following: + +```meson +sources_b = ssmod.source_set() +sources_b.add(when: 'HAVE_A', if_true: 'file.c') +sources = ssmod.source_set() +sources.add_all(when: 'HAVE_B', if_true: sources_b) +``` + +is equivalent to: + +```meson +sources = ssmod.source_set() +sources.add(when: ['HAVE_A', 'HAVE_B'], if_true: 'file.c') +``` + +#### `all_sources()` + +```meson +list source_set.all_sources(...) +``` + +Returns a list of all sources that were placed in the source set using +`add` (including nested source sets) and that do not have a not-found +dependency. If a rule has a not-found dependency, only the `if_false` +sources are included (if any). + +**Returns**: a list of file objects + +#### `all_dependencies()` *(since 0.52.0)* + +```meson +list source_set.all_dependencies(...) +``` + +Returns a list of all dependencies that were placed in the source set +using `add` (including nested source sets) and that were found. + +**Returns**: a list of dependencies + +#### `apply()` + +```meson +source_files source_set.apply(conf_data[, strict: false]) +``` + +Match the source set against a dictionary or a `configuration_data` +object and return a *source configuration* object. A source +configuration object allows you to retrieve the sources and +dependencies for a specific configuration. + +By default, all the variables that were specified in the rules have to +be present in `conf_data`. However, in some cases the convention is +that `false` configuration symbols are absent in `conf_data`; this is +the case for example when the configuration was loaded from a Kconfig +file. In that case you can specify the `strict: false` keyword +argument, which will treat absent variables as false. + +**Returns**: a [source configuration][`source_configuration` object] + +## `source_configuration` object + +The `source_configuration` object provides methods to query the result of an +`apply` operation on a source set. + +### Methods + +#### `sources()` + +```meson +source_config.sources() +``` + +Return the source files corresponding to the applied configuration. + +**Returns**: a list of file objects + +#### `dependencies()` + +```meson +source_config.dependencies() +``` + +Return the dependencies corresponding to the applied configuration. + +**Returns**: a list of dependency objects diff --git a/meson/docs/markdown/Style-guide.md b/meson/docs/markdown/Style-guide.md new file mode 100644 index 000000000..f204a9a70 --- /dev/null +++ b/meson/docs/markdown/Style-guide.md @@ -0,0 +1,87 @@ +--- +short-description: Style recommendations for Meson files +... + +# Style recommendations + +This page lists some recommendations on organizing and formatting your +Meson build files. + +## Tabs or spaces? + +Always spaces. + +## Naming Variable + +Snake case (stylized as `snake_case`) refers to the style of writing in which +each space is replaced by an underscore (`_`) character, and the first letter of +each word written in lowercase. It is the most common naming convention used +in Meson build scripts as identifiers for variable. + +Let say you would like to refer to your executable so something like `my_exe`. + +## Dependency usage + +The `dependency` function is the recommended way to handle +dependencies. 
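+
+For illustration, with `foo` standing in for a real dependency name, the
+recommended pattern looks roughly like this:
+
+```meson
+# Let Meson find the dependency on the system or via a wrap fallback
+foo_dep = dependency('foo')
+executable('app', 'app.c', dependencies : foo_dep)
+```
+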
If your wrap files have the necessary `[provide]` +entries, everything will work automatically both when compiling your +own and when using system dependencies. + +You should only need `subproject` when you need to extract non dependencies/programs. + +## Naming options + +There are two ways of naming project options. As an example for +booleans the first one is `foo` and the second one is `enable-foo`. +The former style is recommended, because in Meson options have strong +type, rather than being just strings. + +You should try to name options the same as is common in other +projects. This is especially important for yielding options, because +they require that both the parent and subproject options have the same +name. + +# Global arguments + +Prefer `add_project_arguments` to `add_global_arguments` because using +the latter prevents using the project as a subproject. + +# Cross compilation arguments + +Try to keep cross compilation arguments away from your build files as +much as possible. Keep them in the cross file instead. This adds +portability, since all changes needed to compile to a different +platform are isolated in one place. + +# Sorting source paths + +The source file arrays should all be sorted. This makes it easier to +spot errors and often reduces merge conflicts. Furthermore, the paths +should be sorted with a natural sorting algorithm, so that numbers are +sorted in an intuitive way (`1, 2, 3, 10, 20` instead of `1, 10, 2, +20, 3`). + +Numbers should also be sorted before characters (`a111` before `ab0`). +Furthermore, strings should be sorted case insensitive. + +Additionally, if a path contains a directory it should be sorted before +normal files. This rule also applies recursively for subdirectories. + +The following example shows correct source list definition: + +```meson +sources = files([ + 'aaa/a1.c', + 'aaa/a2.c', + 'bbb/subdir1/b1.c', + 'bbb/subdir2/b2.c', + 'bbb/subdir10/b3.c', + 'bbb/subdir20/b4.c', + 'bbb/b5.c', + 'bbb/b6.c', + 'f1.c', + 'f2.c', + 'f10.c', + 'f20.c' +]) +``` diff --git a/meson/docs/markdown/Subprojects.md b/meson/docs/markdown/Subprojects.md new file mode 100644 index 000000000..7cb04e0eb --- /dev/null +++ b/meson/docs/markdown/Subprojects.md @@ -0,0 +1,382 @@ +--- +short-description: Using Meson projects as subprojects within other Meson projects +... + +# Subprojects + +Some platforms do not provide a native packaging system. In these +cases it is common to bundle all third party libraries in your source +tree. This is usually frowned upon because it makes it hard to add +these kinds of projects into e.g. those Linux distributions that +forbid bundled libraries. + +Meson tries to solve this problem by making it extremely easy to +provide both at the same time. The way this is done is that Meson +allows you to take any other Meson project and make it a part of your +build without (in the best case) any changes to its Meson setup. It +becomes a transparent part of the project. + +It should be noted that this is only guaranteed to work for subprojects +that are built with Meson. The reason is the simple fact that there is +no possible way to do this reliably with mixed build systems. Because of +this, only Meson subprojects are described here. +[CMake based subprojects](CMake-module.md#cmake-subprojects) are also +supported but not guaranteed to work. + +## A subproject example + +Usually dependencies consist of some header files plus a library to +link against. To declare this internal dependency use +`declare_dependency` function. 
+ +As an example, suppose we have a simple project that provides a shared +library. Its `meson.build` would look like this. + +```meson +project('libsimple', 'c') + +inc = include_directories('include') +libsimple = shared_library('simple', + 'simple.c', + include_directories : inc, + install : true) + +libsimple_dep = declare_dependency(include_directories : inc, + link_with : libsimple) +``` + +### Naming convention for dependency variables + +Ideally the dependency variable name should be of `_dep` +form. This way one can just use it without even looking inside build +definitions of that subproject. + +In cases where there are multiple dependencies need to be declared, +the default one should be named as `_dep` (e.g. +`gtest_dep`), and others can have `___dep` +form (e.g. `gtest_main_dep` - gtest with main function). + +There may be exceptions to these rules where common sense should be applied. + +### Adding variables to the dependency + +*New in 0.54.0* + +In some cases a project may define special variables via pkg-config or +cmake that a caller needs to know about. Meson provides a +`dependency.get_variable` method to hide what kind of dependency is +provided, and this is available to subprojects as well. Use the +`variables` keyword to add a dict of strings: + +```meson +my_dep = declare_dependency(..., variables : {'var': 'value', 'number': '3'}) +``` + +Which another project can access via: + +```meson +var = my_dep.get_variable(internal : 'var', cmake : 'CMAKE_VAR') +``` + +The values of the dict must be strings, as pkg-config and cmake will +return variables as strings. + +### Build options in subproject + +All Meson features of the subproject, such as project options keep +working and can be set in the master project. There are a few +limitations, the most important being that global compiler arguments +must be set in the main project before calling subproject. Subprojects +must not set global arguments because there is no way to do that +reliably over multiple subprojects. To check whether you are running +as a subproject, use the `is_subproject` function. + +## Using a subproject + +All subprojects must be inside `subprojects` directory. The +`subprojects` directory must be at the top level of your project. +Subproject declaration must be in your top level `meson.build`. + +### A simple example + +Let's use `libsimple` as a subproject. + +At the top level of your project create `subprojects` directory. +Then copy `libsimple` into `subprojects` directory. + +Your project's `meson.build` should look like this. + +```meson +project('my_project', 'cpp') + +libsimple_proj = subproject('libsimple') +libsimple_dep = libsimple_proj.get_variable('libsimple_dep') + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +Note that the subproject object is *not* used as the dependency, but +rather you need to get the declared dependency from it with +`get_variable` because a subproject may have multiple declared +dependencies. + +### Toggling between system libraries and embedded sources + +When building distro packages it is very important that you do not +embed any sources. Some distros have a rule forbidding embedded +dependencies so your project must be buildable without them or +otherwise the packager will hate you. + +Here's how you would use system libraries and fall back to embedding sources +if the dependency is not available. 
+ +```meson +project('my_project', 'cpp') + +libsimple_dep = dependency('libsimple', required : false) + +if not libsimple_dep.found() + libsimple_proj = subproject('libsimple') + libsimple_dep = libsimple_proj.get_variable('libsimple_dep') +endif + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +Because this is such a common operation, Meson provides a shortcut for +this use case. + +```meson +dep = dependency('foo', fallback : [subproject_name, variable_name]) +``` + +The `fallback` keyword argument takes two items, the name of the +subproject and the name of the variable that holds the dependency. If +you need to do something more complicated, such as extract several +different variables, then you need to do it yourself with the manual +method described above. + +Using this shortcut the build definition would look like this. + +```meson +project('my_project', 'cpp') + +libsimple_dep = dependency('libsimple', fallback : ['libsimple', 'libsimple_dep']) + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +With this setup when libsimple is provided by the system, we use it. +When that is not the case we use the embedded version (the one from +subprojects). + +Note that `libsimple_dep` can point to an external or an internal +dependency but you don't have to worry about their differences. Meson +will take care of the details for you. + +### Subprojects depending on other subprojects + +Subprojects can use other subprojects, but all subprojects must reside +in the top level `subprojects` directory. Recursive use of subprojects +is not allowed, though, so you can't have subproject `a` that uses +subproject `b` and have `b` also use `a`. + +## Obtaining subprojects + +Meson ships with a dependency system to automatically obtain +dependency subprojects. It is documented in the [Wrap dependency +system manual](Wrap-dependency-system-manual.md). + +## Command-line options + +The usage of subprojects can be controlled by users and distros with +the following command-line options: + +* **--wrap-mode=nodownload** + + Meson will not use the network to download any subprojects or + fetch any wrap information. Only pre-existing sources will be used. + This is useful (mostly for distros) when you want to only use the + sources provided by a software release, and want to manually handle + or provide missing dependencies. + +* **--wrap-mode=nofallback** + + Meson will not use subproject fallbacks for any dependency + declarations in the build files, and will only look for them in the + system. Note that this does not apply to unconditional subproject() + calls, and those are meant to be used for sources that cannot be + provided by the system, such as copylibs. + + This option may be overridden by `--force-fallback-for` for specific + dependencies. + +* **--wrap-mode=forcefallback** + + Meson will not look at the system for any dependencies which have + subproject fallbacks available, and will *only* use subprojects for + them. This is useful when you want to test your fallback setup, or + want to specifically build against the library sources provided by + your subprojects. + +* **--force-fallback-for=list,of,dependencies** + + Meson will not look at the system for any dependencies listed there, + provided a fallback was supplied when the dependency was declared. 
+
+  This option takes precedence over `--wrap-mode=nofallback`, and when
+  used in combination with `--wrap-mode=nodownload` will only work
+  if the dependency has already been downloaded.
+
+  This is useful when your project has many fallback dependencies,
+  but you only want to build against the library sources for a few
+  of them.
+
+  **Warning**: This could lead to mixing system and subproject versions of the
+  same library in the same process. Take this case as an example:
+  - Libraries `glib-2.0` and `gstreamer-1.0` are installed on your system.
+  - `gstreamer-1.0` depends on `glib-2.0`, pkg-config file `gstreamer-1.0.pc`
+    has `Requires: glib-2.0`.
+  - In your application build definition you do:
+    ```meson
+    executable('app', ...,
+      dependencies: [
+        dependency('glib-2.0', fallback: 'glib'),
+        dependency('gstreamer-1.0', fallback: 'gstreamer')],
+    )
+    ```
+  - You configure with `--force-fallback-for=glib`.
+    This results in linking to two different versions of library `glib-2.0`
+    because `dependency('glib-2.0', fallback: 'glib')` will return the
+    subproject dependency, but `dependency('gstreamer-1.0', fallback: 'gstreamer')`
+    will not fall back and will return the system dependency, including the `glib-2.0`
+    library. To avoid that situation, every dependency that itself depends on
+    `glib-2.0` must also be forced to fall back, in this case with
+    `--force-fallback-for=glib,gstreamer`.
+
+* **--wrap-mode=nopromote**
+
+  *Since 0.56.0* Meson will automatically use wrap files found in subprojects
+  and copy them into the main project. That new behavior can be disabled by
+  passing `--wrap-mode=nopromote`. In that case only wraps found in the main
+  project will be used.
+
+## `meson subprojects` command
+
+*Since 0.49.0*
+
+`meson subprojects` has various subcommands to manage all subprojects.
+If the subcommand fails on any subproject the execution continues with
+other subprojects. All subcommands accept the `--sourcedir` argument
+pointing to the root source dir of the main project.
+
+*Since 0.56.0* all subcommands accept a `--types <types>`
+argument to run the subcommands only on subprojects of the given
+types. Multiple types can be set as a comma-separated list e.g. `--types
+git,file`.
+
+*Since 0.56.0* If the subcommand fails on any subproject an error code
+is returned at the end instead of returning success.
+
+### Download subprojects
+
+*Since 0.49.0*
+
+Meson will automatically download needed subprojects during configure,
+unless the **--wrap-mode=nodownload** option is passed. It is sometimes
+preferable to download all subprojects in advance, so the Meson
+configure step can be performed offline. The command-line `meson
+subprojects download` can be used for that; it will download all
+missing subprojects, but will not update already fetched subprojects.
+
+### Update subprojects
+
+*Since 0.49.0*
+
+Once a subproject has been fetched, Meson will not update it automatically.
+For example, if the wrap file tracks a git branch, it won't pull the latest commits.
+
+To pull the latest version of all your subprojects at once, just run the command:
+`meson subprojects update`.
+- If the wrap file comes from wrapdb, the latest version of the wrap file will
+  be pulled and used the next time meson reconfigures the project. This can be
+  triggered using `meson --reconfigure`. The previous source tree is not deleted, to
+  prevent any loss of local changes. *Since 0.58.0* If `--reset` is
+  specified, the source tree is deleted and new source is extracted.
+- If the subproject is currently in detached mode, a checkout of the revision from
+  the wrap file is performed. *Since 0.56.0* a rebase is also performed in case the
+  revision already existed locally but was outdated. If `--reset` is specified,
+  a hard reset is performed instead of a rebase.
+- If the subproject is currently at the same branch as specified by the wrap file,
+  a rebase on the `origin` commit is performed. *Since 0.56.0* If `--reset` is
+  specified, a hard reset is performed instead of a rebase.
+- If the subproject is currently in a different branch than specified by the wrap file,
+  it is skipped unless the `--rebase` option is passed, in which case a rebase on
+  the `origin` commit is performed. *Since 0.56.0* the `--rebase` argument is
+  deprecated and has no effect. Instead, a checkout of the revision from the wrap
+  file is performed and a rebase is also performed in case the revision already
+  existed locally but was outdated. If `--reset` is specified, a hard reset is
+  performed instead of a rebase.
+- *Since 0.56.0* if the `url` specified in the wrap file is different from the URL set
+  on `origin` for a git repository it will not be updated, unless `--reset` is
+  specified, in which case the URL of `origin` will be reset first.
+- *Since 0.56.0* If the subproject directory is not a git repository but has a
+  `[wrap-git]`, the subproject is ignored, unless `--reset` is specified, in which
+  case the directory is deleted and the new repository is cloned.
+
+### Start a topic branch across all git subprojects
+
+*Since 0.49.0*
+
+The command-line `meson subprojects checkout <branch_name>` will
+check out a branch, or create one with the `-b` argument, in every git
+subproject. This is useful when starting local changes across
+multiple subprojects. It is still your responsibility to commit and
+push in each repository where you made local changes.
+
+To come back to the revision set in the wrap file (i.e. master), just run
+`meson subprojects checkout` with no branch name.
+
+*Since 0.56.0* any pending changes are now stashed before checking out a new branch.
+
+### Execute a command on all subprojects
+
+*Since 0.51.0*
+
+The command-line `meson subprojects foreach <command> [...]` will
+execute a command in each subproject directory. For example, this can
+be useful to check the status of subprojects (e.g. with `git status`
+or `git diff`) before performing other actions on them.
+
+## Why must all subprojects be inside a single directory?
+
+There are several reasons.
+
+First of all, to maintain any sort of sanity, the system must prevent going
+inside other subprojects with `subdir()` or variations thereof. Having the
+subprojects in well defined places makes this easy. If subprojects could be
+anywhere at all, it would be a lot harder.
+
+Second of all, it is extremely important that end users can easily see what
+subprojects any project has. Because they are in one, and only one, place,
+reviewing them becomes easy.
+
+This is also a question of convention. Since all Meson projects have the same
+layout w.r.t. subprojects, switching between projects becomes easier. You don't
+have to spend time on a new project traipsing through the source tree looking
+for subprojects. They are always in the same place.
+
+Finally, if you can have subprojects anywhere, this increases the possibility of
+having many different (possibly incompatible) versions of a dependency in your
+source tree. Then changing some code (such as changing the order you traverse
+directories) may cause a completely different version of the subproject to be
+used by accident.
diff --git a/meson/docs/markdown/Syntax.md b/meson/docs/markdown/Syntax.md new file mode 100644 index 000000000..be3292f98 --- /dev/null +++ b/meson/docs/markdown/Syntax.md @@ -0,0 +1,746 @@ +--- +short-description: Syntax and structure of Meson files +... + +# Syntax + +The syntax of Meson's specification language has been kept as simple +as possible. It is *strongly typed* so no object is ever converted to +another under the covers. Variables have no visible type which makes +Meson *dynamically typed* (also known as *duck typed*). + +The main building blocks of the language are *variables*, *numbers*, +*booleans*, *strings*, *arrays*, *function calls*, *method calls*, *if +statements* and *includes*. + +Usually one Meson statement takes just one line. There is no way to +have multiple statements on one line as in e.g. *C*. Function and +method calls' argument lists can be split over multiple lines. Meson +will autodetect this case and do the right thing. + +In other cases, *(added 0.50)* you can get multi-line statements by +ending the line with a `\`. Apart from line ending whitespace has no +syntactic meaning. + +## Variables + +Variables in Meson work just like in other high level programming +languages. A variable can contain a value of any type, such as an +integer or a string. Variables don't need to be predeclared, you can +just assign to them and they appear. Here's how you would assign +values to two different variables. + +```meson +var1 = 'hello' +var2 = 102 +``` + +One important difference in how variables work in Meson is that all +objects are immutable. When you see an operation which appears like +a mutation, actually a new object is created and assigned to the +name. This is different from, for example, how Python works for +objects, but similar to e.g. Python strings. + +```meson +var1 = [1, 2, 3] +var2 = var1 +var2 += [4] +# var2 is now [1, 2, 3, 4] +# var1 is still [1, 2, 3] +``` + +## Numbers + +Meson supports only integer numbers. They are declared simply by +writing them out. Basic arithmetic operations are supported. + +```meson +x = 1 + 2 +y = 3 * 4 +d = 5 % 3 # Yields 2. +``` + +Hexadecimal literals are supported since version 0.45.0: + +```meson +int_255 = 0xFF +``` + +Octal and binary literals are supported since version 0.47.0: + +```meson +int_493 = 0o755 +int_1365 = 0b10101010101 +``` + +Strings can be converted to a number like this: + +```meson +string_var = '42' +num = string_var.to_int() +``` + +Numbers can be converted to a string: + +```meson +int_var = 42 +string_var = int_var.to_string() +``` + +## Booleans + +A boolean is either `true` or `false`. + +```meson +truth = true +``` + +Booleans can be converted to a string or to a number: + +```meson +bool_var = true +string_var = bool_var.to_string() +int_var = bool_var.to_int() +``` + +## Strings + +Strings in Meson are declared with single quotes. To enter a literal +single quote do it like this: + +```meson +single quote = 'contains a \' character' +``` + +The full list of escape sequences is: + +* `\\` Backslash +* `\'` Single quote +* `\a` Bell +* `\b` Backspace +* `\f` Formfeed +* `\n` Newline +* `\r` Carriage Return +* `\t` Horizontal Tab +* `\v` Vertical Tab +* `\ooo` Character with octal value ooo +* `\xhh` Character with hex value hh +* `\uxxxx` Character with 16-bit hex value xxxx +* `\Uxxxxxxxx` Character with 32-bit hex value xxxxxxxx +* `\N{name}` Character named name in Unicode database + +As in python and C, up to three octal digits are accepted in `\ooo`. 
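+
+For example, a few of these escapes in use (an illustrative snippet):
+
+```meson
+two_lines = 'first line\nsecond line'
+tabbed = 'a\tb'
+e_acute = '\u00e9'   # é
+```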
+ +Unrecognized escape sequences are left in the string unchanged, i.e., the +backslash is left in the string. + +### String concatenation + +Strings can be concatenated to form a new string using the `+` symbol. + +```meson +str1 = 'abc' +str2 = 'xyz' +combined = str1 + '_' + str2 # combined is now abc_xyz +``` + +### String path building + +*(Added 0.49)* + +You can concatenate any two strings using `/` as an operator to build paths. +This will always use `/` as the path separator on all platforms. + +```meson +joined = '/usr/share' / 'projectname' # => /usr/share/projectname +joined = '/usr/local' / '/etc/name' # => /etc/name + +joined = 'C:\\foo\\bar' / 'builddir' # => C:/foo/bar/builddir +joined = 'C:\\foo\\bar' / 'D:\\builddir' # => D:/builddir +``` + +Note that this is equivalent to using [`join_paths()`](Reference-manual.md#join_paths), +which was obsoleted by this operator. + +### Strings running over multiple lines + +Strings running over multiple lines can be declared with three single +quotes, like this: + +```meson +multiline_string = '''#include +int main (int argc, char ** argv) { + return FOO_SUCCESS; +}''' +``` + +These are raw strings that do not support the escape sequences listed +above. These strings can also be combined with the string formatting +functionality described below. + +### String formatting + +#### .format() + +Strings can be built using the string formatting functionality. + +```meson +template = 'string: @0@, number: @1@, bool: @2@' +res = template.format('text', 1, true) +# res now has value 'string: text, number: 1, bool: true' +``` + +As can be seen, the formatting works by replacing placeholders of type +`@number@` with the corresponding argument. + +#### Format strings +*(Added 0.58)* + +Format strings can be used as a non-positional alternative to the +string formatting functionality described above. + +```meson +n = 10 +m = 'hi' + +s = f'int: @n@, string: @m@' +# s now has the value 'int: 10, string: hi' +``` + +Currently only identity-expressions are supported inside of format +strings, meaning you cannot use arbitrary Meson expressions inside of them. + +```meson +n = 10 +m = 5 + +# The following is not a valid format string +s = f'result: @n + m@' +``` + +### String methods + +Strings also support a number of other methods that return transformed +copies. + +#### .replace() + +Since 0.58.0, you can replace a substring from a string. + +```meson +# Replaces all instances of one substring with another +s = 'semicolons;as;separators' +s = s.replace('as', 'are') +# 's' now has the value of 'semicolons;are;separators' +``` + +#### .strip() + +```meson +# Similar to the Python str.strip(). 
Removes leading/ending spaces and newlines +define = ' -Dsomedefine ' +stripped_define = define.strip() +# 'stripped_define' now has the value '-Dsomedefine' +``` + +#### .to_upper(), .to_lower() + +```meson +target = 'x86_FreeBSD' +upper = target.to_upper() # t now has the value 'X86_FREEBSD' +lower = target.to_lower() # t now has the value 'x86_freebsd' +``` + +#### .to_int() + +```meson +version = '1' +# Converts the string to an int and throws an error if it can't be +ver_int = version.to_int() +``` + +#### .contains(), .startswith(), .endswith() + +```meson +target = 'x86_FreeBSD' +is_fbsd = target.to_lower().contains('freebsd') +# is_fbsd now has the boolean value 'true' +is_x86 = target.startswith('x86') # boolean value 'true' +is_bsd = target.to_lower().endswith('bsd') # boolean value 'true' +``` + +#### .substring() + +Since 0.56.0, you can extract a substring from a string. + +```meson +# Similar to the Python str[start:end] syntax +target = 'x86_FreeBSD' +platform = target.substring(0, 3) # prefix string value 'x86' +system = target.substring(4) # suffix string value 'FreeBSD' +``` + +The method accepts negative values where negative `start` is relative to the end of +string `len(string) - start` as well as negative `end`. + +```meson +string = 'foobar' +string.substring(-5, -3) # => 'oo' +string.substring(1, -1) # => 'ooba' +``` + +#### .split(), .join() + +```meson +# Similar to the Python str.split() +components = 'a b c d '.split() +# components now has the value ['a', 'b', 'c', 'd'] +components = 'a b c d '.split(' ') +# components now has the value ['a', 'b', '', '', 'c', 'd', ''] + +# Similar to the Python str.join() +output = ' '.join(['foo', 'bar']) +# Output value is 'foo bar' +pathsep = ':' +path = pathsep.join(['/usr/bin', '/bin', '/usr/local/bin']) +# path now has the value '/usr/bin:/bin:/usr/local/bin' + +# For joining path elements, you should use path1 / path2 +# This has the advantage of being cross-platform +path = '/usr' / 'local' / 'bin' +# path now has the value '/usr/local/bin' + +# For sources files, use files(): +my_sources = files('foo.c') +... +my_sources += files('bar.c') +# This has the advantage of always calculating the correct relative path, even +# if you add files in another directory or use them in a different directory +# than they're defined in + +# Example to set an API version for use in library(), install_header(), etc +project('project', 'c', version: '0.2.3') +version_array = meson.project_version().split('.') +# version_array now has the value ['0', '2', '3'] +api_version = '.'.join([version_array[0], version_array[1]]) +# api_version now has the value '0.2' + +# We can do the same with .format() too: +api_version = '@0@.@1@'.format(version_array[0], version_array[1]) +# api_version now (again) has the value '0.2' +``` + +#### .underscorify() + +```meson +name = 'Meson Docs.txt#Reference-manual' +# Replaces all characters other than `a-zA-Z0-9` with `_` (underscore) +# Useful for substituting into #defines, filenames, etc. 
+underscored = name.underscorify() +# underscored now has the value 'Meson_Docs_txt_Reference_manual' +``` + +#### .version_compare() + +```meson +version = '1.2.3' +# Compare version numbers semantically +is_new = version.version_compare('>=2.0') +# is_new now has the boolean value false +# Supports the following operators: '>', '<', '>=', '<=', '!=', '==', '=' +``` + +Meson version comparison conventions include: + +```meson +'3.6'.version_compare('>=3.6.0') == false +``` + +It is best to be unambiguous and specify the full revision level to compare. + +## Arrays + +Arrays are delimited by brackets. An array can contain an arbitrary number of objects of any type. + +```meson +my_array = [1, 2, 'string', some_obj] +``` + +Accessing elements of an array can be done via array indexing: + +```meson +my_array = [1, 2, 'string', some_obj] +second_element = my_array[1] +last_element = my_array[-1] +``` + +You can add more items to an array like this: + +```meson +my_array += ['foo', 3, 4, another_obj] +``` + +When adding a single item, you do not need to enclose it in an array: + +```meson +my_array += ['something'] +# This also works +my_array += 'else' +``` + +Note appending to an array will always create a new array object and +assign it to `my_array` instead of modifying the original since all +objects in Meson are immutable. + +Since 0.49.0, you can check if an array contains an element like this: + +```meson +my_array = [1, 2] +if 1 in my_array +# This condition is true +endif +if 1 not in my_array +# This condition is false +endif +``` + +### Array methods + +The following methods are defined for all arrays: + + - `length`, the size of the array + - `contains`, returns `true` if the array contains the object given as argument, `false` otherwise + - `get`, returns the object at the given index, negative indices count from the back of the array, indexing out of bounds is a fatal error. Provided for backwards-compatibility, it is identical to array indexing. + +## Dictionaries + +Dictionaries are delimited by curly braces. A dictionary can contain +an arbitrary number of key: value pairs. Keys are required to be +strings, but values can be objects of any type. Prior to *0.53.0* keys +were required to be literal strings, i.e., you could not use a +variable containing a string value as a key. + +```meson +my_dict = {'foo': 42, 'bar': 'baz'} +``` + +Keys must be unique: + +```meson +# This will fail +my_dict = {'foo': 42, 'foo': 43} +``` + +Dictionaries are immutable and do not have a guaranteed order. + +Dictionaries are available since 0.47.0. + +Visit the [Reference Manual](Reference-manual.md#dictionary-object) to read +about the methods exposed by dictionaries. + +Since 0.49.0, you can check if a dictionary contains a key like this: + +```meson +my_dict = {'foo': 42, 'bar': 43} +if 'foo' in my_dict +# This condition is true +endif +if 42 in my_dict +# This condition is false +endif +if 'foo' not in my_dict +# This condition is false +endif +``` + +*Since 0.53.0* Keys can be any expression evaluating to a string +value, not limited to string literals any more. + +```meson +d = {'a' + 'b' : 42} +k = 'cd' +d += {k : 43} +``` + +## Function calls + +Meson provides a set of usable functions. The most common use case is +creating build objects. 
+ +```meson +executable('progname', 'prog.c') +``` + +Most functions take only few positional arguments but several keyword +arguments, which are specified like this: + +```meson +executable('progname', + sources: 'prog.c', + c_args: '-DFOO=1') +``` + +Starting with version 0.49.0 keyword arguments can be specified +dynamically. This is done by passing dictionary representing the +keywords to set in the `kwargs` keyword. The previous example would be +specified like this: + +```meson +d = {'sources': 'prog.c', + 'c_args': '-DFOO=1'} + +executable('progname', + kwargs: d) +``` + +A single function can take keyword argumets both directly in the +function call and indirectly via the `kwargs` keyword argument. The +only limitation is that it is a hard error to pass any particular key +both as a direct and indirect argument. + +```meson +d = {'c_args': '-DFOO'} +executable('progname', 'prog.c', + c_args: '-DBAZ=1', + kwargs: d) # This is an error! +``` + +Attempting to do this causes Meson to immediately exit with an error. + +## Method calls + +Objects can have methods, which are called with the dot operator. The +exact methods it provides depends on the object. + +```meson +myobj = some_function() +myobj.do_something('now') +``` + +## If statements + +If statements work just like in other languages. + +```meson +var1 = 1 +var2 = 2 +if var1 == var2 # Evaluates to false + something_broke() +elif var3 == var2 + something_else_broke() +else + everything_ok() +endif + +opt = get_option('someoption') +if opt != 'foo' + do_something() +endif +``` + +## Logical operations + +Meson has the standard range of logical operations which can be used in +`if` statements. + +```meson +if a and b + # do something +endif +if c or d + # do something +endif +if not e + # do something +endif +if not (f or g) + # do something +endif +``` + +Logical operations work only on boolean values. + +## Foreach statements + +To do an operation on all elements of an iterable, use the `foreach` +command. + +> Note that Meson variables are immutable. Trying to assign a new value +> to the iterated object inside a foreach loop will not affect foreach's +> control flow. + +### Foreach with an array + +Here's an example of how you could define two executables +with corresponding tests using arrays and foreach. + +```meson +progs = [['prog1', ['prog1.c', 'foo.c']], + ['prog2', ['prog2.c', 'bar.c']]] + +foreach p : progs + exe = executable(p[0], p[1]) + test(p[0], exe) +endforeach +``` + +### Foreach with a dictionary + +Here's an example of you could iterate a set of components that +should be compiled in according to some configuration. This uses +a [dictionary][dictionaries], which is available since 0.47.0. + +```meson +components = { + 'foo': ['foo.c'], + 'bar': ['bar.c'], + 'baz': ['baz.c'], +} + +# compute a configuration based on system dependencies, custom logic +conf = configuration_data() +conf.set('USE_FOO', 1) + +# Determine the sources to compile +sources_to_compile = [] +foreach name, sources : components + if conf.get('USE_@0@'.format(name.to_upper()), 0) == 1 + sources_to_compile += sources + endif +endforeach +``` + +### Foreach `break` and `continue` + +Since 0.49.0 `break` and `continue` keywords can be used inside foreach loops. 
+ +```meson +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach +# result is ['a', 'b'] +``` + +## Comments + +A comment starts with the `#` character and extends until the end of the line. + +```meson +some_function() # This is a comment +some_other_function() +``` + +## Ternary operator + +The ternary operator works just like in other languages. + +```meson +x = condition ? true_value : false_value +``` + +The only exception is that nested ternary operators are forbidden to +improve legibility. If your branching needs are more complex than this +you need to write an `if/else` construct. + +## Includes + +Most source trees have multiple subdirectories to process. These can +be handled by Meson's `subdir` command. It changes to the given +subdirectory and executes the contents of `meson.build` in that +subdirectory. All state (variables etc) are passed to and from the +subdirectory. The effect is roughly the same as if the contents of the +subdirectory's Meson file would have been written where the include +command is. + +```meson +test_data_dir = 'data' +subdir('tests') +``` + +## User-defined functions and methods + +Meson does not currently support user-defined functions or methods. +The addition of user-defined functions would make Meson +Turing-complete which would make it harder to reason about and more +difficult to integrate with tools like IDEs. More details about this +are [in the +FAQ](FAQ.md#why-is-meson-not-just-a-python-module-so-i-could-code-my-build-setup-in-python). +If because of this limitation you find yourself copying and pasting +code a lot you may be able to use a [`foreach` loop +instead](#foreach-statements). + +## Stability Promises + +Meson is very actively developed and continuously improved. There is a +possibility that future enhancements to the Meson build system will +require changes to the syntax. Such changes might be the addition of +new reserved keywords, changing the meaning of existing keywords or +additions around the basic building blocks like statements and +fundamental types. It is planned to stabilize the syntax with the 1.0 +release. + +## Grammar + +This is the full Meson grammar, as it is used to parse Meson build definition files: + +``` +additive_expression: multiplicative_expression | (additive_expression additive_operator multiplicative_expression) +additive_operator: "+" | "-" +argument_list: positional_arguments ["," keyword_arguments] | keyword_arguments +array_literal: "[" [expression_list] "]" +assignment_expression: conditional_expression | (logical_or_expression assignment_operator assignment_expression) +assignment_operator: "=" | "*=" | "/=" | "%=" | "+=" | "-=" +boolean_literal: "true" | "false" +build_definition: (NEWLINE | statement)* +condition: expression +conditional_expression: logical_or_expression | (logical_or_expression "?" 
expression ":" assignment_expression)
+decimal_literal: DECIMAL_NUMBER
+DECIMAL_NUMBER: /[1-9][0-9]*/
+dictionary_literal: "{" [key_value_list] "}"
+equality_expression: relational_expression | (equality_expression equality_operator relational_expression)
+equality_operator: "==" | "!="
+expression: assignment_expression
+expression_list: expression ("," expression)*
+expression_statement: expression
+function_expression: id_expression "(" [argument_list] ")"
+hex_literal: "0x" HEX_NUMBER
+HEX_NUMBER: /[a-fA-F0-9]+/
+id_expression: IDENTIFIER
+IDENTIFIER: /[a-zA-Z_][a-zA-Z_0-9]*/
+identifier_list: id_expression ("," id_expression)*
+integer_literal: decimal_literal | octal_literal | hex_literal
+iteration_statement: "foreach" identifier_list ":" id_expression NEWLINE (statement | jump_statement)* "endforeach"
+jump_statement: ("break" | "continue") NEWLINE
+key_value_item: expression ":" expression
+key_value_list: key_value_item ("," key_value_item)*
+keyword_item: id_expression ":" expression
+keyword_arguments: keyword_item ("," keyword_item)*
+literal: integer_literal | string_literal | boolean_literal | array_literal | dictionary_literal
+logical_and_expression: equality_expression | (logical_and_expression "and" equality_expression)
+logical_or_expression: logical_and_expression | (logical_or_expression "or" logical_and_expression)
+method_expression: postfix_expression "." function_expression
+multiplicative_expression: unary_expression | (multiplicative_expression multiplicative_operator unary_expression)
+multiplicative_operator: "*" | "/" | "%"
+octal_literal: "0o" OCTAL_NUMBER
+OCTAL_NUMBER: /[0-7]+/
+positional_arguments: expression ("," expression)*
+postfix_expression: primary_expression | subscript_expression | function_expression | method_expression
+primary_expression: literal | ("(" expression ")") | id_expression
+relational_expression: additive_expression | (relational_expression relational_operator additive_expression)
+relational_operator: ">" | "<" | ">=" | "<=" | "in" | ("not" "in")
+selection_statement: "if" condition NEWLINE (statement)* ("elif" condition NEWLINE (statement)*)* ["else" (statement)*] "endif"
+statement: (expression_statement | selection_statement | iteration_statement) NEWLINE
+string_literal: ("'" STRING_SIMPLE_VALUE "'") | ("'''" STRING_MULTILINE_VALUE "'''")
+STRING_MULTILINE_VALUE: \.*?(''')\
+STRING_SIMPLE_VALUE: \.*?(?<!\\)'\
+subscript_expression: postfix_expression "[" expression "]"
+unary_expression: (unary_operator expression) | postfix_expression
+unary_operator: "not" | "!" | "-"
+```
diff --git a/meson/docs/markdown/Tutorial.md b/meson/docs/markdown/Tutorial.md
new file mode 100644
--- /dev/null
+++ b/meson/docs/markdown/Tutorial.md
+# Tutorial
+
+We start with a plain C program, `main.c`, that prints a short message.
+
+```c
+#include <stdio.h>
+
+//
+// main is where all program execution starts
+//
+int main(int argc, char **argv) {
+  printf("Hello there.\n");
+  return 0;
+}
+```
+
+Then we create a Meson build description and put it in a file called
+`meson.build` in the same directory. Its contents are the following.
+
+```meson
+project('tutorial', 'c')
+executable('demo', 'main.c')
+```
+
+That is all. Note that unlike Autotools you [do not need to add any
+source headers to the list of
+sources](FAQ.md#do-i-need-to-add-my-headers-to-the-sources-list-like-in-autotools).
+
+We are now ready to build our application. First we need
+to initialize the build by going into the source directory and issuing
+the following command.
+
+```console
+$ meson setup builddir
+```
+
+We create a separate build directory to hold all of the compiler
+output. Meson is different from some other build systems in that it
+does not permit in-source builds. You must always create a separate
+build directory. Common convention is to put the default build
+directory in a subdirectory of your top level source directory.
+
+When Meson is run it prints the following output.
+
+    The Meson build system
+    version: 0.13.0-research
+    Source dir: /home/jpakkane/mesontutorial
+    Build dir: /home/jpakkane/mesontutorial/builddir
+    Build type: native build
+    Project name is "tutorial".
+    Using native c compiler "ccache cc". (gcc 4.8.2)
+    Creating build target "demo" with 1 files.
+
+Now we are ready to build our code.
+
+```console
+$ cd builddir
+$ ninja
+```
+
+If your Meson version is newer than 0.55.0, you can use the new
+backend-agnostic build command:
+
+```console
+$ cd builddir
+$ meson compile
+```
+
+For the rest of this document we are going to use the latter form.
+
+Once the executable is built we can run it.
+
+```console
+$ ./demo
+```
+
+This produces the expected output.
+
+    Hello there.
+
+Adding dependencies
+-----
+
+Just printing text is a bit old-fashioned. Let's update our program to
+create a graphical window instead. We'll use the
+[GTK+](https://gtk.org) widget toolkit. First we edit the main file to
+use GTK+. The new version looks like this.
+
+```c
+#include <gtk/gtk.h>
+
+//
+// Should provide the active view for a GTK application
+//
+static void activate(GtkApplication* app, gpointer user_data)
+{
+  GtkWidget *window;
+  GtkWidget *label;
+
+  window = gtk_application_window_new(app);
+  label = gtk_label_new("Hello GNOME!");
+  gtk_container_add(GTK_CONTAINER(window), label);
+  gtk_window_set_title(GTK_WINDOW(window), "Welcome to GNOME");
+  gtk_window_set_default_size(GTK_WINDOW(window), 200, 100);
+  gtk_widget_show_all(window);
+} // end of function activate
+
+//
+// main is where all program execution starts
+//
+int main(int argc, char **argv)
+{
+  GtkApplication *app;
+  int status;
+
+  app = gtk_application_new(NULL, G_APPLICATION_FLAGS_NONE);
+  g_signal_connect(app, "activate", G_CALLBACK(activate), NULL);
+  status = g_application_run(G_APPLICATION(app), argc, argv);
+  g_object_unref(app);
+
+  return status;
+} // end of function main
+```
+
+Then we edit the Meson file, instructing it to find and use the GTK+
+libraries.
+
+```meson
+project('tutorial', 'c')
+gtkdep = dependency('gtk+-3.0')
+executable('demo', 'main.c', dependencies : gtkdep)
+```
+
+If your app needs to use multiple libraries, you need to use separate
+[`dependency()`](Reference-manual.md#dependency) calls for each, like so:
+
+```meson
+gtkdeps = [dependency('gtk+-3.0'), dependency('gtksourceview-3.0')]
+```
+
+We don't need it for the current example.
+
+Now we are ready to build. The thing to notice is that we do *not*
+need to recreate our build directory, run any sort of magical commands
+or the like. Instead we just type the exact same command as if we were
+rebuilding our code without any build system changes.
+
+```console
+$ meson compile
+```
+
+Once you have set up your build directory the first time, you don't
+ever need to run the `meson` command again. You always just run `meson
+compile`. Meson will automatically detect when you have made changes
+to the build definitions and will take care of everything so users
+don't have to care. In this case the following output is produced.
+
+    [1/1] Regenerating build files
+    The Meson build system
+    version: 0.13.0-research
+    Source dir: /home/jpakkane/mesontutorial
+    Build dir: /home/jpakkane/mesontutorial/builddir
+    Build type: native build
+    Project name is "tutorial".
+    Using native c compiler "ccache cc". (gcc 4.8.2)
+    Found pkg-config version 0.26.
+    Dependency gtk+-3.0 found: YES
+    Creating build target "demo" with 1 files.
+ [1/2] Compiling c object demo.dir/main.c.o + [2/2] Linking target demo + +Note how Meson noticed that the build definition has changed and reran +itself automatically. The program is now ready to be run: + +``` +$ ./demo +``` + +This creates the following GUI application. + +![GTK+ sample application screenshot](images/gtksample.png) diff --git a/meson/docs/markdown/Unit-tests.md b/meson/docs/markdown/Unit-tests.md new file mode 100644 index 000000000..5233a4fe8 --- /dev/null +++ b/meson/docs/markdown/Unit-tests.md @@ -0,0 +1,270 @@ +--- +short-description: Meson's own unit-test system +... + +# Unit tests + +Meson comes with a fully functional unit test system. To use it simply +build an executable and then use it in a test. + +```meson +e = executable('prog', 'testprog.c') +test('name of test', e) +``` + +You can add as many tests as you want. They are run with the command `meson +test`. + +Meson captures the output of all tests and writes it in the log file +`meson-logs/testlog.txt`. + +## Test parameters + +Some tests require the use of command line arguments or environment +variables. These are simple to define. + +```meson +test('command line test', exe, args : ['first', 'second']) +test('envvar test', exe2, env : ['key1=value1', 'key2=value2']) +``` + +Note how you need to specify multiple values as an array. + +### MALLOC_PERTURB_ + +By default, environment variable +[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) is +set to a random value between 1..255. This can help find memory leaks on +configurations using glibc, including with non-GCC compilers. This feature +can be disabled as discussed in [test()](Reference-manual.md#test). + +## Coverage + +If you enable coverage measurements by giving Meson the command line +flag `-Db_coverage=true`, you can generate coverage reports after +running the tests (running the tests is required to gather the list of +functions that get called). Meson will autodetect what coverage +generator tools you have installed and will generate the corresponding +targets. These targets are `coverage-xml` and `coverage-text` which +are both provided by [Gcovr](http://gcovr.com) (version 3.3 or higher) +and `coverage-html`, which requires +[Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and +[GenHTML](https://linux.die.net/man/1/genhtml) or +[Gcovr](http://gcovr.com). As a convenience, a high-level `coverage` +target is also generated which will produce all 3 coverage report +types, if possible. + +The output of these commands is written to the log directory `meson-logs` in +your build directory. + +## Parallelism + +To reduce test times, Meson will by default run multiple unit tests in +parallel. It is common to have some tests which can not be run in +parallel because they require unique hold on some resource such as a +file or a D-Bus name. You have to specify these tests with a keyword +argument. + +```meson +test('unique test', t, is_parallel : false) +``` + +Meson will then make sure that no other unit test is running at the +same time. Non-parallel tests take longer to run so it is recommended +that you write your unit tests to be parallel executable whenever +possible. + +By default Meson uses as many concurrent processes as there are cores +on the test machine. You can override this with the environment +variable `MESON_TESTTHREADS` like this. 
+ +```console +$ MESON_TESTTHREADS=5 meson test +``` + +## Priorities + +*(added in version 0.52.0)* + +Tests can be assigned a priority that determines when a test is +*started*. Tests with higher priority are started first, tests with +lower priority started later. The default priority is 0, Meson makes +no guarantee on the ordering of tests with identical priority. + +```meson +test('started second', t, priority : 0) +test('started third', t, priority : -50) +test('started first', t, priority : 1000) +``` + +Note that the test priority only affects the starting order of tests +and subsequent tests are affected by how long it takes previous tests +to complete. It is thus possible that a higher-priority test is still +running when lower-priority tests with a shorter runtime have +completed. + +## Skipped tests and hard errors + +Sometimes a test can only determine at runtime that it can not be run. + +For the default `exitcode` testing protocol, the GNU standard approach +in this case is to exit the program with error code 77. Meson will +detect this and report these tests as skipped rather than failed. This +behavior was added in version 0.37.0. + +For TAP-based tests, skipped tests should print a single line starting +with `1..0 # SKIP`. + +In addition, sometimes a test fails set up so that it should fail even +if it is marked as an expected failure. The GNU standard approach in +this case is to exit the program with error code 99. Again, Meson will +detect this and report these tests as `ERROR`, ignoring the setting of +`should_fail`. This behavior was added in version 0.50.0. + +## Testing tool + +The goal of the Meson test tool is to provide a simple way to run +tests in a variety of different ways. The tool is designed to be run +in the build directory. + +The simplest thing to do is just to run all tests. + +```console +$ meson test +``` + +### Run subsets of tests + +For clarity, consider the meson.build containing: + +```meson + +test('A', ..., suite: 'foo') +test('B', ..., suite: ['foo', 'bar']) +test('C', ..., suite: 'bar') +test('D', ..., suite: 'baz') + +``` + +Specify test(s) by name like: + +```console +$ meson test A D +``` + +Tests belonging to a suite `suite` can be run as follows + +```console +$ meson test --suite (sub)project_name:suite +``` + +Since version *0.46*, `(sub)project_name` can be omitted if it is the +top-level project. + +Multiple suites are specified like: + +```console +$ meson test --suite foo --suite bar +``` + +NOTE: If you choose to specify both suite(s) and specific test +name(s), the test name(s) must be contained in the suite(s). This +however is redundant-- it would be more useful to specify either +specific test names or suite(s). + +### Other test options + +Sometimes you need to run the tests multiple times, which is done like this: + +```console +$ meson test --repeat=10 +``` + +Invoking tests via a helper executable such as Valgrind can be done with the +`--wrap` argument + +```console +$ meson test --wrap=valgrind testname +``` + +Arguments to the wrapper binary can be given like this: + +```console +$ meson test --wrap='valgrind --tool=helgrind' testname +``` + +Meson also supports running the tests under GDB. Just doing this: + +```console +$ meson test --gdb testname +``` + +Meson will launch `gdb` all set up to run the test. Just type `run` in +the GDB command prompt to start the program. + +The second use case is a test that segfaults only rarely. 
In this case
+you can invoke the following command:
+
+```console
+$ meson test --gdb --repeat=10000 testname
+```
+
+This runs the test up to 10 000 times under GDB automatically. If the
+program crashes, GDB will halt and the user can debug the application.
+Note that testing timeouts are disabled in this case so `meson test`
+will not kill `gdb` while the developer is still debugging it. The
+downside is that if the test binary freezes, the test runner will wait
+forever.
+
+Sometimes, the GDB binary is not in the PATH variable or the user
+wants to use a GDB replacement. Therefore, the invoked GDB program can
+be specified *(added 0.52.0)*:
+
+```console
+$ meson test --gdb --gdb-path /path/to/gdb testname
+```
+
+To have the output of failing tests printed directly to the console,
+use:
+
+```console
+$ meson test --print-errorlogs
+```
+
+Meson will report the output produced by the failing tests along with
+other useful information such as the environment variables. This is
+useful, for example, when you run the tests on Travis CI, Jenkins and
+the like.
+
+For further information see the command line help of Meson by running
+`meson test -h`.
+
+## Legacy notes
+
+If `meson test` does not work for you, you likely have an old version
+of Meson. In that case you should call `mesontest` instead. If
+`mesontest` doesn't work either, you have a very old version prior to
+0.37.0 and should upgrade.
+
+## Test outputs
+
+Meson will write several different files with detailed results of
+running tests. These will be written into `$builddir/meson-logs/`.
+
+### testlog.json
+
+This is not a proper JSON file, but a file containing one valid JSON
+object per line. This file is designed so each line is streamed out
+as each test is run, so it can be read as a stream while the test
+harness is running.
+
+### testlog.junit.xml
+
+This is a valid JUnit XML description of all tests run. It is not
+streamed out, and is written only once all tests complete running.
+
+When tests use the `tap` protocol each test will be recorded as a
+testsuite container, with each case named by the number of the result.
+
+When tests use the `gtest` protocol Meson will inject arguments to the
+test to generate its own JUnit XML, which Meson will include as part
+of this XML file.
+
+*New in 0.55.0*
diff --git a/meson/docs/markdown/Unity-builds.md b/meson/docs/markdown/Unity-builds.md
new file mode 100644
index 000000000..833383d88
--- /dev/null
+++ b/meson/docs/markdown/Unity-builds.md
@@ -0,0 +1,39 @@
+---
+short-description: Unity builds are a technique for reducing build times
+...
+
+# Unity builds
+
+Unity builds are a technique for cutting down build times. The way it
+works is relatively straightforward. Suppose we have source files
+`src1.c`, `src2.c` and `src3.c`. Normally we would run the compiler
+three times, once for each file. In a unity build we instead compile
+all these sources in a single unit. The simplest approach is to create
+a new source file that looks like this.
+
+```c
+#include <src1.c>
+#include <src2.c>
+#include <src3.c>
+```
+
+This is then compiled rather than the individual files. The exact
+speedup depends on the code base, of course, but it is not uncommon to
+obtain more than 50% speedup in compile times. This happens even
+though the Unity build uses only one CPU whereas individual compiles
+can be run in parallel. Unity builds can also lead to faster code,
+because the compiler can do more aggressive optimizations (e.g.
+inlining).
+
+The downside is that incremental builds are as slow as full rebuilds
+(because that is what they are).
Unity compiles also use more memory, +which may become an issue in certain scenarios. There may also be some +bugs in the source that need to be fixed before Unity compiles work. +As an example, if both `src1.c` and `src2.c` contain a static function +or variable of the same name, there will be a clash. + +Meson has built-in support for unity builds. To enable them, just pass +`--unity on` on the command line or enable unity builds with the GUI. +No code changes are necessary apart from the potential clash issue +discussed above. Meson will automatically generate all the necessary +inclusion files for you. diff --git a/meson/docs/markdown/Use-of-Python.md b/meson/docs/markdown/Use-of-Python.md new file mode 100644 index 000000000..e69a39791 --- /dev/null +++ b/meson/docs/markdown/Use-of-Python.md @@ -0,0 +1,48 @@ +# Use of Python + +Meson is implemented in Python. This has both positive and negative +sides. The main thing people seem to be mindful about is the +dependency on Python to build source code. This page discusses various +aspects of this problem. + +# Dependency hell + +There have been many Python programs that are difficult to maintain on +multiple platforms. The reasons come mostly from dependencies. The +program may use dependencies that are hard to compile on certain +platforms, are outdated, conflict with other dependencies, not +available on a given Python version and so on. + +Meson avoids dependency problems with one simple rule: Meson is not +allowed to have any dependencies outside the Python basic library. The +only thing you need is Python 3 (and possibly Ninja). + +## Reimplementability + +Meson has been designed in such a way that the implementation language +is never exposed in the build definitions. This makes it possible (and +maybe even easy) to reimplement Meson in any other programming +language. There are currently no plans to reimplement Meson, but we +will make sure that Python is not exposed inside the build +definitions. + +## Cross platform tooling + +There is no one technical solution or programming language that works +natively on all operating systems currently in use. When Autotools was +designed in the late 80s, Unix shell was available pretty much +anywhere. This is no longer the case. + +It is also the case that as any project gets larger, sooner or later +it requires code generation, scripting or other tooling. This seems to +be inevitable. Because there is no scripting language that would be +available everywhere, these tools either need to be rewritten for each +platform (which is a lot of work and is prone to errors) or the +project needs to take a dependency on _something_. + +Any project that uses Meson (at least the current version) can rely on +the fact that Python 3 will always be available, because you can't +compile the project without it. All tooling can then be done in Python +3 with the knowledge that it will run on any platform without any +extra dependencies (modulo the usual porting work). This reduces +maintenance effort on multiplatform projects by a fair margin. diff --git a/meson/docs/markdown/Users.md b/meson/docs/markdown/Users.md new file mode 100644 index 000000000..032ec1ed3 --- /dev/null +++ b/meson/docs/markdown/Users.md @@ -0,0 +1,158 @@ +--- +title: Users +... + +# List of projects using Meson + +If you have a project that uses Meson that you want to add to this +list, please [file a +pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) +for it. 
All the software on this list is tested for regressions before +release, so it's highly recommended that projects add themselves +here. Some additional projects are listed in the [`meson` GitHub +topic](https://github.com/topics/meson). + + - [2048.cpp](https://github.com/plibither8/2048.cpp), a fully featured terminal version of the game "2048" written in C++ + - [Aravis](https://github.com/AravisProject/aravis), a glib/gobject based library for video acquisition using Genicam cameras + - [Akira](https://github.com/akiraux/Akira), a native Linux app for UI and UX design built in Vala and Gtk + - [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3 + - [Arduino sample project](https://github.com/jpakkane/mesonarduino) + - [bolt](https://gitlab.freedesktop.org/bolt/bolt), userspace daemon to enable security levels for Thunderboltâ„¢ 3 on Linux + - [Budgie Desktop](https://github.com/budgie-desktop/budgie-desktop), a desktop environment built on GNOME technologies + - [Bzip2](https://gitlab.com/federicomenaquintero/bzip2), the bzip2 compressor/decompressor + - [Cage](https://github.com/Hjdskes/cage), a Wayland kiosk + - [canfigger](https://github.com/andy5995/canfigger), simple configuration file parser library + - [casync](https://github.com/systemd/casync), Content-Addressable Data Synchronization Tool + - [cinnamon-desktop](https://github.com/linuxmint/cinnamon-desktop), the cinnamon desktop library + - [Cozy](https://github.com/geigi/cozy), a modern audio book player for Linux and macOS using GTK+ 3 + - [dav1d](https://code.videolan.org/videolan/dav1d), an AV1 decoder + - [dbus-broker](https://github.com/bus1/dbus-broker), Linux D-Bus Message Broker + - [DOSBox Staging](https://github.com/dosbox-staging/dosbox-staging), DOS/x86 emulator + - [DPDK](http://dpdk.org/browse/dpdk), Data Plane Development Kit, a set of libraries and drivers for fast packet processing + - [DXVK](https://github.com/doitsujin/dxvk), a Vulkan-based Direct3D 11 implementation for Linux using Wine + - [elementary OS](https://github.com/elementary/), Linux desktop oriented distribution + - [Emeus](https://github.com/ebassi/emeus), constraint based layout manager for GTK+ + - [Entangle](https://entangle-photo.org/), tethered camera control and capture desktop application + - [ESP8266 Arduino sample project](https://github.com/trilader/arduino-esp8266-meson), sample project for using the ESP8266 Arduino port with Meson + - [FeedReader](https://github.com/jangernert/FeedReader), a modern desktop application designed to complement existing web-based RSS accounts + - [Flecs](https://github.com/SanderMertens/flecs), a Fast and Lightweight ECS (Entity Component System) C library + - [Foliate](https://github.com/johnfactotum/foliate), a simple and modern GTK eBook reader, built with GJS and Epub.js + - [Fractal](https://wiki.gnome.org/Apps/Fractal/), a Matrix messaging client for GNOME + - [Frida](https://github.com/frida/frida-core), a dynamic binary instrumentation toolkit + - [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware + - [GameMode](https://github.com/FeralInteractive/gamemode), a daemon/lib combo for Linux that allows games to request a set of optimisations be temporarily applied to the host OS + - [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop. 
+ - [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer + - [Glorytun](https://github.com/angt/glorytun), a multipath UDP tunnel + - [GNOME Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a GNOME hypervisor + - [GNOME Builder](https://gitlab.gnome.org/GNOME/gnome-builder), an IDE for the GNOME platform + - [GNOME MPV](https://github.com/gnome-mpv/gnome-mpv), GNOME frontend to the mpv video player + - [GNOME Recipes](https://gitlab.gnome.org/GNOME/recipes), application for cooking recipes + - [GNOME Software](https://gitlab.gnome.org/GNOME/gnome-software), an app store for GNOME + - [GNOME Twitch](https://github.com/vinszent/gnome-twitch), an app for viewing Twitch streams on GNOME desktop + - [GNOME Usage](https://gitlab.gnome.org/GNOME/gnome-usage), a GNOME application for visualizing system resources + - [GNU FriBidi](https://github.com/fribidi/fribidi), the open source implementation of the Unicode Bidirectional Algorithm + - [Graphene](https://ebassi.github.io/graphene/), a thin type library for graphics + - [Grilo](https://git.gnome.org/browse/grilo) and [Grilo plugins](https://git.gnome.org/browse/grilo-plugins), the Grilo multimedia framework + - [GStreamer](https://gitlab.freedesktop.org/gstreamer/gstreamer), multimedia framework + - [GTK+](https://gitlab.gnome.org/GNOME/gtk), the multi-platform toolkit used by GNOME + - [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D + - [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO + - [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux + - [HarfBuzz](https://github.com/harfbuzz/harfbuzz), a text shaping engine + - [HelenOS](http://helenos.org), a portable microkernel-based multiserver operating system + - [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C + - [IGT](https://gitlab.freedesktop.org/drm/igt-gpu-tools), Linux kernel graphics driver test suite + - [inih](https://github.com/benhoyt/inih) (INI Not Invented Here), a small and simple .INI file parser written in C + - [Irssi](https://github.com/irssi/irssi), a terminal chat client in C + - [iSH](https://github.com/tbodt/ish), Linux shell for iOS + - [Janet](https://github.com/janet-lang/janet), a functional and imperative programming language and bytecode interpreter + - [json](https://github.com/nlohmann/json), JSON for Modern C++ + - [JsonCpp](https://github.com/open-source-parsers/jsoncpp), a C++ library for interacting with JSON + - [Json-glib](https://gitlab.gnome.org/GNOME/json-glib), GLib-based JSON manipulation library + - [Kiwix libraries](https://github.com/kiwix/kiwix-lib) + - [Knot Resolver](https://gitlab.labs.nic.cz/knot/knot-resolver), Full caching DNS resolver implementation + - [Ksh](https://github.com/att/ast), a Korn Shell + - [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network + - [Le](https://github.com/kirushyk/le), machine learning framework + - [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android + - [Libdrm](https://gitlab.freedesktop.org/mesa/drm), a library for abstracting DRM kernel interfaces + - [libeconf](https://github.com/openSUSE/libeconf), Enhanced config file parsing library, which merges config files placed in several locations into one 
+ - [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management + - [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface + - [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2 + - [libglvnd](https://gitlab.freedesktop.org/glvnd/libglvnd), Vendor neutral OpenGL dispatch library for Unix-like OSes + - [Libhttpseverywhere](https://git.gnome.org/browse/libhttpseverywhere), a library to enable httpseverywhere on any desktop app + - [libmodulemd](https://github.com/fedora-modularity/libmodulemd), a GObject Introspected library for managing [Fedora Project](https://getfedora.org/) module metadata. + - [Libosmscout](https://github.com/Framstag/libosmscout), a C++ library for offline map rendering, routing and location +lookup based on OpenStreetMap data + - [libratbag](https://github.com/libratbag/libratbag), provides a DBus daemon to configure input devices, mainly gaming mice. + - [libspng](https://github.com/randy408/libspng), a C library for reading and writing Portable Network Graphics (PNG) +format files + - [libui](https://github.com/andlabs/libui), a simple and portable (but not inflexible) GUI library in C that uses the native GUI technologies of each platform it supports + - [Libva](https://github.com/intel/libva), an implementation for the VA (VIdeo Acceleration) API + - [Libvirt](https://libvirt.org), a toolkit to manage virtualization platforms + - [Libzim](https://github.com/openzim/libzim), the reference implementation for the ZIM file format + - [Marker](https://github.com/fabiocolacio/Marker), a GTK-3 markdown editor + - [Mesa](https://gitlab.freedesktop.org/mesa/mesa/), an open source graphics driver project + - [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your system via Wifi-Display specification aka Miracast + - [mrsh](https://github.com/emersion/mrsh), a minimal POSIX shell + - [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the GNOME file manager + - [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment + - [OcherBook](https://github.com/ccoffing/OcherBook), an open source book reader for Kobo devices + - [oomd](https://github.com/facebookincubator/oomd), a userspace Out-Of-Memory (OOM) killer for Linux systems + - [OpenH264](https://github.com/cisco/openh264), open source H.264 codec + - [OpenHMD](https://github.com/OpenHMD/OpenHMD), a free and open source API and drivers for immersive technology, such as head mounted displays with built in head tracking + - [OpenTitan](https://github.com/lowRISC/opentitan), an open source silicon Root of Trust (RoT) project. + - [Orc](https://gitlab.freedesktop.org/gstreamer/orc), the Optimized Inner Loop Runtime Compiler + - [OTS](https://github.com/khaledhosny/ots), the OpenType Sanitizer, parses and serializes OpenType files (OTF, TTF) and WOFF and WOFF2 font files, validating and sanitizing them as it goes. 
Used by Chromium and Firefox + - [Outlier](https://github.com/kerolasa/outlier), a small Hello World style Meson example project + - [p11-kit](https://github.com/p11-glue/p11-kit), PKCS#11 module aggregator + - [Pacman](https://gitlab.archlinux.org/pacman/pacman.git), a package manager for Arch Linux + - [Pango](https://git.gnome.org/browse/pango/), an Internationalized text layout and rendering library + - [Parzip](https://github.com/jpakkane/parzip), a multithreaded reimplementation of Zip + - [Peek](https://github.com/phw/peek), simple animated GIF screen recorder with an easy to use interface + - [PicoLibc](https://github.com/keith-packard/picolibc), a standard C library for small embedded systems with limited RAM + - [PipeWire](https://github.com/PipeWire/pipewire), a framework for video and audio for containerized applications + - [Paper Rock Scissors](https://github.com/michaelbrockus/paper_rock_scissors), a game with weapons themed at home paper rock scissors style. + - [Pithos](https://github.com/pithos/pithos), a Pandora Radio client + - [Pitivi](https://github.com/pitivi/pitivi/), a nonlinear video editor + - [Playerctl](https://github.com/acrisci/playerctl), mpris command-line controller and library for spotify, vlc, audacious, bmp, cmus, and others + - [Polari](https://gitlab.gnome.org/GNOME/polari), an IRC client + - [qboot](https://github.com/bonzini/qboot), a minimal x86 firmware for booting Linux kernels + - [radare2](https://github.com/radare/radare2), unix-like reverse engineering framework and commandline tools (not the default) + - [rmw](https://remove-to-waste.info), safe-remove utility for the command line + - [Rizin](https://rizin.re), Free and Open Source Reverse Engineering Framework + - [QEMU](https://qemu.org), a processor emulator and virtualizer + - [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock) + - [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP + - [Sequeler](https://github.com/Alecaddd/sequeler), a friendly SQL client for Linux, built with Vala and Gtk + - [Siril](https://gitlab.com/free-astro/siril), an image processing software for amateur astronomy + - [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP + - [sway](https://github.com/swaywm/sway), i3-compatible Wayland compositor + - [Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool + - [systemd](https://github.com/systemd/systemd), the init system + - [szl](https://github.com/dimkr/szl), a lightweight, embeddable scripting language + - [Taisei Project](https://taisei-project.org/), an open-source Touhou Project clone and fangame + - [Terminology](https://github.com/billiob/terminology), a terminal emulator based on the Enlightenment Foundation Libraries + - [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3 + - [Tizonia](https://github.com/tizonia/tizonia-openmax-il), a command-line cloud music player for Linux with support for Spotify, Google Play Music, YouTube, SoundCloud, TuneIn, Plex servers and Chromecast devices + - [UFJF-MLTK](https://github.com/mateus558/UFJF-Machine-Learning-Toolkit), A C++ cross-platform framework for machine learning algorithms development and testing + - [Vala Language Server](https://github.com/benwaffle/vala-language-server), code intelligence engine for the Vala and 
Genie programming languages + - [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala + - [Venom](https://github.com/naxuroqa/Venom), a modern Tox client for the GNU/Linux desktop + - [VMAF](https://github.com/Netflix/vmaf) (by Netflix), a perceptual video quality assessment based on multi-method fusion + - [Wayland](https://github.com/wayland-project/wayland) and [Weston](https://github.com/wayland-project/weston), a next generation display server + - [wlroots](https://github.com/swaywm/wlroots), a modular Wayland compositor library + - [wxFormBuilder](https://github.com/wxFormBuilder/wxFormBuilder), RAD tool for wxWidgets GUI design + - [xi-gtk](https://github.com/eyelash/xi-gtk), a GTK+ front-end for the Xi editor + - [Xorg](https://gitlab.freedesktop.org/xorg/xserver), the X.org display server (not the default yet) + - [X Test Suite](https://gitlab.freedesktop.org/xorg/test/xts), The X.org test suite + - [zathura](https://github.com/pwmt/zathura), a highly customizable and functional document viewer based on the +girara user interface library and several document libraries + - [Zrythm](https://git.zrythm.org/cgit/zrythm), a cross-platform digital audio workstation using GTK+ 3 + - [ZStandard](https://github.com/facebook/zstd/commit/4dca56ed832c6a88108a2484a8f8ff63d8d76d91), a compression algorithm developed at Facebook (not used by default) + +Note that a more up-to-date list of GNOME projects that use Meson can +be found +[here](https://wiki.gnome.org/Initiatives/GnomeGoals/MesonPorting). diff --git a/meson/docs/markdown/Using-multiple-build-directories.md b/meson/docs/markdown/Using-multiple-build-directories.md new file mode 100644 index 000000000..557f3444d --- /dev/null +++ b/meson/docs/markdown/Using-multiple-build-directories.md @@ -0,0 +1,93 @@ +# Using multiple build directories + +One of the main design goals of Meson has been to build all projects +out-of-source. This means that *all* files generated during the build +are placed in a separate subdirectory. This goes against common Unix +tradition where you build your projects in-source. Building out of +source gives two major advantages. + +First of all this makes for very simple `.gitignore` files. In +classical build systems you may need to have tens of lines of +definitions, most of which contain wildcards. When doing out of source +builds all of this busywork goes away. A common ignore file for a +Meson project only contains a few lines that are the build directory +and IDE project files. + +Secondly this makes it very easy to clean your projects: just delete +the build subdirectory and you are done. There is no need to guess +whether you need to run `make clean`, `make distclean`, `make +mrproper` or something else. When you delete a build subdirectory +there is no possible way to have any lingering state from your old +builds. + +The true benefit comes from somewhere else, though. + +## Multiple build directories for the same source tree + +Since a build directory is fully self contained and treats the source +tree as a read-only piece of data, it follows that you can have +arbitrarily many build trees for any source tree at the same time. +Since all build trees can have different configuration, this is +extremely powerful. Now you might be wondering why one would want to +have multiple build setups at the same time. Let's examine this by +setting up a hypothetical project. 
+ +The first thing to do is to set up the default build, that is, the one +we are going to use over 90% of the time. In this we use the system +compiler and build with debug enabled and no optimizations so it +builds as fast as possible. This is the default project type for +Meson, so setting it up is simple. + + mkdir builddir + meson builddir + +Another common setup is to build with debug and optimizations to, for +example, run performance tests. Setting this up is just as simple. + + mkdir buildopt + meson --buildtype=debugoptimized buildopt + +For systems where the default compiler is GCC, we would like to +compile with Clang, too. So let's do that. + + mkdir buildclang + CC=clang CXX=clang++ meson buildclang + +You can add cross builds, too. As an example, let's set up a Linux -> +Windows cross compilation build using MinGW. + + mkdir buildwine + meson --cross-file=mingw-cross.txt buildwine + +The cross compilation file sets up Wine so that not only can you +compile your application, you can also run the unit test suite just by +issuing the command `meson test`. + +To compile any of these build types, just cd into the corresponding +build directory and run `meson compile` or instruct your IDE to do the +same. Note that once you have set up your build directory once, you +can just run Ninja and Meson will ensure that the resulting build is +fully up to date according to the source. Even if you have not touched +one of the directories in weeks and have done major changes to your +build configuration, Meson will detect this and bring the build +directory up to date (or print an error if it can't do that). This +allows you to do most of your work in the default directory and use +the others every now and then without having to babysit your build +directories. + +## Specialized uses + +Separate build directories allows easy integration for various +different kinds of tools. As an example, Clang comes with a static +analyzer. It is meant to be run from scratch on a given source tree. +The steps to run it with Meson are very simple. + + rm -rf buildscan + mkdir buildscan + scan-build meson buildscan + cd buildscan + scan-build ninja + +These commands are the same for every single Meson project, so they +could even be put in a script turning static analysis into a single +command. diff --git a/meson/docs/markdown/Using-the-WrapDB.md b/meson/docs/markdown/Using-the-WrapDB.md new file mode 100644 index 000000000..baccfdc17 --- /dev/null +++ b/meson/docs/markdown/Using-the-WrapDB.md @@ -0,0 +1,47 @@ +# Using the WrapDB + +The Wrap database is a web service that provides Meson build +definitions to projects that do not have it natively. Using it is +simple. The service can be found +[here](https://wrapdb.mesonbuild.com). + +The front page lists all projects that are on the service. Select the +one you want and click it. The detail page lists available versions by +branch and revision id. The branch names come from upstream releases +and revision ids are version numbers internal to the database. +Whenever the packaging is updated a new revision is released to the +service a new revision with a bigger revision id is added. Usually you +want to select the newest branch with the highest revision id. + +You can get the actual wrap file which tells Meson how to fetch the +project by clicking on the download link on the page. As an example, +the wrap file for [zlib-1.2.8, revision +4](https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.8/4/get_wrap) +looks like this. 
You can find detailed documentation about it in [the +Wrap manual](Wrap-dependency-system-manual.md). + + [wrap-file] + directory = zlib-1.2.8 + + source_url = http://zlib.net/zlib-1.2.8.tar.gz + source_filename = zlib-1.2.8.tar.gz + source_hash = 36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d + + patch_url = https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.8/4/get_zip + patch_filename = zlib-1.2.8-4-wrap.zip + patch_hash = 2327a42c8f73a4289ee8c9cd4abc43b324d0decc28d6e609e927f0a50321af4a + +Add this file to your project with the name `subprojects/zlib.wrap`. +Then you can use it in your `meson.build` file with this directive: + + zproj = subproject('zlib') + +When Meson encounters this it will automatically download, unpack and +patch the source files. + +## Contributing build definitions + +The contents of the Wrap database are tracked in git repos of the +[Mesonbuild project](https://github.com/mesonbuild). The actual +process is simple and described in [submission +documentation](Adding-new-projects-to-wrapdb.md). diff --git a/meson/docs/markdown/Using-with-Visual-Studio.md b/meson/docs/markdown/Using-with-Visual-Studio.md new file mode 100644 index 000000000..2680e8baf --- /dev/null +++ b/meson/docs/markdown/Using-with-Visual-Studio.md @@ -0,0 +1,47 @@ +--- +short-description: How to use Meson in Visual Studio +... + +# Using with Visual Studio + +In order to generate Visual Studio projects, Meson needs to know the +settings of your installed version of Visual Studio. The only way to +get this information is to run Meson under the Visual Studio Command +Prompt. + +You can always find the Visual Studio Command Prompt by searching from +the Start Menu. However, the name is different for each Visual Studio +version. With Visual Studio 2019, look for "x64 Native Tools Command +Prompt for VS 2019". The next steps are [the same as +always](https://mesonbuild.com/Running-Meson.html#configuring-the-build-directory): + +1. `cd` into your source directory +1. `meson setup builddir`, which will create and setup the build directory +1. `meson compile -C builddir`, to compile your code. You can also use `ninja -C builddir` here if you are using the default Ninja backend. + +If you wish to generate Visual Studio project files, pass `--backend +vs`. At the time of writing the Ninja backend is more mature than the +VS backend so you might want to use it for serious work. + +# Using Clang-CL with Visual Studio + +*(new in 0.52.0)* + +You will first need to get a copy of llvm+clang for Windows, such versions +are available from a number of sources, including the llvm website. Then you +will need the [llvm toolset extension for visual +studio](https://marketplace.visualstudio.com/items?itemName=LLVMExtensions.llvm-toolchain). +You then need to either use a [native file](Native-environments.md#binaries) +or `set CC=clang-cl`, and `set CXX=clang-cl` to use those compilers, Meson +will do the rest. + +This only works with visual studio 2017 and 2019. + +There is currently no support in Meson for clang/c2. + +# Using Intel-CL (ICL) with Visual Studio + +*(new in 0.52.0)* + +To use ICL you need only have ICL installed and launch an ICL development +shell like you would for the ninja backend and Meson will take care of it. 
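+
+Going back to the Clang-CL case above, the compiler selection can also
+be captured in a native file instead of environment variables. A
+minimal sketch might look like this (the file name `clang-cl.ini` is
+illustrative, and it assumes `clang-cl` is on your `PATH`):
+
+```ini
+[binaries]
+c = 'clang-cl'
+cpp = 'clang-cl'
+```
+
+You would then configure with something like
+`meson setup builddir --native-file clang-cl.ini`.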
diff --git a/meson/docs/markdown/Using-wraptool.md b/meson/docs/markdown/Using-wraptool.md new file mode 100644 index 000000000..ffa8309cf --- /dev/null +++ b/meson/docs/markdown/Using-wraptool.md @@ -0,0 +1,78 @@ +# Using wraptool + +Wraptool is a subcommand of Meson that allows you to manage your +source dependencies using the WrapDB database. It gives you all things +you would expect, such as installing and updating dependencies. The +wrap tool works on all platforms, the only limitation is that the wrap +definition works on your target platform. If you find some Wraps that +don't work, please file bugs or, even better, patches. + +All code examples here assume that you are running the commands in +your top level source directory. Lines that start with the `$` mark +are commands to type. + +## Simple querying + +The simplest operation to do is to query the list of packages +available. To list them all issue the following command: + + $ meson wrap list + box2d + enet + gtest + libjpeg + liblzma + libpng + libxml2 + lua + ogg + sqlite + vorbis + zlib + +Usually you want to search for a specific package. This can be done +with the `search` command: + + $ meson wrap search jpeg + libjpeg + +To determine which versions of libjpeg are available to install, issue +the `info` command: + + $ meson wrap info libjpeg + Available versions of libjpeg: + 9a 2 + +The first number is the upstream release version, in this case +`9a`. The second number is the Wrap revision number. They don't relate +to anything in particular, but larger numbers imply newer +releases. You should always use the newest available release. + +## Installing dependencies + +Installing dependencies is just as straightforward. First just create +the `subprojects` directory at the top of your source tree and issue +the install command. + + $ meson wrap install libjpeg + Installed libjpeg branch 9a revision 2 + +Now you can issue a `subproject('libjpeg')` in your `meson.build` file +to use it. + +To check if your projects are up to date you can issue the `status` command. + + $ meson wrap status + Subproject status + libjpeg up to date. Branch 9a, revision 2. + zlib not up to date. Have 1.2.8 2, but 1.2.8 4 is available. + +In this case `zlib` has a newer release available. Updating it is +straightforward: + + $ meson wrap update zlib + Updated zlib to branch 1.2.8 revision 4 + +Wraptool can do other things besides these. Documentation for these +can be found in the command line help, which can be accessed by +`meson wrap --help`. diff --git a/meson/docs/markdown/Vala.md b/meson/docs/markdown/Vala.md new file mode 100644 index 000000000..d3edce0e2 --- /dev/null +++ b/meson/docs/markdown/Vala.md @@ -0,0 +1,350 @@ +--- +title: Vala +short-description: Compiling Vala and Genie programs +... + +# Compiling Vala applications and libraries +Meson supports compiling applications and libraries written in +[Vala](https://vala-project.org/) and +[Genie](https://wiki.gnome.org/Projects/Genie) . A skeleton `meson.build` file: + +```meson +project('vala app', 'vala', 'c') + +dependencies = [ + dependency('glib-2.0'), + dependency('gobject-2.0'), +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` + +You must always specify the `glib-2.0` and `gobject-2.0` libraries as +dependencies, because all current Vala applications use them. 
+[GLib](https://developer.gnome.org/glib/stable/) is used for basic data types +and [GObject](https://developer.gnome.org/gobject/stable/) is used for the +runtime type system. + + +## Using libraries + +Meson uses the [`dependency()`](Reference-manual.md#dependency) +function to find the relevant VAPI, C headers and linker flags when it +encounters a Vala source file in a build target. Vala needs a VAPI +file and a C header or headers to use a library. The VAPI file helps +map Vala code to the library's C programming interface. It is the +[`pkg-config`](https://www.freedesktop.org/wiki/Software/pkg-config/) +tool that makes finding these installed files all work seamlessly +behind the scenes. When a `pkg-config` file doesn't exist for the +library then the [`find_library()`](Reference-manual.md#find_library) +method of the [compiler object](Reference-manual.md#compiler-object) +needs to be used. Examples are given later. + +Note Vala uses libraries that follow the C Application Binary Interface (C ABI). +The library, however, could be written in C, Vala, Rust, Go, C++ or any other +language that can generate a binary compatible with the C ABI and so provides C +headers. + + +### The simplest case +This first example is a simple addition to the `meson.build` file because: + + * the library has a `pkg-config` file, `gtk+-3.0.pc` + * the VAPI is distributed with Vala and so installed with the Vala compiler + * the VAPI is installed in Vala's standard search path + * the VAPI, `gtk+-3.0.vapi`, has the same name as the `pkg-config` file + +Everything works seamlessly in the background and only a single extra line is +needed: + +```meson +project('vala app', 'vala', 'c') + +dependencies = [ + dependency('glib-2.0'), + dependency('gobject-2.0'), + dependency('gtk+-3.0'), +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` + +GTK+ is the graphical toolkit used by GNOME, elementary OS and other +desktop environments. The binding to the library, the VAPI file, is +distributed with Vala. + +Other libraries may have a VAPI that is distributed with the library +itself. Such libraries will have their VAPI file installed along with +their other development files. The VAPI is installed in Vala's +standard search path and so works just as seamlessly using the +`dependency()` function. + + +### Targeting a version of GLib + +Meson's [`dependency()`](Reference-manual.md#dependency) function +allows a version check of a library. This is often used to check a +minimum version is installed. When setting a minimum version of GLib, +Meson will also pass this to the Vala compiler using the +`--target-glib` option. + +This is needed when using GTK+'s user interface definition files with +Vala's `[GtkTemplate]`, `[GtkChild]` and `[GtkCallback]` attributes. +This requires `--target-glib 2.38`, or a newer version, to be passed +to Vala. With Meson this is simply done with: + +```meson +project('vala app', 'vala', 'c') + +dependencies = [ + dependency('glib-2.0', version: '>=2.38'), + dependency('gobject-2.0'), + dependency('gtk+-3.0'), +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` + +Using `[GtkTemplate]` also requires the GTK+ user interface definition +files to be built in to the binary as GResources. 
For completeness, +the next example shows this: + +```meson +project('vala app', 'vala', 'c') + +dependencies = [ + dependency('glib-2.0', version: '>=2.38'), + dependency('gobject-2.0'), + dependency('gtk+-3.0'), +] + +sources = files('app.vala') + +sources += import( 'gnome' ).compile_resources( + 'project-resources', + 'src/resources/resources.gresource.xml', + source_dir: 'src/resources', +) + +executable('app_name', sources, dependencies: dependencies) +``` + + +### Adding to Vala's search path + +So far we have covered the cases where the VAPI file is either +distributed with Vala or the library. A VAPI can also be included in +the source files of your project. The convention is to put it in the +`vapi` directory of your project. + +This is needed when a library does not have a VAPI or your project +needs to link to another component in the project that uses the C ABI. +For example if part of the project is written in C. + +The Vala compiler's `--vapidir` option is used to add the project +directory to the VAPI search path. In Meson this is done with the +`add_project_arguments()` function: + +```meson +project('vala app', 'vala', 'c') + +vapi_dir = meson.current_source_dir() / 'vapi' + +add_project_arguments(['--vapidir', vapi_dir], language: 'vala') + +dependencies = [ + dependency('glib-2.0'), + dependency('gobject-2.0'), + dependency('foo'), # 'foo.vapi' will be resolved as './vapi/foo.vapi' +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` + +If the VAPI is for an external library then make sure that the VAPI +name corresponds to the pkg-config file name. + +The [`vala-extra-vapis` +repository](https://gitlab.gnome.org/GNOME/vala-extra-vapis) is a +community maintained repository of VAPIs that are not distributed. +Developers use the repository to share early work on new bindings and +improvements to existing bindings. So the VAPIs can frequently change. +It is recommended VAPIs from this repository are copied in to your +project's source files. + +This also works well for starting to write new bindings before they +are shared with the `vala-extra-vapis` repository. + + +### Libraries without pkg-config files + +A library that does not have a corresponding pkg-config file may mean +`dependency()` is unsuitable for finding the C and Vala interface +files. In this case it is necessary to use the `find_library()` method +of the compiler object. + +The first example uses Vala's POSIX binding. There is no pkg-config +file because POSIX includes the standard C library on Unix systems. +All that is needed is the VAPI file, `posix.vapi`. This is included +with Vala and installed in Vala's standard search path. Meson just +needs to be told to only find the library for the Vala compiler: + +```meson +project('vala app', 'vala', 'c') + +dependencies = [ + dependency('glib-2.0'), + dependency('gobject-2.0'), + meson.get_compiler('vala').find_library('posix'), +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` + +The next example shows how to link with a C library where no +additional VAPI is needed. The standard maths functions are already +bound in `glib-2.0.vapi`, but the GNU C library requires linking to +the maths library separately. 
In this example Meson is told to find +the library only for the C compiler: + +```meson +project('vala app', 'vala', 'c') + +dependencies = [ + dependency('glib-2.0'), + dependency('gobject-2.0'), + meson.get_compiler('c').find_library('m', required: false), +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` + +The `required: false` means the build will continue when using another +C library that does not separate the maths library. See [Add math +library (-lm) portably](howtox.md#add-math-library-lm-portably). + +The final example shows how to use a library that does not have a +pkg-config file and the VAPI is in the `vapi` directory of your +project source files: + +```meson +project('vala app', 'vala', 'c') + +vapi_dir = meson.current_source_dir() / 'vapi' + +add_project_arguments(['--vapidir', vapi_dir], language: 'vala') + +dependencies = [ + dependency('glib-2.0'), + dependency('gobject-2.0'), + meson.get_compiler('c').find_library('foo'), + meson.get_compiler('vala').find_library('foo', dirs: vapi_dir), +] + +sources = files('app.vala') + +executable('app_name', sources, dependencies: dependencies) +``` +The `find_library()` method of the C compiler object will try to find the C +header files and the library to link with. + +The `find_library()` method of the Vala compiler object needs to have the `dir` +keyword added to include the project VAPI directory. This is not added +automatically by `add_project_arguments()`. + +### Working with the Vala Preprocessor + +Passing arguments to [Vala's +preprocessor](https://wiki.gnome.org/Projects/Vala/Manual/Preprocessor) +requires specifying the language as `vala`. For example, the following +statement sets the preprocessor symbol `USE_FUSE`: + +```meson +add_project_arguments('-D', 'USE_FUSE', language: 'vala') +``` + +If you need to pass an argument to the C pre-processor then specify +the language as c. For example to set FUSE_USE_VERSION to 26 use: + +```meson +add_project_arguments('-DFUSE_USE_VERSION=26', language: 'c') +``` + +## Building libraries + + +### Changing C header and VAPI names + +Meson's [`library`](Reference-manual.md#library) target automatically +outputs the C header and the VAPI. They can be renamed by setting the +`vala_header` and `vala_vapi` arguments respectively: + +```meson +foo_lib = shared_library('foo', 'foo.vala', + vala_header: 'foo.h', + vala_vapi: 'foo-1.0.vapi', + dependencies: [glib_dep, gobject_dep], + install: true, + install_dir: [true, true, true]) +``` + +In this example, the second and third elements of the `install_dir` +array indicate the destination with `true` to use default directories +(i.e. `include` and `share/vala/vapi`). + + +### GObject Introspection and language bindings + +A 'binding' allows another programming language to use a library +written in Vala. Because Vala uses the GObject type system as its +runtime type system it is very easy to use introspection to generate a +binding. A Meson build of a Vala library can generate the GObject +introspection metadata. The metadata is then used in separate projects +with [language specific +tools](https://wiki.gnome.org/Projects/Vala/LibraryWritingBindings) to +generate a binding. + +The main form of metadata is a GObject Introspection Repository (GIR) +XML file. GIRs are mostly used by languages that generate bindings at +compile time. Languages that generate bindings at runtime mostly use a +typelib file, which is generated from the GIR. + +Meson can generate a GIR as part of the build. 
For a Vala library the +`vala_gir` option has to be set for the `library`: + +```meson +foo_lib = shared_library('foo', 'foo.vala', + vala_gir: 'Foo-1.0.gir', + dependencies: [glib_dep, gobject_dep], + install: true, + install_dir: [true, true, true, true]) +``` + +The `true` value in `install_dir` tells Meson to use the default +directory (i.e. `share/gir-1.0` for GIRs). The fourth element in the +`install_dir` array indicates where the GIR file will be installed. + +To then generate a typelib file use a custom target with the +`g-ir-compiler` program and a dependency on the library: + +```meson +g_ir_compiler = find_program('g-ir-compiler') +custom_target('foo typelib', command: [g_ir_compiler, '--output', '@OUTPUT@', '@INPUT@'], + input: meson.current_build_dir() / 'Foo-1.0.gir', + output: 'Foo-1.0.typelib', + depends: foo_lib, + install: true, + install_dir: get_option('libdir') / 'girepository-1.0') +``` diff --git a/meson/docs/markdown/Videos.md b/meson/docs/markdown/Videos.md new file mode 100644 index 000000000..fb600382e --- /dev/null +++ b/meson/docs/markdown/Videos.md @@ -0,0 +1,45 @@ +--- +short-description: Videos about Meson +... + +# Videos + + - [Behind (and under) the scenes of the Meson build + system](https://www.youtube.com/watch?v=iLN6wL7ExHU), Linux.conf.au + 2020 + + - [Behind the Scenes of a C++ Build + System](https://www.youtube.com/watch?v=34KzT2yvQuM), CppCon 2019 + + - [Compiling Multi-Million Line C++ Code Bases Effortlessly with the + Meson Build system](https://www.youtube.com/watch?v=SCZLnopmYBM), + CppCon 2018 + + - [The Meson Build System, 4+ years of work to become an overnight + success](https://www.youtube.com/watch?v=gHdTzdXkhRY), Linux.conf.au 2018 + + - [Power through simplicity, using Python in the Meson Build + System](https://youtu.be/3jF3oVsjIEM), Piter.py, 2017 + + - [Meson and the changing Linux build + landscape](https://media.ccc.de/v/ASG2017-111-meson_and_the_changing_linux_build_landscape), + All Systems Go 2017 + + - [Meson, compiling the world with + Python](https://www.youtube.com/watch?v=sEO4DC8hm34), Europython + 2017 + + - [Builds, dependencies and deployment in a modern multiplatform + world](https://www.youtube.com/embed/CTJtKtQ8R5k), Linux.conf.au + 2016 + + - [New world, new tools](https://www.youtube.com/embed/0-gx1qU2pPo), + Libre Application Summit 2016 + + - [Making build systems not + suck](https://www.youtube.com/embed/KPi0AuVpxLI), Linux.conf.au + 2015, Auckland, New Zealand + + - [Lightning talk at FOSDEM + 2014](http://mirror.onet.pl/pub/mirrors/video.fosdem.org/2014/H2215_Ferrer/Sunday/Introducing_the_Meson_build_system.webm), + The first ever public presentation on Meson diff --git a/meson/docs/markdown/Vs-External.md b/meson/docs/markdown/Vs-External.md new file mode 100644 index 000000000..ab3d191f9 --- /dev/null +++ b/meson/docs/markdown/Vs-External.md @@ -0,0 +1,57 @@ +# Visual Studio's external build projects + +Visual Studio supports developing projects that have an external build +system. If you wish to use this integration method, here is how you +set it up. This documentation describes Visual Studio 2019. Other +versions have not been tested, but they should work roughly in the +same way. + +## Creating and compiling + +Check out your entire project in some directory. Then open Visual +Studio and select `File -> New -> Project` and from the list of +project types select `Makefile project`. Click `Next`. + +Type your project's name In the `Project name` entry box. In this +example we're going to use `testproj`. 
Next select the `Location`
+entry and browse to the root of your project sources. Make sure that
+the checkbox `Place solution and project in the same directory` is
+checked. Click `Create`.
+
+The next dialog page defines build commands, which you should set up
+as follows:
+
+| entry | value |
+| ----- | ----- |
+|build | `meson compile -C $(Configuration)` |
+|clean | `meson compile -C $(Configuration) --clean` |
+|rebuild| `meson compile -C $(Configuration) --clean && meson compile -C $(Configuration)` |
+|Output | `$(Configuration)\name_of_your_executable.exe` |
+
+
+Then click `Finish`.
+
+Visual Studio has created a subdirectory in your source root. It is
+named after the project name. In this case it would be `testproj`. Now
+you need to set up Meson for building both Debug and Release versions
+in this directory. Open a VS dev tool terminal, go to the source root
+and issue the following commands.
+
+```
+meson testproj\Debug
+meson testproj\Release --buildtype=debugoptimized
+```
+
+Now you should have a working VS solution that compiles and runs both
+in Debug and Release modes.
+
+## Adding sources to the project
+
+This project is not very useful on its own, because it does not list
+any source files. VS does not seem to support adding entire source
+trees at once, so you have to add sources to the solution manually.
+
+In the main view go to `Solution Explorer`, right click on the project
+you just created and select `Add -> Existing Item`, browse to your
+source tree and select all files you want to have in this project. Now
+you can use the editor and debugger as in a normal VS project.
diff --git a/meson/docs/markdown/Windows-module.md b/meson/docs/markdown/Windows-module.md
new file mode 100644
index 000000000..a7131a734
--- /dev/null
+++ b/meson/docs/markdown/Windows-module.md
@@ -0,0 +1,30 @@
+# Windows module
+
+This module provides functionality used to build applications for
+Windows.
+
+## Methods
+
+### compile_resources
+
+Compiles Windows `rc` files specified in the positional arguments.
+Returns an opaque object that you put in the list of sources for the
+target you want to have the resources in. This method has the
+following keyword arguments:
+
+- `args` lists extra arguments to pass to the resource compiler
+- `depend_files` lists resource files that the resource script depends on
+  (e.g. bitmap, cursor, font, html, icon, message table, binary data or manifest
+  files referenced by the resource script) (*since 0.47.0*)
+- `depends` lists target(s) that this target depends on, even though it does not
+  take them as an argument (e.g. as above, but generated) (*since 0.47.0*)
+- `include_directories` lists directories to be both searched by the resource
+  compiler for referenced resource files, and added to the preprocessor include
+  search path.
+
+The resource compiler executable used is the first which exists from the
+following list:
+
+1. The `windres` executable given in the `[binaries]` section of the cross-file
+2. The `WINDRES` environment variable
+3. The resource compiler which is part of the same toolset as the C or C++ compiler in use.
diff --git a/meson/docs/markdown/Wrap-best-practices-and-tips.md b/meson/docs/markdown/Wrap-best-practices-and-tips.md
new file mode 100644
index 000000000..70b02250f
--- /dev/null
+++ b/meson/docs/markdown/Wrap-best-practices-and-tips.md
@@ -0,0 +1,158 @@
+# Wrap best practices and tips
+
+There are several things you need to take into consideration when
+writing a Meson build definition for a project.
This is especially +true when the project will be used as a subproject. This page lists a +few things to consider when writing your definitions. + +## Do not put config.h in external search path + +Many projects use a `config.h` header file that they use for +configuring their project internally. These files are never installed +to the system header files so there are no inclusion collisions. This +is not the case with subprojects, your project tree may have an +arbitrary number of configuration files, so we need to ensure they +don't clash. + +The basic problem is that the users of the subproject must be able to +include subproject headers without seeing its `config.h` file. The +most correct solution is to rename the `config.h` file into something +unique, such as `foobar-config.h`. This is usually not feasible unless +you are the maintainer of the subproject in question. + +The pragmatic solution is to put the config header in a directory that +has no other header files and then hide that from everyone else. One +way is to create a top level subdirectory called `internal` and use +that to build your own sources, like this: + +```meson +subdir('internal') # create config.h in this subdir +internal_inc = include_directories('internal') +shared_library('foo', 'foo.c', include_directories : internal_inc) +``` + +Many projects keep their `config.h` in the top level directory that +has no other source files in it. In that case you don't need to move +it but can just do this instead: + +```meson +internal_inc = include_directories('.') # At top level meson.build +``` + +## Make libraries buildable both as static and shared + +Some platforms (e.g. iOS) requires linking everything in your main app +statically. In other cases you might want shared libraries. They are +also faster during development due to Meson's relinking optimization. +However building both library types on all builds is slow and +wasteful. + +Your project should use the `library` method that can be toggled +between shared and static with the `default_library` builtin option. + + +```meson +mylib = library('foo', 'foo.c') +``` + +## Declare generated headers explicitly + +Meson's Ninja backend works differently from Make and other +systems. Rather than processing things directory per directory, it +looks at the entire build definition at once and runs the individual +compile jobs in what might look to the outside as a random order. + +The reason for this is that this is much more efficient so your builds +finish faster. The downside is that you have to be careful with your +dependencies. The most common problem here is headers that are +generated at compile time with e.g. code generators. If these headers +are needed when building code that uses these libraries, the compile +job might be run before the code generation step. The fix is to make +the dependency explicit like this: + +```meson +myheader = custom_target(...) +mylibrary = shared_library(...) +mydep = declare_dependency(link_with : mylibrary, + include_directories : include_directories(...), + sources : myheader) +``` + +And then you can use the dependency in the usual way: + +```meson +executable('dep_using_exe', 'main.c', + dependencies : mydep) +``` + +Meson will ensure that the header file has been built before compiling `main.c`. 
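+
+For reference, a minimal sketch of what the elided `custom_target(...)` and
+`shared_library(...)` calls above might look like; the generator script
+`gen_header.py` and the file names here are purely hypothetical:
+
+```meson
+gen = find_program('gen_header.py') # hypothetical code generator
+
+myheader = custom_target('gen-header',
+  input : 'api.def',
+  output : 'api.h',
+  command : [gen, '@INPUT@', '@OUTPUT@'])
+
+mylibrary = shared_library('mylib', 'mylib.c')
+mydep = declare_dependency(link_with : mylibrary,
+  include_directories : include_directories('.'),
+  sources : myheader)
+```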
+ +## Avoid exposing compilable source files in declare_dependency + +The main use for the `sources` argument in `declare_dependency` is to +construct the correct dependency graph for the backends, as +demonstrated in the previous section. It is extremely important to +note that it should *not* be used to directly expose compilable +sources (`.c`, `.cpp`, etc.) of dependencies, and should rather only +be used for header/config files. The following example will illustrate +what can go wrong if you accidentally expose compilable source files. + +So you've read about unity builds and how Meson natively supports +them. You decide to expose the sources of dependencies in order to +have unity builds that include their dependencies. For your support +library you do + +```meson +my_support_sources = files(...) + +mysupportlib = shared_library( + ... + sources : my_support_sources, + ...) +mysupportlib_dep = declare_dependency( + ... + link_with : mylibrary, + sources : my_support_sources, + ...) +``` + +And for your main project you do: + +```meson +mylibrary = shared_library( + ... + dependencies : mysupportlib_dep, + ...) +myexe = executable( + ... + link_with : mylibrary, + dependencies : mysupportlib_dep, + ...) +``` + +This is extremely dangerous. When building, `mylibrary` will build and +link the support sources `my_support_sources` into the resulting +shared library. Then, for `myexe`, these same support sources will be +compiled again, will be linked into the resulting executable, in +addition to them being already present in `mylibrary`. This can +quickly run afoul of the [One Definition Rule +(ODR)](https://en.wikipedia.org/wiki/One_Definition_Rule) in C++, as +you have more than one definition of a symbol, yielding undefined +behavior. While C does not have a strict ODR rule, there is no +language in the standard which guarantees such behavior to work. +Violations of the ODR can lead to weird idiosyncratic failures such as +segfaults. In the overwhelming number of cases, exposing library +sources via the `sources` argument in `declare_dependency` is thus +incorrect. If you wish to get full cross-library performance, consider +building `mysupportlib` as a static library instead and employing LTO. + +There are exceptions to this rule. If there are some natural +constraints on how your library is to be used, you can expose sources. +For instance, the WrapDB module for GoogleTest directly exposes the +sources of GTest and GMock. This is valid, as GTest and GMock will +only ever be used in *terminal* link targets. A terminal target is the +final target in a dependency link chain, for instance `myexe` in the +last example, whereas `mylibrary` is an intermediate link target. For +most libraries this rule is not applicable though, as you cannot in +general control how others consume your library, and as such should +not expose sources. diff --git a/meson/docs/markdown/Wrap-dependency-system-manual.md b/meson/docs/markdown/Wrap-dependency-system-manual.md new file mode 100644 index 000000000..eb5de1b13 --- /dev/null +++ b/meson/docs/markdown/Wrap-dependency-system-manual.md @@ -0,0 +1,266 @@ +# Wrap dependency system manual + +One of the major problems of multiplatform development is wrangling +all your dependencies. This is awkward on many platforms, especially +on ones that do not have a built-in package manager. The latter problem +has been worked around by having third party package managers. 
They +are not really a solution for end user deployment, because you can't +tell them to install a package manager just to use your app. On these +platforms you must produce self-contained applications. Same applies +when destination platform is missing (up-to-date versions of) your +application's dependencies. + +The traditional approach to this has been to bundle dependencies +inside your own project. Either as prebuilt libraries and headers or +by embedding the source code inside your source tree and rewriting +your build system to build them as part of your project. + +This is both tedious and error prone because it is always done by +hand. The Wrap dependency system of Meson aims to provide an automated +way to do this. + +## How it works + +Meson has a concept of [subprojects](Subprojects.md). They are a way +of nesting one Meson project inside another. Any project that builds +with Meson can detect that it is built as a subproject and build +itself in a way that makes it easy to use (usually this means as a +static library). + +To use this kind of a project as a dependency you could just copy and +extract it inside your project's `subprojects` directory. + +However there is a simpler way. You can specify a Wrap file that tells +Meson how to download it for you. If you then use this subproject in +your build, Meson will automatically download and extract it during +build. This makes subproject embedding extremely easy. + +All wrap files must have a name of `.wrap` form and be +in `subprojects` dir. + +Currently Meson has four kinds of wraps: +- wrap-file +- wrap-git +- wrap-hg +- wrap-svn + +## wrap format + +Wrap files are written in ini format, with a single header containing +the type of wrap, followed by properties describing how to obtain the +sources, validate them, and modify them if needed. An example +wrap-file for the wrap named `libfoobar` would have a filename +`libfoobar.wrap` and would look like this: + +```ini +[wrap-file] +directory = libfoobar-1.0 + +source_url = https://example.com/foobar-1.0.tar.gz +source_filename = foobar-1.0.tar.gz +source_hash = 5ebeea0dfb75d090ea0e7ff84799b2a7a1550db3fe61eb5f6f61c2e971e57663 +``` + +An example wrap-git will look like this: + +```ini +[wrap-git] +url = https://github.com/libfoobar/libfoobar.git +revision = head +``` + +## Accepted configuration properties for wraps + +- `directory` - name of the subproject root directory, defaults to the + name of the wrap. + +Since *0.55.0* those can be used in all wrap types, they were +previously reserved to `wrap-file`: + +- `patch_url` - download url to retrieve an optional overlay archive +- `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0* +- `patch_filename` - filename of the downloaded overlay archive +- `patch_hash` - sha256 checksum of the downloaded overlay archive +- `patch_directory` - *Since 0.55.0* Overlay directory, alternative to `patch_filename` in the case + files are local instead of a downloaded archive. The directory must be placed in + `subprojects/packagefiles`. + +### Specific to wrap-file +- `source_url` - download url to retrieve the wrap-file source archive +- `source_fallback_url` - fallback URL to be used when download from `source_url` fails *Since: 0.55.0* +- `source_filename` - filename of the downloaded source archive +- `source_hash` - sha256 checksum of the downloaded source archive +- `lead_directory_missing` - for `wrap-file` create the leading + directory name. 
Needed when the source file does not have a leading + directory. + +Since *0.55.0* it is possible to use only the `source_filename` and +`patch_filename` value in a .wrap file (without `source_url` and +`patch_url`) to specify a local archive in the +`subprojects/packagefiles` directory. The `*_hash` entries are +optional when using this method. This method should be preferred over +the old `packagecache` approach described below. + +Since *0.49.0* if `source_filename` or `patch_filename` is found in the +project's `subprojects/packagecache` directory, it will be used instead +of downloading the file, even if `--wrap-mode` option is set to +`nodownload`. The file's hash will be checked. + +### Specific to VCS-based wraps +- `url` - name of the wrap-git repository to clone. Required. +- `revision` - name of the revision to checkout. Must be either: a + valid value (such as a git tag) for the VCS's `checkout` command, or + (for git) `head` to track upstream's default branch. Required. + +### Specific to wrap-git +- `depth` - shallowly clone the repository to X number of commits. Note + that git always allow shallowly cloning branches, but in order to + clone commit ids shallowly, the server must support + `uploadpack.allowReachableSHA1InWant=true`. *(since 0.52.0)* +- `push-url` - alternative url to configure as a git push-url. Useful if + the subproject will be developed and changes pushed upstream. + *(since 0.37.0)* +- `clone-recursive` - also clone submodules of the repository + *(since 0.48.0)* + +## wrap-file with Meson build patch + +Unfortunately most software projects in the world do not build with +Meson. Because of this Meson allows you to specify a patch URL. + +For historic reasons this is called a "patch", however, it serves as an +overlay to add or replace files rather than modifying them. The file +must be an archive; it is downloaded and automatically extracted into +the subproject. The extracted files will include a Meson build +definition for the given subproject. + +This approach makes it extremely simple to embed dependencies that +require build system changes. You can write the Meson build definition +for the dependency in total isolation. This is a lot better than doing +it inside your own source tree, especially if it contains hundreds of +thousands of lines of code. Once you have a working build definition, +just zip up the Meson build files (and others you have changed) and +put them somewhere where you can download them. + +Prior to *0.55.0* Meson build patches were only supported for +wrap-file mode. When using wrap-git, the repository must contain all +Meson build definitions. Since *0.55.0* Meson build patches are +supported for any wrap modes, including wrap-git. + +## `provide` section + +*Since *0.55.0* + +Wrap files can define the dependencies it provides in the `[provide]` +section. + +```ini +[provide] +dependency_names = foo-1.0 +``` + +When a wrap file provides the dependency `foo-1.0`, as above, any call to +`dependency('foo-1.0')` will automatically fallback to that subproject even if +no `fallback` keyword argument is given. A wrap file named `foo.wrap` implicitly +provides the dependency name `foo` even when the `[provide]` section is missing. 
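+
+From the consuming project's side this needs nothing special; a minimal
+sketch, assuming the `subprojects/foo.wrap` file shown above exists:
+
+```meson
+# Falls back to building the foo subproject from foo.wrap when the
+# dependency is not found on the system.
+foo_dep = dependency('foo-1.0')
+
+executable('myexe', 'main.c', dependencies : foo_dep)
+```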
+ +Optional dependencies, like `dependency('foo-1.0', required: get_option('foo_opt'))` +where `foo_opt` is a feature option set to `auto`, will not fallback to the +subproject defined in the wrap file, for 2 reasons: +- It allows for looking the dependency in other ways first, for example using + `cc.find_library('foo')`, and only fallback if that fails: + +```meson +# this won't use fallback defined in foo.wrap +foo_dep = dependency('foo-1.0', required: false) +if not foo_dep.found() + foo_dep = cc.find_library('foo', has_headers: 'foo.h', required: false) + if not foo_dep.found() + # This will use the fallback + foo_dep = dependency('foo-1.0') + # or + foo_dep = dependency('foo-1.0', required: false, fallback: 'foo') + endif +endif +``` + +- Sometimes not-found dependency is preferable to a fallback when the + feature is not explicitly requested by the user. In that case + `dependency('foo-1.0', required: get_option('foo_opt'))` will only + fallback when the user sets `foo_opt` to `enabled` instead of + `auto`. +*Since 0.58.0* optional dependency like above will fallback to the subproject +defined in the wrap file in the case `wrap_mode` is set to `forcefallback` +or `force_fallback_for` contains the subproject. + +If it is desired to fallback for an optional dependency, the +`fallback` or `allow_fallback` keyword arguments must be passed +explicitly. *Since 0.56.0*, `dependency('foo-1.0', required: +get_option('foo_opt'), allow_fallback: true)` will use the fallback +even when `foo_opt` is set to `auto`. On version *0.55.0* the same +effect could be achieved with `dependency('foo-1.0', required: +get_option('foo_opt'), fallback: 'foo')`. + +This mechanism assumes the subproject calls +`meson.override_dependency('foo-1.0', foo_dep)` so Meson knows which +dependency object should be used as fallback. Since that method was +introduced in version *0.54.0*, as a transitional aid for projects +that do not yet make use of it the variable name can be provided in +the wrap file with entries in the format `foo-1.0 = foo_dep`. + +For example when using a recent enough version of glib that uses +`meson.override_dependency()` to override `glib-2.0`, `gobject-2.0` +and `gio-2.0`, a wrap file would look like: + +```ini +[wrap-git] +url=https://gitlab.gnome.org/GNOME/glib.git +revision=glib-2-62 + +[provide] +dependency_names = glib-2.0, gobject-2.0, gio-2.0 +``` + +With older version of glib dependency variable names need to be +specified: + +```ini +[wrap-git] +url=https://gitlab.gnome.org/GNOME/glib.git +revision=glib-2-62 + +[provide] +glib-2.0=glib_dep +gobject-2.0=gobject_dep +gio-2.0=gio_dep +``` + +Programs can also be provided by wrap files, with the `program_names` +key: + +```ini +[provide] +program_names = myprog, otherprog +``` + +With such wrap file, `find_program('myprog')` will automatically +fallback to use the subproject, assuming it uses +`meson.override_find_program('myprog')`. + +## Using wrapped projects + +Wraps provide a convenient way of obtaining a project into your +subproject directory. Then you use it as a regular subproject (see +[subprojects](Subprojects.md)). + +## Getting wraps + +Usually you don't want to write your wraps by hand. + +There is an online repository called +[WrapDB](https://wrapdb.mesonbuild.com) that provides many +dependencies ready to use. You can read more about WrapDB +[here](Using-the-WrapDB.md). + +There is also a Meson subcommand to get and manage wraps (see [using +wraptool](Using-wraptool.md)). 
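+
+For example, installing a wrap from the WrapDB into an existing project is a
+single command (shown here with `zlib`, assuming that wrap is available in
+the WrapDB):
+
+```console
+$ meson wrap list           # list wraps available in the WrapDB
+$ meson wrap install zlib   # creates subprojects/zlib.wrap
+```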
diff --git a/meson/docs/markdown/Wrap-review-guidelines.md b/meson/docs/markdown/Wrap-review-guidelines.md new file mode 100644 index 000000000..670baa6b3 --- /dev/null +++ b/meson/docs/markdown/Wrap-review-guidelines.md @@ -0,0 +1,94 @@ +# Wrap review guidelines + +In order to get a package in the Wrap database it must be reviewed and +accepted by someone with admin rights. Here is a list of items to +check in the review. If some item is not met it does not mean that the +package is rejected. What should be done will be determined on a +case-by-case basis. Similarly meeting all these requirements does not +guarantee that the package will get accepted. Use common sense. + +## Setting up the tools + +The [mesonwrap repository](https://github.com/mesonbuild/mesonwrap) +provides tools to maintain the WrapDB. Read-only features such can be +used by anyone without Meson admin rights. + +## Personal access token + +Some tools require access to the Github API. A [personal access +token](https://github.com/settings/tokens) may be required if the +freebie Github API quota is exhausted. `public_repo` scope is required +for write operations. + +``` +$ cat ~/.config/mesonwrap.ini +[mesonwrap] +github_token = +``` + +## Setting up the review tooling + +The [Mesonwrap](https://github.com/mesonbuild/mesonwrap/) repository +contains review tooling. It is used to do the actual review, but +submitters can also use it to check their MRs. All issues reported by +the tool must be fixed, so using the tool can speed up the review +process considerably. + +The tool has some dependencies that are not available in all Linux +distributions. Thus using a Python +[Virtualenv](https://virtualenv.pypa.io/en/stable/). The tool can be +installed with the following commands. + +``` +git clone https://github.com/mesonbuild/mesonwrap.git +cd mesonwrap +python3 -m venv venv +bash +source venv/bin/activate +pip install -r requirements.txt +# You may need to install Meson and Ninja here as well depending on your setup +python3 setup.py install +mesonwrap review +exit +``` + +## Reviewing code + +``` +mesonwrap review zlib --pull-request=1 [--approve] +``` + +Since not every check can be automated please pay attention to the +following during the review: + +- Download link points to an authoritative upstream location. +- Version branch is created from master. +- Except for the existing code, `LICENSE.build` is mandatory. +- `project()` has a version and it matches the source version. +- `project()` has a license. +- Complex `configure_file()` inputs are documented. + If the file is a copy of a project file make sure it is clear what was changed. +- Unit tests are enabled if the project provides them. +- There are no guidelines if `install()` is a good or a bad thing in wraps. +- If the project can't be tested on the host platform consider using the `--cross-file` flag. + See [the issue](https://github.com/mesonbuild/mesonwrap/issues/125). + +Encourage wrap readability. Use your own judgement. + +## Approval + +If the code looks good use the `--approve` flag to merge it. +The tool automatically creates a release. 
+ +If you need to create a release manually (because, for example, a MR +was merged by hand), the command to do it is the following: + +```shell +mesonwrap publish reponame version +``` + +An example invocation would look like this: + +```shell +mesonwrap publish expat 2.2.9 +``` diff --git a/meson/docs/markdown/Wrapdb-projects.md b/meson/docs/markdown/Wrapdb-projects.md new file mode 100644 index 000000000..64f3cb62d --- /dev/null +++ b/meson/docs/markdown/Wrapdb-projects.md @@ -0,0 +1,16 @@ +# Meson WrapDB packages + +This is a list of projects that have either an upstream Meson build system, or a +port maintained by the Meson team. [They can be used by your project to provide +its dependencies](Wrap-dependency-system-manual.md). + +Use the command line `meson wrap install ` to install the wrap file of +any of those projects into your project's `subprojects/` directory. +See [Meson command line documentation](Using-wraptool.md). + +If you wish to add your own project into this list, please submit your wrap file +in a [Pull Request](https://github.com/mesonbuild/wrapdb). +See [Meson documentation](Adding-new-projects-to-wrapdb.md) +for more details. + +{{ wrapdb-table.md }} diff --git a/meson/docs/markdown/_Sidebar.md b/meson/docs/markdown/_Sidebar.md new file mode 100644 index 000000000..0ca1762b5 --- /dev/null +++ b/meson/docs/markdown/_Sidebar.md @@ -0,0 +1,15 @@ +## Quick References + +* [Functions](Reference-manual.md) +* [Options](Build-options.md) +* [Configuration](Configuration.md) +* [Dependencies](Dependencies.md) +* [Tests](Unit-tests.md) +* [Syntax](Syntax.md) + +### [Modules](Module-reference.md) + +* [gnome](Gnome-module.md) +* [i18n](i18n-module.md) +* [pkgconfig](Pkgconfig-module.md) +* [rust](Rust-module.md) diff --git a/meson/docs/markdown/_include_qt_base.md b/meson/docs/markdown/_include_qt_base.md new file mode 100644 index 000000000..bf5e31b03 --- /dev/null +++ b/meson/docs/markdown/_include_qt_base.md @@ -0,0 +1,160 @@ +## compile_resources + +*New in 0.59.0* + +Compiles Qt's resources collection files (.qrc) into c++ files for compilation. + +It takes no positional arguments, and the following keyword arguments: + - `name` (string | empty): if provided a single .cpp file will be generated, + and the output of all qrc files will be combined in this file, otherwise + each qrc file be written to it's own cpp file. + - `sources` (File | string)[]: A list of sources to be transpiled. Required, + must have at least one source + - `extra_args` string[]: Extra arguments to pass directly to `qt-rcc` + - `method` string: The method to use to detect qt, see `dependency()` for more + information. + +## compile_ui + +*New in 0.59.0* + +Compiles Qt's ui files (.ui) into header files. + +It takes no positional arguments, and the following keyword arguments: + - `sources` (File | string)[]: A list of sources to be transpiled. Required, + must have at least one source + - `extra_args` string[]: Extra arguments to pass directly to `qt-uic` + - `method` string: The method to use to detect qt, see `dependency()` for more + information. + +## compile_moc + +*New in 0.59.0* + +Compiles Qt's moc files (.moc) into header and/or source files. At least one of +the keyword arguments `headers` and `sources` must be provided. + +It takes no positional arguments, and the following keyword arguments: + - `sources` (File | string)[]: A list of sources to be transpiled into .moc + files for manual inclusion. 
- `headers` (File | string)[]: A list of headers to be transpiled into .cpp files
+ - `extra_args` string[]: Extra arguments to pass directly to `qt-moc`
+ - `method` string: The method to use to detect qt, see `dependency()` for more
+   information.
+ - `include_directories` (string | IncludeDirectory)[]: A list of `include_directory()`
+   objects used when transpiling the .moc files
+
+## preprocess
+
+Consider using `compile_resources`, `compile_ui`, and `compile_moc` instead.
+
+Takes sources for moc, uic, and rcc, and converts them into c++ files for
+compilation.
+
+Has the following signature: `qt.preprocess(name: str | None, *sources: str)`
+
+If the `name` parameter is passed then all of the rcc files will be written to a single output file.
+
+The variadic `sources` arguments have been deprecated since Meson 0.59.0, as has the `sources` keyword argument. These passed the files through the preprocessor unmodified; don't do this, just add the output of the generator to another sources list:
+```meson
+sources = files('a.cpp', 'main.cpp', 'bar.c')
+sources += qt.preprocess(qresources : ['resources'])
+```
+
+This method takes the following keyword arguments:
+ - `qresources` (string | File)[]: Passed to the RCC compiler
+ - `ui_files`: (string | File | CustomTarget)[]: Passed to the `uic` compiler
+ - `moc_sources`: (string | File | CustomTarget)[]: Passed to the `moc` compiler. These are converted into .moc files meant to be `#include`ed
+ - `moc_headers`: (string | File | CustomTarget)[]: Passed to the `moc` compiler. These will be converted into .cpp files
+ - `include_directories` (IncludeDirectories | string)[], the directories to add to the header search path for `moc`
+ - `moc_extra_arguments` string[]: any additional arguments to `moc`. Since v0.44.0.
+ - `uic_extra_arguments` string[]: any additional arguments to `uic`. Since v0.49.0.
+ - `rcc_extra_arguments` string[]: any additional arguments to `rcc`. Since v0.49.0.
+ - `dependencies` Dependency[]: dependency objects needed by moc. Available since v0.48.0.
+ - `sources`: a list of extra sources, which are added to the output unchanged. Deprecated in 0.59.0.
+
+It returns an array of targets and sources to pass to a compilation target.
+
+## compile_translations (since v0.44.0)
+
+This method generates the necessary targets to build translation files with
+lrelease. It takes no positional arguments, and the following keyword arguments:
+
+ - `ts_files` (str | File)[], the list of input translation files produced by Qt's lupdate tool.
+ - `install` bool: when true, this target is installed during the install step (optional).
+ - `install_dir` string: directory to install to (optional).
+ - `build_by_default` bool: when set to true, to have this target be built by
+   default, that is, when invoking `meson compile`; the default value is false
+   (optional).
+ - `qresource` string: rcc source file to extract ts_files from; cannot be used
+   with ts_files kwarg. Available since v0.56.0.
+ - `rcc_extra_arguments` string[]: any additional arguments to `rcc` (optional),
+   when used with `qresource`. Available since v0.56.0.
+
+Returns either a list of custom targets for the compiled
+translations, or, if using a `qresource` file, a single custom target
+containing the processed source file, which should be passed to a main
+build target.
+
+## has_tools
+
+This method returns `true` if all tools used by this module are found,
+`false` otherwise.
+ +It should be used to compile optional Qt code: +```meson +qt5 = import('qt5') +if qt5.has_tools(required: get_option('qt_feature')) + moc_files = qt5.preprocess(...) + ... +endif +``` + +This method takes the following keyword arguments: +- `required` bool | FeatureOption: by default, `required` is set to `false`. If `required` is set to + `true` or an enabled [`feature`](Build-options.md#features) and some tools are + missing Meson will abort. +- `method` string: method used to find the Qt dependency (`auto` by default). + +*Since: 0.54.0* + +## Dependencies + +See [Qt dependencies](Dependencies.md#qt4-qt5) + +The 'modules' argument is used to include Qt modules in the project. +See the Qt documentation for the [list of +modules](http://doc.qt.io/qt-5/qtmodules.html). + +The 'private_headers' argument allows usage of Qt's modules private +headers. (since v0.47.0) + +## Example +A simple example would look like this: + +```meson +qt5 = import('qt5') +qt5_dep = dependency('qt5', modules: ['Core', 'Gui']) +inc = include_directories('includes') +moc_files = qt5.compile_moc(headers : 'myclass.h', + extra_arguments: ['-DMAKES_MY_MOC_HEADER_COMPILE'], + include_directories: inc, + dependencies: qt5_dep) +translations = qt5.compile_translations(ts_files : 'myTranslation_fr.ts', build_by_default : true) +executable('myprog', 'main.cpp', 'myclass.cpp', moc_files, + include_directories: inc, + dependencies : qt5_dep) +``` + +Sometimes, translations are embedded inside the binary using qresource +files. In this case the ts files do not need to be explicitly listed, +but will be inferred from the built qm files listed in the qresource +file. For example: + +```meson +qt5 = import('qt5') +qt5_dep = dependency('qt5', modules: ['Core', 'Gui']) +lang_cpp = qt5.compile_translations(qresource: 'lang.qrc') +executable('myprog', 'main.cpp', lang_cpp, + dependencies: qt5_dep) +``` diff --git a/meson/docs/markdown/fallback-wraptool.md b/meson/docs/markdown/fallback-wraptool.md new file mode 100644 index 000000000..d4f5af27d --- /dev/null +++ b/meson/docs/markdown/fallback-wraptool.md @@ -0,0 +1,43 @@ +--- +title: fallback wraptool +... + +# In case of emergency + +In case wraptool is down we have created a backup script that you can +use to download wraps directly from the GitHub repos. It is not as +slick and may have bugs but at least it will allow you to use wraps. + +## Using it + +To list all available wraps: + + ghwt.py list + +To install a wrap, go to your source root, make sure that the +`subprojects` directory exists and run this command: + + ghwt.py install [] + +This will stage the subproject ready to use. If you have multiple +subprojects you need to download them all manually. + +Specifying branch name is optional. If not specified, the list of +potential branches is sorted alphabetically and the last branch is +used. + +*Note* The tool was added in 0.32.0, for versions older than that you +need to delete the `foo.wrap` file to work around this issue. + +## How to upgrade an existing dir/fix broken state/any other problem + +Nuke the contents of `subprojects` and start again. + +## Known issues + +Some repositories show up in the list but are not installable. They +would not show up in the real WrapDB because they are works in +progress. + +GitHub web API limits the amount of queries you can do to 60/hour. If +you exceed that you need to wait for the timer to reset. 
diff --git a/meson/docs/markdown/howtox.md b/meson/docs/markdown/howtox.md new file mode 100644 index 000000000..1521f7244 --- /dev/null +++ b/meson/docs/markdown/howtox.md @@ -0,0 +1,325 @@ +# How do I do X in Meson? + +This page lists code snippets for common tasks. These are written +mostly using the C compiler, but the same approach should work on +almost all other compilers. + +## Set compiler + +When first running Meson, set it in an environment variable. + +```console +$ CC=mycc meson +``` + +Note that environment variables like `CC` only works in native builds. +The `CC` refers to the compiler for the host platform, that is the +compiler used to compile programs that run on the machine we will +eventually install the project on. The compiler used to build things +that run on the machine we do the building can be specified with +`CC_FOR_BUILD`. You can use it in cross builds. + +Note that environment variables are never the idiomatic way to do +anything with Meson, however. It is better to use the native and cross +files. And the tools for the host platform in cross builds can only be +specified with a cross file. + +There is a table of all environment variables supported +[Here](Reference-tables.md#compiler-and-linker-selection-variables) + + +## Set linker + +*New in 0.53.0* + +Like the compiler, the linker is selected via the `_LD` environment variable, or through the `_ld` entry in a native or cross file. You must be aware of +whether you're using a compiler that invokes the linker itself (most +compilers including GCC and Clang) or a linker that is invoked +directly (when using MSVC or compilers that act like it, including +Clang-Cl). With the former `c_ld` or `CC_LD` should be the value to +pass to the compiler's special argument (such as `-fuse-ld` with clang +and gcc), with the latter it should be an executable, such as +`lld-link.exe`. + +*NOTE* In Meson 0.53.0 the `ld` entry in the cross/native file and the +`LD` environment variable were used, this resulted in a large number +of regressions and was changed in 0.53.1 to `_ld` and `_LD`. + +```console +$ CC=clang CC_LD=lld meson +``` + +or + +```console +$ CC=clang-cl CC_LD=link meson +``` + +or in a cross or native file: + +```ini +[binaries] +c = 'clang' +c_ld = 'lld' +``` + +There is a table of all environment variables supported +[Here](Reference-tables.md#compiler-and-linker-selection-variables) + + +## Set default C/C++ language version + +```meson +project('myproj', 'c', 'cpp', + default_options : ['c_std=c11', 'cpp_std=c++11']) +``` + +The language version can also be set on a per-target basis. + +```meson +executable(..., override_options : ['c_std=c11']) +``` + +## Enable threads + +Lots of people seem to do this manually with `find_library('pthread')` +or something similar. Do not do that. It is not portable. Instead do +this. + +```meson +thread_dep = dependency('threads') +executable(..., dependencies : thread_dep) +``` + +## Set extra compiler and linker flags from the outside (when e.g. building distro packages) + +The behavior is the same as with other build systems, with environment +variables during first invocation. Do not use these when you need to +rebuild the source + +```console +$ CFLAGS=-fsomething LDFLAGS=-Wl,--linker-flag meson +``` + +## Use an argument only with a specific compiler + +First check which arguments to use. + +```meson +if meson.get_compiler('c').get_id() == 'clang' + extra_args = ['-fclang-flag'] +else + extra_args = [] +endif +``` + +Then use it in a target. 
+ +```meson +executable(..., c_args : extra_args) +``` + +If you want to use the arguments on all targets, then do this. + +```meson +if meson.get_compiler('c').get_id() == 'clang' + add_global_arguments('-fclang-flag', language : 'c') +endif +``` + +## Set a command's output to configuration + +```meson +txt = run_command('script', 'argument').stdout().strip() +cdata = configuration_data() +cdata.set('SOMETHING', txt) +configure_file(...) +``` + +## Generate configuration data from files + +`The [fs module](#Fs-modules) offers the `read` function` which enables adding +the contents of arbitrary files to configuration data (among other uses): + +```meson +fs = import('fs') +cdata = configuration_data() +copyright = fs.read('LICENSE') +cdata.set('COPYRIGHT', copyright) +if build_machine.system() == 'linux' + os_release = fs.read('/etc/os-release') + cdata.set('LINUX_BUILDER', os_release) +endif +configure_file(...) +``` + +## Generate a runnable script with `configure_file` + +`configure_file` preserves metadata so if your template file has +execute permissions, the generated file will have them too. + +## Producing a coverage report + +First initialize the build directory with this command. + +```console +$ meson -Db_coverage=true +``` + +Then issue the following commands. + +```console +$ meson compile +$ meson test +$ meson compile coverage-html (or coverage-xml) +``` + +The coverage report can be found in the meson-logs subdirectory. + +*New in 0.55.0* llvm-cov support for use with clang + +## Add some optimization to debug builds + +By default the debug build does not use any optimizations. This is the +desired approach most of the time. However some projects benefit from +having some minor optimizations enabled. GCC even has a specific +compiler flag `-Og` for this. To enable its use, just issue the +following command. + +```console +$ meson configure -Dc_args=-Og +``` + +This causes all subsequent builds to use this command line argument. + +## Use address sanitizer + +Clang comes with a selection of analysis tools such as the [address +sanitizer](https://clang.llvm.org/docs/AddressSanitizer.html). Meson +has native support for these with the `b_sanitize` option. + +```console +$ meson -Db_sanitize=address +``` + +After this you just compile your code and run the test suite. Address +sanitizer will abort executables which have bugs so they show up as +test failures. + +## Use Clang static analyzer + +Install scan-build program, then do this: + +```console +$ meson setup builddir +$ ninja -C builddir scan-build +``` + +You can use the `SCANBUILD` environment variable to choose the +scan-build executable. + +```console +$ SCANBUILD= ninja -C builddir scan-build +``` + +You can use it for passing arguments to scan-build program by +creating a script, for example: + +```sh +#!/bin/sh +scan-build -v --status-bugs "$@" +``` + +And then pass it through the variable (remember to use absolute path): + +```console +$ SCANBUILD=$(pwd)/my-scan-build.sh ninja -C builddir scan-build +``` + +## Use profile guided optimization + +Using profile guided optimization with GCC is a two phase +operation. First we set up the project with profile measurements +enabled and compile it. + +```console +$ meson setup -Db_pgo=generate +$ meson compile -C builddir +``` + +Then we need to run the program with some representative input. This +step depends on your project. + +Once that is done we change the compiler flags to use the generated +information and rebuild. 
+ +```console +$ meson configure -Db_pgo=use +$ meson compile +``` + +After these steps the resulting binary is fully optimized. + +## Add math library (`-lm`) portably + +Some platforms (e.g. Linux) have a standalone math library. Other +platforms (pretty much everyone else) do not. How to specify that `m` +is used only when needed? + +```meson +cc = meson.get_compiler('c') +m_dep = cc.find_library('m', required : false) +executable(..., dependencies : m_dep) +``` + +## Install an executable to `libexecdir` + +```meson +executable(..., install : true, install_dir : get_option('libexecdir')) +``` + +## Use existing `Find.cmake` files + +Meson can use the CMake `find_package()` ecosystem if CMake is +installed. To find a dependency with custom `Find.cmake`, set +the `cmake_module_path` property to the path in your project where the +CMake scripts are stored. + +Example for a `FindCmakeOnlyDep.cmake` in a `cmake` subdirectory: + +```meson +cm_dep = dependency('CmakeOnlyDep', cmake_module_path : 'cmake') +``` + +The `cmake_module_path` property is only needed for custom CMake scripts. System +wide CMake scripts are found automatically. + +More information can be found [here](Dependencies.md#cmake) + +## Get a default not-found dependency? + +```meson +null_dep = dependency('', required : false) +``` + +This can be used in cases where you want a default value, but might override it +later. + +```meson +# Not needed on Windows! +my_dep = dependency('', required : false) +if host_machine.system() in ['freebsd', 'netbsd', 'openbsd', 'dragonfly'] + my_dep = dependency('some dep', required : false) +elif host_machine.system() == 'linux' + my_dep = dependency('some other dep', required : false) +endif + +executable( + 'myexe', + my_sources, + deps : [my_dep] +) +``` diff --git a/meson/docs/markdown/i18n-module.md b/meson/docs/markdown/i18n-module.md new file mode 100644 index 000000000..4948fab42 --- /dev/null +++ b/meson/docs/markdown/i18n-module.md @@ -0,0 +1,53 @@ +# I18n module + +This module provides internationalisation and localisation functionality. + +## Usage + +To use this module, just do: **`i18n = import('i18n')`**. The +following functions will then be available as methods on the object +with the name `i18n`. You can, of course, replace the name `i18n` with +anything else. + +### i18n.gettext() + +Sets up gettext localisation so that translations are built and placed +into their proper locations during install. Takes one positional +argument which is the name of the gettext module. + +* `args`: list of extra arguments to pass to `xgettext` when + generating the pot file +* `data_dirs`: (*Added 0.36.0*) list of directories to be set for + `GETTEXTDATADIRS` env var (Requires gettext 0.19.8+), used for local + its files +* `languages`: list of languages that are to be generated. As of + 0.37.0 this is optional and the + [LINGUAS](https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html) + file is read. +* `preset`: (*Added 0.37.0*) name of a preset list of arguments, + current option is `'glib'`, see + [source](https://github.com/mesonbuild/meson/blob/master/mesonbuild/modules/i18n.py) + for for their value +* `install`: (*Added 0.43.0*) if false, do not install the built translations. 
+* `install_dir`: (*Added 0.50.0*) override default install location, default is `localedir` + +This function also defines targets for maintainers to use: +**Note**: These output to the source directory + +* `-pot`: runs `xgettext` to regenerate the pot file +* `-update-po`: regenerates the `.po` files from current `.pot` file +* `-gmo`: builds the translations without installing + +### i18n.merge_file() + +This merges translations into a text file using `msgfmt`. See +[custom_target](Reference-manual.md#custom_target) +for normal keywords. In addition it accepts these keywords: + +* `data_dirs`: (*Added 0.41.0*) list of directories for its files (See + also `i18n.gettext()`) +* `po_dir`: directory containing translations, relative to current directory +* `type`: type of file, valid options are `'xml'` (default) and `'desktop'` +* `args`: (*Added 0.51.0*) list of extra arguments to pass to `msgfmt` + +*Added 0.37.0* diff --git a/meson/docs/markdown/images/buildtime.png b/meson/docs/markdown/images/buildtime.png new file mode 100644 index 000000000..2a44422f0 Binary files /dev/null and b/meson/docs/markdown/images/buildtime.png differ diff --git a/meson/docs/markdown/images/conftime.png b/meson/docs/markdown/images/conftime.png new file mode 100644 index 000000000..63754dbab Binary files /dev/null and b/meson/docs/markdown/images/conftime.png differ diff --git a/meson/docs/markdown/images/emptytime.png b/meson/docs/markdown/images/emptytime.png new file mode 100644 index 000000000..d80eab9f2 Binary files /dev/null and b/meson/docs/markdown/images/emptytime.png differ diff --git a/meson/docs/markdown/images/glib_build.png b/meson/docs/markdown/images/glib_build.png new file mode 100644 index 000000000..ddb994780 Binary files /dev/null and b/meson/docs/markdown/images/glib_build.png differ diff --git a/meson/docs/markdown/images/glib_conf.png b/meson/docs/markdown/images/glib_conf.png new file mode 100644 index 000000000..5de60d5a0 Binary files /dev/null and b/meson/docs/markdown/images/glib_conf.png differ diff --git a/meson/docs/markdown/images/glib_empty.png b/meson/docs/markdown/images/glib_empty.png new file mode 100644 index 000000000..5976e7f46 Binary files /dev/null and b/meson/docs/markdown/images/glib_empty.png differ diff --git a/meson/docs/markdown/images/glib_link.png b/meson/docs/markdown/images/glib_link.png new file mode 100644 index 000000000..23d90442b Binary files /dev/null and b/meson/docs/markdown/images/glib_link.png differ diff --git a/meson/docs/markdown/images/gtksample.png b/meson/docs/markdown/images/gtksample.png new file mode 100644 index 000000000..b6557c4e1 Binary files /dev/null and b/meson/docs/markdown/images/gtksample.png differ diff --git a/meson/docs/markdown/images/linux_alldone.png b/meson/docs/markdown/images/linux_alldone.png new file mode 100644 index 000000000..378c893c0 Binary files /dev/null and b/meson/docs/markdown/images/linux_alldone.png differ diff --git a/meson/docs/markdown/images/meson_mac1.png b/meson/docs/markdown/images/meson_mac1.png new file mode 100755 index 000000000..17eae0acf Binary files /dev/null and b/meson/docs/markdown/images/meson_mac1.png differ diff --git a/meson/docs/markdown/images/meson_mac2.png b/meson/docs/markdown/images/meson_mac2.png new file mode 100755 index 000000000..e5c434df1 Binary files /dev/null and b/meson/docs/markdown/images/meson_mac2.png differ diff --git a/meson/docs/markdown/images/meson_mac3.png b/meson/docs/markdown/images/meson_mac3.png new file mode 100755 index 000000000..babada173 Binary files 
/dev/null and b/meson/docs/markdown/images/meson_mac3.png differ diff --git a/meson/docs/markdown/images/meson_mac4.png b/meson/docs/markdown/images/meson_mac4.png new file mode 100755 index 000000000..e8089ad96 Binary files /dev/null and b/meson/docs/markdown/images/meson_mac4.png differ diff --git a/meson/docs/markdown/images/meson_mac5.png b/meson/docs/markdown/images/meson_mac5.png new file mode 100755 index 000000000..507fcdaa6 Binary files /dev/null and b/meson/docs/markdown/images/meson_mac5.png differ diff --git a/meson/docs/markdown/images/osx_xcode.png b/meson/docs/markdown/images/osx_xcode.png new file mode 100644 index 000000000..89075f308 Binary files /dev/null and b/meson/docs/markdown/images/osx_xcode.png differ diff --git a/meson/docs/markdown/images/py3-install-1.png b/meson/docs/markdown/images/py3-install-1.png new file mode 100644 index 000000000..74f081938 Binary files /dev/null and b/meson/docs/markdown/images/py3-install-1.png differ diff --git a/meson/docs/markdown/images/py3-install-2.png b/meson/docs/markdown/images/py3-install-2.png new file mode 100644 index 000000000..9a8f1fe65 Binary files /dev/null and b/meson/docs/markdown/images/py3-install-2.png differ diff --git a/meson/docs/markdown/images/py3-install-3.png b/meson/docs/markdown/images/py3-install-3.png new file mode 100644 index 000000000..b702910ab Binary files /dev/null and b/meson/docs/markdown/images/py3-install-3.png differ diff --git a/meson/docs/markdown/images/win_dlvs.png b/meson/docs/markdown/images/win_dlvs.png new file mode 100644 index 000000000..938c30d01 Binary files /dev/null and b/meson/docs/markdown/images/win_dlvs.png differ diff --git a/meson/docs/markdown/images/win_downloadmeson.png b/meson/docs/markdown/images/win_downloadmeson.png new file mode 100644 index 000000000..59170390d Binary files /dev/null and b/meson/docs/markdown/images/win_downloadmeson.png differ diff --git a/meson/docs/markdown/images/win_installvs.png b/meson/docs/markdown/images/win_installvs.png new file mode 100644 index 000000000..b2175259a Binary files /dev/null and b/meson/docs/markdown/images/win_installvs.png differ diff --git a/meson/docs/markdown/images/win_vstoolsprompt.png b/meson/docs/markdown/images/win_vstoolsprompt.png new file mode 100644 index 000000000..d3f0c2b87 Binary files /dev/null and b/meson/docs/markdown/images/win_vstoolsprompt.png differ diff --git a/meson/docs/markdown/images/win_working.png b/meson/docs/markdown/images/win_working.png new file mode 100644 index 000000000..4bc46d709 Binary files /dev/null and b/meson/docs/markdown/images/win_working.png differ diff --git a/meson/docs/markdown/index.md b/meson/docs/markdown/index.md new file mode 100644 index 000000000..14bdfbed8 --- /dev/null +++ b/meson/docs/markdown/index.md @@ -0,0 +1,60 @@ +--- +render-subpages: false +... + +# The Meson Build system + +## Overview + +Meson is an open source build system meant to be both extremely fast, +and, even more importantly, as user friendly as possible. + +The main design point of Meson is that every moment a developer spends +writing or debugging build definitions is a second wasted. So is every +second spent waiting for the build system to actually start compiling +code. 
+ +## Features + +* multiplatform support for Linux, macOS, Windows, GCC, Clang, Visual Studio and others +* supported languages include C, C++, D, Fortran, Java, Rust +* build definitions in a very readable and user friendly non-Turing complete DSL +* cross compilation for many operating systems as well as bare metal +* optimized for extremely fast full and incremental builds without sacrificing correctness +* built-in multiplatform dependency provider that works together with distro packages +* fun! + +## Quickstart for beginners + +Are you an absolute beginner when it comes to programming? No worries, +read [this beginner guide](SimpleStart.md) to get started. + +## Community + +There are two main methods of connecting with other Meson +developers. The easiest way for most people is a web chat. The channel +to use is `#mesonbuild` either via Matrix ([web +interface](https://app.element.io/#/room/#mesonbuild:matrix.org)) or +[OFTC IRC](https://www.oftc.net/). + +The second one is the mailing list, which is hosted at +[Google Groups](https://groups.google.com/forum/#!forum/mesonbuild). + +### [Projects using Meson](Users.md) + +Many projects are using Meson and they're +a great resource for learning what to (and what not to!) do when +converting existing projects to Meson. + +[A short list of Meson users can be found here](Users.md) +but there are many more. We would love to hear about your success +stories too and how things could be improved too! + +## Development + +All development on Meson is done on the [GitHub +project](https://github.com/mesonbuild/meson). Instructions for +contributing can be found on the [contribution page](Contributing.md). + + +You do not need to sign a CLA to contribute to Meson. diff --git a/meson/docs/markdown/legal.md b/meson/docs/markdown/legal.md new file mode 100644 index 000000000..a14b7b93e --- /dev/null +++ b/meson/docs/markdown/legal.md @@ -0,0 +1,26 @@ +# Legal information + +Meson is copyrighted by all members of the Meson development team. +Meson is licensed under the [Apache 2 license]. + +Meson is a registered trademark of Jussi Pakkanen. + +## Meson logo licensing + +Meson's logo is (C) Jussi Pakkanen and used by the Meson project with +specific permission. It is not licensed under the same terms as the +rest of the project. + +If you are a third party and want to use the Meson logo, you must +first obtain written permission from Jussi Pakkanen. + +## Website licensing + +The meson website content is released under [Creative Commons +Attribution-ShareAlike 4.0 International]. + +All code samples on the website are released under [CC0 1.0 Universal]. + +[Creative Commons Attribution-ShareAlike 4.0 International]: https://creativecommons.org/licenses/by-sa/4.0/ +[CC0 1.0 Universal]: https://creativecommons.org/publicdomain/zero/1.0/ +[Apache 2 license]: https://www.apache.org/licenses/LICENSE-2.0 diff --git a/meson/docs/markdown/snippets/add_release_note_snippets_here b/meson/docs/markdown/snippets/add_release_note_snippets_here new file mode 100644 index 000000000..cdd57c647 --- /dev/null +++ b/meson/docs/markdown/snippets/add_release_note_snippets_here @@ -0,0 +1,3 @@ +DO NOT ADD ANYTHING TO THIS FILE! + +Add release note snippets to their own files, one file per snippet. 
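The `i18n.merge_file()` keywords listed in i18n-module.md above are easiest to see together in a small example. The following sketch merges translations into a desktop file; the template name `myapp.desktop.in`, the `po` directory layout and the chosen install location are assumptions made for illustration, not values taken from the documentation.

```meson
# Minimal sketch, assuming a hypothetical 'myapp.desktop.in' template and a
# 'po' subdirectory holding the .po files; adjust the names to your project.
i18n = import('i18n')

i18n.merge_file(
  input: 'myapp.desktop.in',
  output: 'myapp.desktop',
  type: 'desktop',                # default is 'xml'
  po_dir: 'po',                   # relative to the current directory
  install: true,
  install_dir: get_option('datadir') / 'applications',
)
```

The `type: 'desktop'` keyword is what switches `msgfmt` away from its default XML mode, and the remaining keywords (`input`, `output`, `install`, `install_dir`) are the ordinary `custom_target` keywords mentioned in the documentation above.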
diff --git a/meson/docs/meson.build b/meson/docs/meson.build new file mode 100644 index 000000000..73693353d --- /dev/null +++ b/meson/docs/meson.build @@ -0,0 +1,41 @@ +project('Meson documentation', version: '1.0') + +cur_bdir = meson.current_build_dir() + +# Only the script knows which files are being generated +docs_gen = custom_target( + 'gen_docs', + input: files('markdown/index.md'), + output: 'gen_docs.stamp', + command: [ + files('../tools/regenerate_docs.py'), + '--output-dir', cur_bdir, + '--dummy-output-file', '@OUTPUT@', + ], + build_by_default: true, + install: false) + +hotdoc = import('hotdoc') +documentation = hotdoc.generate_doc(meson.project_name(), + project_version: meson.project_version(), + sitemap: 'sitemap.txt', + build_by_default: true, + depends: docs_gen, + index: 'markdown/index.md', + install: false, + extra_assets: ['images/'], + include_paths: ['markdown', cur_bdir], + default_license: 'CC-BY-SAv4.0', + html_extra_theme: join_paths('theme', 'extra'), + git_upload_repository: 'git@github.com:mesonbuild/mesonbuild.github.io.git', + edit_on_github_repository: 'https://github.com/mesonbuild/meson', + syntax_highlighting_activate: true, +) + +run_target('upload', + command: [find_program('hotdoc'), 'run', + '--conf-file', documentation.config_path(), + '--git-upload', + '-vv', + ], +) diff --git a/meson/docs/sitemap.txt b/meson/docs/sitemap.txt new file mode 100644 index 000000000..d96d86597 --- /dev/null +++ b/meson/docs/sitemap.txt @@ -0,0 +1,136 @@ +index.md + SimpleStart.md + Getting-meson.md + Getting-meson_zh.md + Getting-meson_ptbr.md + Quick-guide.md + Tutorial.md + Manual.md + Overview.md + Running-Meson.md + Commands.md + Builtin-options.md + Using-with-Visual-Studio.md + Meson-sample.md + Syntax.md + Machine-files.md + Native-environments.md + Build-targets.md + Include-directories.md + Installing.md + Adding-arguments.md + Configuration.md + Compiler-properties.md + Dependencies.md + Threads.md + External-commands.md + Precompiled-headers.md + Unity-builds.md + Feature-autodetection.md + Generating-sources.md + Unit-tests.md + Cross-compilation.md + Localisation.md + Build-options.md + Subprojects.md + Disabler.md + Code-formatting.md + Modules.md + CMake-module.md + Cuda-module.md + Dlang-module.md + External-Project-module.md + Fs-module.md + Gnome-module.md + Hotdoc-module.md + Icestorm-module.md + Keyval-module.md + Pkgconfig-module.md + Python-3-module.md + Python-module.md + Qt4-module.md + Qt5-module.md + Qt6-module.md + RPM-module.md + Rust-module.md + Simd-module.md + SourceSet-module.md + Windows-module.md + i18n-module.md + Java.md + Vala.md + D.md + Cython.md + IDE-integration.md + Custom-build-targets.md + Build-system-converters.md + Configuring-a-build-directory.md + Run-targets.md + Creating-releases.md + Creating-OSX-packages.md + Creating-Linux-binaries.md + Project-templates.md + Reference-manual.md + Reference-tables.md + Style-guide.md + Rewriter.md + FAQ.md + Reproducible-builds.md + howtox.md + Wrapdb-projects.md + Wrap-dependency-system-manual.md + Adding-new-projects-to-wrapdb.md + Using-the-WrapDB.md + Using-wraptool.md + Wrap-best-practices-and-tips.md + Wrap-review-guidelines.md + Shipping-prebuilt-binaries-as-wraps.md + fallback-wraptool.md + Release-notes.md + Release-notes-for-0.59.0.md + Release-notes-for-0.58.0.md + Release-notes-for-0.57.0.md + Release-notes-for-0.56.0.md + Release-notes-for-0.55.0.md + Release-notes-for-0.54.0.md + Release-notes-for-0.53.0.md + Release-notes-for-0.52.0.md + 
Release-notes-for-0.51.0.md + Release-notes-for-0.50.0.md + Release-notes-for-0.49.0.md + Release-notes-for-0.48.0.md + Release-notes-for-0.47.0.md + Release-notes-for-0.46.0.md + Release-notes-for-0.45.0.md + Release-notes-for-0.44.0.md + Release-notes-for-0.43.0.md + Release-notes-for-0.42.0.md + Release-notes-for-0.41.0.md + Release-notes-for-0.40.0.md + Release-notes-for-0.39.0.md + Release-notes-for-0.38.0.md + Release-notes-for-0.37.0.md + Additional.md + Release-procedure.md + Performance-comparison.md + ARM-performance-test.md + Simple-comparison.md + Comparisons.md + Conference-presentations.md + Contact-information.md + Continuous-Integration.md + Design-rationale.md + IndepthTutorial.md + In-the-press.md + Mixing-build-systems.md + Pkg-config-files.md + Playground.md + Porting-from-autotools.md + Use-of-Python.md + Users.md + Using-multiple-build-directories.md + Vs-External.md + Contributing.md + MesonCI.md + legal.md + Videos.md diff --git a/meson/docs/theme/extra/images/favicon.png b/meson/docs/theme/extra/images/favicon.png new file mode 100644 index 000000000..6800fe80e Binary files /dev/null and b/meson/docs/theme/extra/images/favicon.png differ diff --git a/meson/docs/theme/extra/images/meson_logo.png b/meson/docs/theme/extra/images/meson_logo.png new file mode 100644 index 000000000..1b3915d39 Binary files /dev/null and b/meson/docs/theme/extra/images/meson_logo.png differ diff --git a/meson/docs/theme/extra/prism_components/prism-meson.js b/meson/docs/theme/extra/prism_components/prism-meson.js new file mode 100644 index 000000000..242af19b4 --- /dev/null +++ b/meson/docs/theme/extra/prism_components/prism-meson.js @@ -0,0 +1,16 @@ +Prism.languages.meson= { + 'triple-quoted-string': { + 'pattern': /'''[\s\S]*?'''/, + 'alias': 'string' + }, + 'comment': /#.*/, + 'string': /'(?:\\'|[^'])*'/, + 'number': /\b\d+(?:\.\d+)?\b/, + 'keyword': /\b(?:if|else|elif|endif|foreach|endforeach)\b/, + 'function': /(?=\.|\b)[a-zA-Z_]+\s*(?=\()/, + 'boolean': /\b(?:true|false)\b/, + 'builtin': /\b(?:meson|host_machine|target_machine|build_machine)(?=\.)/, + 'operator': /(?:[<>=*+\-/!]?=|%|\/|\*|-|\+|\b(?:or|and|not)\b)/, + 'punctuation': /[(),[\]]/ + // TODO: Handle ternary ?: +}; \ No newline at end of file diff --git a/meson/docs/theme/extra/prism_components/prism-meson.min.js b/meson/docs/theme/extra/prism_components/prism-meson.min.js new file mode 100644 index 000000000..7bf90e06f --- /dev/null +++ b/meson/docs/theme/extra/prism_components/prism-meson.min.js @@ -0,0 +1 @@ +Prism.languages.meson={"triple-quoted-string":{pattern:/'''[\s\S]*?'''/,alias:"string"},comment:/#.*/,string:/'(?:\\'|[^'])*'/,number:/\b\d+(?:\.\d+)?\b/,keyword:/\b(?:if|else|elif|endif|foreach|endforeach)\b/,"function":/(?=\.|\b)[a-zA-Z_]+\s*(?=\()/,"boolean":/\b(?:true|false)\b/,builtin:/\b(?:meson|host_machine|target_machine|build_machine)(?=\.)/,operator:/(?:[<>=*+\-\/!]?=|%|\/|\*|-|\+|\b(?:or|and|not)\b)/,punctuation:/[(),[\]]/}; \ No newline at end of file diff --git a/meson/docs/theme/extra/templates/brand-logo.html b/meson/docs/theme/extra/templates/brand-logo.html new file mode 100644 index 000000000..7a12347d9 --- /dev/null +++ b/meson/docs/theme/extra/templates/brand-logo.html @@ -0,0 +1 @@ +Home diff --git a/meson/docs/theme/extra/templates/extra_head.html b/meson/docs/theme/extra/templates/extra_head.html new file mode 100644 index 000000000..29b7477ba --- /dev/null +++ b/meson/docs/theme/extra/templates/extra_head.html @@ -0,0 +1,2 @@ + + diff --git a/meson/docs/theme/extra/templates/license.html 
b/meson/docs/theme/extra/templates/license.html new file mode 100644 index 000000000..551878295 --- /dev/null +++ b/meson/docs/theme/extra/templates/license.html @@ -0,0 +1,7 @@ +@require(license, logo_path) + +
+ +
+ Website licensing information is available on the Legal page. +
diff --git a/meson/docs/theme/extra/templates/navbar_center.html b/meson/docs/theme/extra/templates/navbar_center.html new file mode 100644 index 000000000..9934be792 --- /dev/null +++ b/meson/docs/theme/extra/templates/navbar_center.html @@ -0,0 +1 @@ +

The Meson Build System

diff --git a/meson/docs/theme/extra/templates/navbar_links.html b/meson/docs/theme/extra/templates/navbar_links.html new file mode 100644 index 000000000..904a4996e --- /dev/null +++ b/meson/docs/theme/extra/templates/navbar_links.html @@ -0,0 +1,53 @@ +@require(page) + + +\ + diff --git a/meson/ghwt.py b/meson/ghwt.py new file mode 100755 index 000000000..6f9373b2c --- /dev/null +++ b/meson/ghwt.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 + +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# ghwt - GitHub WrapTool +# +# An emergency wraptool(1) replacement downloader that downloads +# directly from GitHub in case wrapdb.mesonbuild.com is down. + +import urllib.request, json, sys, os, shutil, subprocess +import configparser, hashlib + +req_timeout = 600.0 +private_repos = {'meson', 'wrapweb', 'meson-ci'} +spdir = 'subprojects' + +def gh_get(url): + r = urllib.request.urlopen(url, timeout=req_timeout) + jd = json.loads(r.read().decode('utf-8')) + return jd + +def list_projects(): + jd = gh_get('https://api.github.com/orgs/mesonbuild/repos') + entries = [entry['name'] for entry in jd] + entries = [e for e in entries if e not in private_repos] + entries.sort() + for i in entries: + print(i) + return 0 + +def unpack(sproj, branch): + tmpdir = os.path.join(spdir, sproj + '_ghwt') + shutil.rmtree(tmpdir, ignore_errors=True) + subprocess.check_call(['git', 'clone', '-b', branch, f'https://github.com/mesonbuild/{sproj}.git', tmpdir]) + usfile = os.path.join(tmpdir, 'upstream.wrap') + assert(os.path.isfile(usfile)) + config = configparser.ConfigParser(interpolation=None) + config.read(usfile) + outdir = os.path.join(spdir, sproj) + if 'directory' in config['wrap-file']: + outdir = os.path.join(spdir, config['wrap-file']['directory']) + if os.path.isdir(outdir): + print(f'Subproject is already there. 
To update, nuke the {outdir} dir and reinstall.') + shutil.rmtree(tmpdir) + return 1 + us_url = config['wrap-file']['source_url'] + us = urllib.request.urlopen(us_url, timeout=req_timeout).read() + h = hashlib.sha256() + h.update(us) + dig = h.hexdigest() + should = config['wrap-file']['source_hash'] + if dig != should: + print('Incorrect hash on download.') + print(' expected:', should) + print(' obtained:', dig) + return 1 + ofilename = os.path.join(spdir, config['wrap-file']['source_filename']) + with open(ofilename, 'wb') as ofile: + ofile.write(us) + if 'lead_directory_missing' in config['wrap-file']: + os.mkdir(outdir) + shutil.unpack_archive(ofilename, outdir) + else: + shutil.unpack_archive(ofilename, spdir) + assert(os.path.isdir(outdir)) + shutil.move(os.path.join(tmpdir, '.git'), outdir) + subprocess.check_call(['git', 'reset', '--hard'], cwd=outdir) + shutil.rmtree(tmpdir) + shutil.rmtree(os.path.join(outdir, '.git')) + os.unlink(ofilename) + +def install(sproj, requested_branch=None): + if not os.path.isdir(spdir): + print('Run this in your source root and make sure there is a subprojects directory in it.') + return 1 + blist = gh_get(f'https://api.github.com/repos/mesonbuild/{sproj}/branches') + blist = [b['name'] for b in blist] + blist = [b for b in blist if b != 'master'] + blist.sort() + branch = blist[-1] + if requested_branch is not None: + if requested_branch in blist: + branch = requested_branch + else: + print('Could not find user-requested branch', requested_branch) + print('Available branches for', sproj, ':') + print(blist) + return 1 + print('Using branch', branch) + return unpack(sproj, branch) + +def print_help(): + print('Usage:') + print(sys.argv[0], 'list') + print(sys.argv[0], 'install', 'package_name', '[branch_name]') + +def run(args): + if not args or args[0] == '-h' or args[0] == '--help': + print_help() + return 1 + command = args[0] + args = args[1:] + if command == 'list': + list_projects() + return 0 + elif command == 'install': + if len(args) == 1: + return install(args[0]) + elif len(args) == 2: + return install(args[0], args[1]) + else: + print_help() + return 1 + else: + print('Unknown command') + return 1 + +if __name__ == '__main__': + print('This is an emergency wrap downloader. Use only when wrapdb is down.') + sys.exit(run(sys.argv[1:])) diff --git a/meson/graphics/meson_logo.svg b/meson/graphics/meson_logo.svg new file mode 100644 index 000000000..d5b47bcf4 --- /dev/null +++ b/meson/graphics/meson_logo.svg @@ -0,0 +1,340 @@ + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/meson/graphics/meson_logo_big.png b/meson/graphics/meson_logo_big.png new file mode 100644 index 000000000..e2abe1b24 Binary files /dev/null and b/meson/graphics/meson_logo_big.png differ diff --git a/meson/graphics/wrap_logo.svg b/meson/graphics/wrap_logo.svg new file mode 100644 index 000000000..defc2d86f --- /dev/null +++ b/meson/graphics/wrap_logo.svg @@ -0,0 +1,70 @@ + + + + + + + + + + + + image/svg+xml + + + + + + + + + diff --git a/meson/man/meson.1 b/meson/man/meson.1 new file mode 100644 index 000000000..856b5c172 --- /dev/null +++ b/meson/man/meson.1 @@ -0,0 +1,238 @@ +.TH MESON "1" "October 2021" "meson 0.59.3" "User Commands" +.SH NAME +meson - a high productivity build system +.SH DESCRIPTION + +Meson is a build system designed to optimize programmer +productivity. 
It aims to do this by providing simple, out-of-the-box +support for modern software development tools and practices, such as +unit tests, coverage reports, Valgrind, Ccache and the like. + +The main Meson executable provides many subcommands to access all +the functionality. + +.SH The setup command + +Using Meson is simple and follows the common two-phase +process of most build systems. First you run Meson to +configure your build: + +.B meson setup [ +.I options +.B ] [ +.I build directory +.B ] [ +.I source directory +.B ] + +Note that the build directory must be different from the source +directory. Meson does not support building inside the source directory +and attempting to do that leads to an error. + +After a successful configuration step you can build the source by +running the actual build command in the build directory. The default +backend of Meson is Ninja, which can be invoked like this. + +\fBninja [\fR \fItarget\fR \fB]\fR + +You only need to run the Meson command once: when you first configure +your build dir. After that you just run the build command. Meson will +autodetect changes in your source tree and regenerate all files +needed to build the project. + +The setup command is the default operation. If no actual command is +specified, Meson will assume you meant to do a setup. That means +that you can set up a build directory without the setup command +like this: + +.B meson [ +.I options +.B ] [ +.I build directory +.B ] [ +.I source directory +.B ] + +.SS "options:" +.TP +\fB\-\-version\fR +print version number +.TP +\fB\-\-help\fR +print command line help + +.SH The configure command + +.B meson configure +provides a way to configure a Meson project from the command line. +Its usage is simple: + +.B meson configure [ +.I build directory +.B ] [ +.I options to set +.B ] + +If build directory is omitted, the current directory is used instead. + +If no parameters are set, +.B meson configure +will print the value of all build options to the console. + +To set values, use the \-D command line argument like this. + +.B meson configure \-Dopt1=value1 \-Dopt2=value2 + +.SH The introspect command + +Meson introspect is a command designed to make it simple to integrate with +other tools, such as IDEs. The output of this command is in JSON. + +.B meson introspect [ +.I build directory +.B ] [ +.I option +.B ] + +If build directory is omitted, the current directory is used instead. + +.SS "options:" +.TP +\fB\-\-targets\fR +print all top level targets (executables, libraries, etc) +.TP +\fB\-\-target\-files\fR +print the source files of the given target +.TP +\fB\-\-buildsystem\-files\fR +print all files that make up the build system (meson.build, meson_options.txt etc) +.TP +\fB\-\-tests\fR +print all unit tests +.TP +\fB\-\-help\fR +print command line help + +.SH The test command + +.B meson test +is a helper tool for running test suites of projects using Meson. +The default way of running tests is to invoke the default build command: + +\fBninja [\fR \fItest\fR \fB]\fR + +.B meson test +provides a richer set of tools for invoking tests. + +.B meson test +automatically rebuilds the necessary targets to run tests when used with the Ninja backend. +Upon build failure, +.B meson test +will return an exit code of 125. +This return code tells +.B git bisect run +to skip the current commit. +Thus bisecting using git can be done conveniently like this. 
+ +.B git bisect run meson test -C build_dir + +.SS "options:" +.TP +\fB\-\-repeat\fR +run tests as many times as specified +.TP +\fB\-\-gdb\fR +run tests under gdb +.TP +\fB\-\-list\fR +list all available tests +.TP +\fB\-\-wrapper\fR +invoke all tests via the given wrapper (e.g. valgrind) +.TP +\fB\-C\fR +Change into the given directory before running tests (must be root of build directory). +.TP +\fB\-\-suite\fR +run tests in this suite +.TP +\fB\-\-no\-suite\fR +do not run tests in this suite +.TP +\fB\-\-no\-stdsplit\fR +do not split stderr and stdout in test logs +.TP +\fB\-\-benchmark\fR +run benchmarks instead of tests +.TP +\fB\-\-logbase\fR +base of file name to use for writing test logs +.TP +\fB\-\-num-processes\fR +how many parallel processes to use to run tests +.TP +\fB\-\-verbose\fR +do not redirect stdout and stderr +.TP +\fB\-t\fR +a multiplier to use for test timeout values (usually something like 100 for Valgrind) +.TP +\fB\-\-setup\fR +use the specified test setup + +.SH The wrap command + +Wraptool is a helper utility to manage source dependencies +using the online wrapdb service. + +.B meson wrap < +.I command +.B > [ +.I options +.B ] + +You should run this command in the top level source directory +of your project. + +.SS "Commands:" +.TP +\fBlist\fR +list all available projects +.TP +\fBsearch\fR +search projects by name +.TP +\fBinstall\fR +install a project with the given name +.TP +\fBupdate\fR +update the specified project to latest available version +.TP +\fBinfo\fR +show available versions of the specified project +.TP +\fBstatus\fR +show installed and available versions of currently used subprojects + +.SH EXIT STATUS + +.TP +.B 0 +Successful. +.TP +.B 1 +Usage error, or an error parsing or executing meson.build. +.TP +.B 2 +Internal error. +.TP +.B 125 +.B meson test +could not rebuild the required targets. 
+.TP + +.SH SEE ALSO + +http://mesonbuild.com/ + +https://wrapdb.mesonbuild.com/ diff --git a/meson/manual tests/1 wrap/main.c b/meson/manual tests/1 wrap/main.c new file mode 100644 index 000000000..df6abe434 --- /dev/null +++ b/meson/manual tests/1 wrap/main.c @@ -0,0 +1,12 @@ +#include +#include + +int main(void) { + sqlite3 *db; + if(sqlite3_open(":memory:", &db) != SQLITE_OK) { + printf("Sqlite failed.\n"); + return 1; + } + sqlite3_close(db); + return 0; +} diff --git a/meson/manual tests/1 wrap/meson.build b/meson/manual tests/1 wrap/meson.build new file mode 100644 index 000000000..aee358d88 --- /dev/null +++ b/meson/manual tests/1 wrap/meson.build @@ -0,0 +1,13 @@ +project('downloader', 'c') + +cc = meson.get_compiler('c') + +s = subproject('sqlite').get_variable('sqlite_dep') +th = dependency('threads') + +libdl = cc.find_library('dl', required : false) + +e = executable('dtest', 'main.c', + dependencies : [th, libdl, s]) + +test('dltest', e) diff --git a/meson/manual tests/1 wrap/subprojects/sqlite.wrap b/meson/manual tests/1 wrap/subprojects/sqlite.wrap new file mode 100644 index 000000000..6d14949e6 --- /dev/null +++ b/meson/manual tests/1 wrap/subprojects/sqlite.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = sqlite-amalgamation-3080802 + +source_url = http://sqlite.com/2015/sqlite-amalgamation-3080802.zip +source_filename = sqlite-amalgamation-3080802.zip +source_hash = 5ebeea0dfb75d090ea0e7ff84799b2a7a1550db3fe61eb5f6f61c2e971e57663 + +patch_url = https://wrapdb.mesonbuild.com/v1/projects/sqlite/3080802/5/get_zip +patch_filename = sqlite-3080802-5-wrap.zip +patch_hash = d66469a73fa1344562d56a1d7627d5d0ee4044a77b32d16cf4bbb85741d4c9fd diff --git a/meson/manual tests/10 svn wrap/meson.build b/meson/manual tests/10 svn wrap/meson.build new file mode 100644 index 000000000..23ef1f10d --- /dev/null +++ b/meson/manual tests/10 svn wrap/meson.build @@ -0,0 +1,10 @@ +project('Subversion outchecker', 'c') + +sp = subproject('samplesubproject') + +exe = executable('gitprog', 'prog.c', +include_directories : sp.get_variable('subproj_inc'), +link_with : sp.get_variable('subproj_lib'), +) + +test('maintest', exe) diff --git a/meson/manual tests/10 svn wrap/prog.c b/meson/manual tests/10 svn wrap/prog.c new file mode 100644 index 000000000..6e2c4d861 --- /dev/null +++ b/meson/manual tests/10 svn wrap/prog.c @@ -0,0 +1,6 @@ +#include"subproj.h" + +int main(void) { + subproj_function(); + return 0; +} diff --git a/meson/manual tests/10 svn wrap/subprojects/samplesubproject.wrap b/meson/manual tests/10 svn wrap/subprojects/samplesubproject.wrap new file mode 100644 index 000000000..c8a687e3c --- /dev/null +++ b/meson/manual tests/10 svn wrap/subprojects/samplesubproject.wrap @@ -0,0 +1,4 @@ +[wrap-svn] +directory=samplesubproject +url=https://svn.code.sf.net/p/mesonsubproject/code/trunk +revision=head diff --git a/meson/manual tests/11 wrap imposter/meson.build b/meson/manual tests/11 wrap imposter/meson.build new file mode 100644 index 000000000..d0575acf7 --- /dev/null +++ b/meson/manual tests/11 wrap imposter/meson.build @@ -0,0 +1,8 @@ +project('evil URL') +# showing that new Meson wrap.py code tries to stop imposter WrapDB URLs +# a WrapException is raised. 
+# +# ERROR: https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip may be a WrapDB-impersonating URL +# + +subproject('zlib') \ No newline at end of file diff --git a/meson/manual tests/11 wrap imposter/subprojects/zlib.wrap b/meson/manual tests/11 wrap imposter/subprojects/zlib.wrap new file mode 100644 index 000000000..b88f8f2ab --- /dev/null +++ b/meson/manual tests/11 wrap imposter/subprojects/zlib.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = zlib-1.2.8 + +source_url = https://zlib.net/zlib-1.2.11.tar.gz +source_filename = zlib-1.2.11.tar.gz +source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1 + +patch_url = https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip +patch_filename = zlib-1.2.11-4-wrap.zip +patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4 \ No newline at end of file diff --git a/meson/manual tests/12 wrap mirror/meson.build b/meson/manual tests/12 wrap mirror/meson.build new file mode 100644 index 000000000..6645bdf26 --- /dev/null +++ b/meson/manual tests/12 wrap mirror/meson.build @@ -0,0 +1,4 @@ +project('downloader') +# this test will timeout, showing that a subdomain isn't caught as masquarading url + +subproject('zlib') diff --git a/meson/manual tests/12 wrap mirror/subprojects/zlib.wrap b/meson/manual tests/12 wrap mirror/subprojects/zlib.wrap new file mode 100644 index 000000000..de0b9ad07 --- /dev/null +++ b/meson/manual tests/12 wrap mirror/subprojects/zlib.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = zlib-1.2.8 + +source_url = https://zlib.net/zlib-1.2.11.tar.gz +source_filename = zlib-1.2.11.tar.gz +source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1 + +patch_url = https://mirror1.wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip +patch_filename = zlib-1.2.11-4-wrap.zip +patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4 \ No newline at end of file diff --git a/meson/manual tests/2 multiwrap/meson.build b/meson/manual tests/2 multiwrap/meson.build new file mode 100644 index 000000000..a4c42f468 --- /dev/null +++ b/meson/manual tests/2 multiwrap/meson.build @@ -0,0 +1,12 @@ +project('multiwrap', 'c', + default_options : 'c_std=c99') + +# Using multiple downloaded projects for great justice. 
+ +cc = meson.get_compiler('c') + +luadep = dependency('lua', fallback : ['lua', 'lua_dep']) +pngdep = dependency('libpng', fallback : ['libpng', 'png_dep']) + +executable('prog', 'prog.c', + dependencies : [pngdep, luadep]) diff --git a/meson/manual tests/2 multiwrap/prog.c b/meson/manual tests/2 multiwrap/prog.c new file mode 100644 index 000000000..dd0349e2d --- /dev/null +++ b/meson/manual tests/2 multiwrap/prog.c @@ -0,0 +1,66 @@ +#include +#include +#include +#include +#include +#if !defined(_MSC_VER) +#include +#endif + +static void *l_alloc (void *ud, void *ptr, size_t osize, + size_t nsize) { + (void)ud; + (void)osize; + if (nsize == 0) { + free(ptr); + return NULL; + } else { + return realloc(ptr, nsize); + } +} + +void open_image(const char *fname) { + png_image image; + + memset(&image, 0, (sizeof image)); + image.version = PNG_IMAGE_VERSION; + + if(png_image_begin_read_from_file(&image, fname) != 0) { + png_bytep buffer; + + image.format = PNG_FORMAT_RGBA; + buffer = malloc(PNG_IMAGE_SIZE(image)); + + if(png_image_finish_read(&image, NULL, buffer, 0, NULL) != 0) { + printf("Image %s read failed: %s\n", fname, image.message); + } +// png_free_image(&image); + free(buffer); + } else { + printf("Image %s open failed: %s", fname, image.message); + } +} + +int printer(lua_State *l) { + if(!lua_isstring(l, 1)) { + fprintf(stderr, "Incorrect call.\n"); + return 0; + } + open_image(lua_tostring(l, 1)); + return 0; +} + + +int main(int argc, char **argv) { + lua_State *l = lua_newstate(l_alloc, NULL); + if(!l) { + printf("Lua state allocation failed.\n"); + return 1; + } + lua_register(l, "printer", printer); + lua_getglobal(l, "printer"); + lua_pushliteral(l, "foobar.png"); + lua_call(l, 1, 0); + lua_close(l); + return 0; +} diff --git a/meson/manual tests/2 multiwrap/subprojects/libpng.wrap b/meson/manual tests/2 multiwrap/subprojects/libpng.wrap new file mode 100644 index 000000000..283775c1e --- /dev/null +++ b/meson/manual tests/2 multiwrap/subprojects/libpng.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = libpng-1.6.34 + +source_url = ftp://ftp-osl.osuosl.org/pub/libpng/src/libpng16/libpng-1.6.34.tar.xz +source_filename = libpng-1.6.34.tar.xz +source_hash = 2f1e960d92ce3b3abd03d06dfec9637dfbd22febf107a536b44f7a47c60659f6 + +patch_url = https://wrapdb.mesonbuild.com/v1/projects/libpng/1.6.34/1/get_zip +patch_filename = libpng-1.6.34-1-wrap.zip +patch_hash = 2123806eba8180c164e33a210f2892bbeb2473b69e56aecc786574e9221e6f20 diff --git a/meson/manual tests/2 multiwrap/subprojects/lua.wrap b/meson/manual tests/2 multiwrap/subprojects/lua.wrap new file mode 100644 index 000000000..c1a179a5d --- /dev/null +++ b/meson/manual tests/2 multiwrap/subprojects/lua.wrap @@ -0,0 +1,11 @@ +[wrap-file] +directory = lua-5.3.0 + +source_url = http://www.lua.org/ftp/lua-5.3.0.tar.gz +source_filename = lua-5.3.0.tar.gz +source_hash = ae4a5eb2d660515eb191bfe3e061f2b8ffe94dce73d32cfd0de090ddcc0ddb01 + + +patch_url = https://wrapdb.mesonbuild.com/v1/projects/lua/5.3.0/5/get_zip +patch_filename = lua-5.3.0-5-wrap.zip +patch_hash = 439038309a0700adfb67d764b3fe935ed8601b31f819fc369e1438c6e79334dd diff --git a/meson/manual tests/2 multiwrap/subprojects/zlib.wrap b/meson/manual tests/2 multiwrap/subprojects/zlib.wrap new file mode 100644 index 000000000..6d5896f79 --- /dev/null +++ b/meson/manual tests/2 multiwrap/subprojects/zlib.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = zlib-1.2.8 + +source_url = http://zlib.net/fossils/zlib-1.2.8.tar.gz +source_filename = zlib-1.2.8.tar.gz +source_hash = 
36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d + +patch_url = https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.8/8/get_zip +patch_filename = zlib-1.2.8-8-wrap.zip +patch_hash = 17c52a0e0c59ce926d3959005d5cd8178c6c7e2c9a4a1304279a8320c955ac60 diff --git a/meson/manual tests/3 git wrap/meson.build b/meson/manual tests/3 git wrap/meson.build new file mode 100644 index 000000000..7fd5083ee --- /dev/null +++ b/meson/manual tests/3 git wrap/meson.build @@ -0,0 +1,10 @@ +project('git outcheckker', 'c') + +sp = subproject('samplesubproject') + +exe = executable('gitprog', 'prog.c', +include_directories : sp.get_variable('subproj_inc'), +link_with : sp.get_variable('subproj_lib'), +) + +test('maintest', exe) diff --git a/meson/manual tests/3 git wrap/prog.c b/meson/manual tests/3 git wrap/prog.c new file mode 100644 index 000000000..6e2c4d861 --- /dev/null +++ b/meson/manual tests/3 git wrap/prog.c @@ -0,0 +1,6 @@ +#include"subproj.h" + +int main(void) { + subproj_function(); + return 0; +} diff --git a/meson/manual tests/3 git wrap/subprojects/samplesubproject.wrap b/meson/manual tests/3 git wrap/subprojects/samplesubproject.wrap new file mode 100644 index 000000000..f52190b8f --- /dev/null +++ b/meson/manual tests/3 git wrap/subprojects/samplesubproject.wrap @@ -0,0 +1,4 @@ +[wrap-git] +directory=samplesubproject +url=https://github.com/jpakkane/samplesubproject.git +revision=head diff --git a/meson/manual tests/4 standalone binaries/Info.plist b/meson/manual tests/4 standalone binaries/Info.plist new file mode 100644 index 000000000..0f0c90e49 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/Info.plist @@ -0,0 +1,26 @@ + + + + + CFBundleGetInfoString + MyApp + CFBundleExecutable + myapp.sh + CFBundleIdentifier + com.example.me + CFBundleName + myapp + CFBundleIconFile + myapp.icns + CFBundleShortVersionString + 1.0 + CFBundleInfoDictionaryVersion + 6.0 + CFBundlePackageType + APPL + IFMajorVersion + 0 + IFMinorVersion + 1 + + diff --git a/meson/manual tests/4 standalone binaries/build_linux_package.sh b/meson/manual tests/4 standalone binaries/build_linux_package.sh new file mode 100755 index 000000000..783981ef5 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/build_linux_package.sh @@ -0,0 +1,12 @@ +#!/bin/sh -eu + +curdir=`pwd` +rm -rf buildtmp +mkdir buildtmp +LDFLAGS=-static-libstdc++ ~/meson/meson.py buildtmp --buildtype=release --prefix=/tmp/myapp --libdir=lib --strip +ninja -C buildtmp install +rm -rf buildtmp +cd /tmp/ +tar czf myapp.tar.gz myapp +mv myapp.tar.gz "$curdir" +rm -rf myapp diff --git a/meson/manual tests/4 standalone binaries/build_osx_package.sh b/meson/manual tests/4 standalone binaries/build_osx_package.sh new file mode 100755 index 000000000..8a94ca534 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/build_osx_package.sh @@ -0,0 +1,20 @@ +#!/bin/sh -eu + +rm -rf buildtmp +mkdir buildtmp +~/meson/meson.py buildtmp --buildtype=release --prefix=/tmp/myapp.app --bindir=Contents/MacOS +ninja -C buildtmp install +rm -rf buildtmp +mkdir -p mnttmp +rm -f working.dmg +gunzip < template.dmg.gz > working.dmg +hdiutil attach working.dmg -noautoopen -quiet -mountpoint mnttmp +rm -rf mnttmp/myapp.app +mv /tmp/myapp.app mnttmp +# NOTE: output of hdiutil changes every now and then. +# Verify that this is still working. 
+hdiutil detach $(hdiutil info|grep "mnttmp"|awk '{print $1}') +rm -rf mnttmp +rm -f myapp.dmg +hdiutil convert working.dmg -quiet -format UDZO -imagekey zlib-level=9 -o myapp.dmg +rm -f working.dmg diff --git a/meson/manual tests/4 standalone binaries/build_windows_package.py b/meson/manual tests/4 standalone binaries/build_windows_package.py new file mode 100755 index 000000000..0932eac09 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/build_windows_package.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 + +import os, urllib.request, shutil, subprocess +from glob import glob + +sdl_url = 'http://libsdl.org/release/SDL2-devel-2.0.3-VC.zip' +sdl_filename = 'SDL2-devel-2.0.3-VC.zip' +sdl_dir = 'SDL2-2.0.3' + +shutil.rmtree('build', ignore_errors=True) +os.mkdir('build') + +if not os.path.exists(sdl_filename): + response = urllib.request.urlopen(sdl_url, timeout=600.0) + data = response.read() + open(sdl_filename, 'wb').write(data) + +shutil.unpack_archive(sdl_filename, 'build') + +libs = glob(os.path.join('build', sdl_dir, 'lib/x86/*')) +[shutil.copy(x, 'build') for x in libs] + +# Sorry for this hack but this needs to work during development +# when Meson is not in path. +subprocess.check_call(['python3', r'..\..\meson.py', 'build', + '--backend=ninja', '--buildtype=release']) +subprocess.check_call(['ninja'], cwd='build') +shutil.copy('myapp.iss', 'build') +subprocess.check_call([r'\Program Files\Inno Setup 5\ISCC.exe', 'myapp.iss'], + cwd='build') +shutil.copy('build/setup.exe', 'myapp 1.0.exe') +shutil.rmtree('build') diff --git a/meson/manual tests/4 standalone binaries/linux_bundler.sh b/meson/manual tests/4 standalone binaries/linux_bundler.sh new file mode 100755 index 000000000..2a8e907fe --- /dev/null +++ b/meson/manual tests/4 standalone binaries/linux_bundler.sh @@ -0,0 +1,7 @@ +#!/bin/sh -eu + +libdir="${MESON_INSTALL_PREFIX}/lib" +mkdir -p $libdir +sdlfile=`ldd ${MESON_INSTALL_PREFIX}/bin/myapp | grep libSDL | cut -d ' ' -f 3` +cp $sdlfile "${libdir}" +strip "${libdir}/libSDL"* diff --git a/meson/manual tests/4 standalone binaries/meson.build b/meson/manual tests/4 standalone binaries/meson.build new file mode 100644 index 000000000..ad6645f5b --- /dev/null +++ b/meson/manual tests/4 standalone binaries/meson.build @@ -0,0 +1,38 @@ +project('myapp', 'cpp') + +sdl = dependency('sdl2', required : host_machine.system() != 'windows') + +if meson.get_compiler('cpp').get_id() != 'msvc' + add_global_arguments('-std=c++11', language : 'cpp') +endif + +if host_machine.system() == 'darwin' + install_data('myapp.sh', + install_dir : 'Contents/MacOS') + + install_data('myapp.icns', + install_dir : 'Contents/Resources') + + install_data('Info.plist', + install_dir : 'Contents') + + meson.add_install_script('osx_bundler.sh') +endif + +if host_machine.system() == 'linux' + install_data('myapp.sh', install_dir : '.') + meson.add_install_script('linux_bundler.sh') +endif + +extra_link_args = [] + +if host_machine.system() == 'windows' + str = '-I@0@/@1@'.format(meson.current_build_dir(), 'SDL2-2.0.3/include') + add_global_arguments(str, language : 'cpp') + extra_link_args = ['/SUBSYSTEM:CONSOLE', 'SDL2main.lib', 'SDL2.lib'] +endif + +prog = executable('myapp', 'myapp.cpp', +dependencies : sdl, +link_args : extra_link_args, +install : true) diff --git a/meson/manual tests/4 standalone binaries/myapp.cpp b/meson/manual tests/4 standalone binaries/myapp.cpp new file mode 100644 index 000000000..8ddff2736 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/myapp.cpp @@ -0,0 +1,39 
@@ +#include +#include +#include +#include + +int main(void) { + SDL_Surface *screenSurface; + SDL_Event e; + int keepGoing = 1; + std::string message; + + if(SDL_Init( SDL_INIT_VIDEO ) < 0) { + printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() ); + } + atexit(SDL_Quit); + + std::unique_ptr window(SDL_CreateWindow( "My application", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 640, 480, SDL_WINDOW_SHOWN), SDL_DestroyWindow); + screenSurface = SDL_GetWindowSurface(window.get()); + + // Use iostream to make sure we have not screwed + // up libstdc++ linking. + message = "Window created."; + message += " Starting main loop."; + std::cout << message << std::endl; + + while(keepGoing) { + while(SDL_PollEvent(&e) != 0) { + if(e.type == SDL_QUIT) { + keepGoing = 0; + break; + } + } + SDL_FillRect(screenSurface, NULL, SDL_MapRGB(screenSurface->format, 0xFF, 0x00, 0x00)); + SDL_UpdateWindowSurface(window.get()); + SDL_Delay(100); + } + + return 0; +} diff --git a/meson/manual tests/4 standalone binaries/myapp.icns b/meson/manual tests/4 standalone binaries/myapp.icns new file mode 100644 index 000000000..633195454 Binary files /dev/null and b/meson/manual tests/4 standalone binaries/myapp.icns differ diff --git a/meson/manual tests/4 standalone binaries/myapp.iss b/meson/manual tests/4 standalone binaries/myapp.iss new file mode 100644 index 000000000..2bd441ded --- /dev/null +++ b/meson/manual tests/4 standalone binaries/myapp.iss @@ -0,0 +1,18 @@ +; Innosetup file for My app. + +[Setup] +AppName=My App +AppVersion=1.0 +DefaultDirName={pf}\My App +DefaultGroupName=My App +UninstallDisplayIcon={app}\myapp.exe +Compression=lzma2 +SolidCompression=yes +OutputDir=. + +[Files] +Source: "myapp.exe"; DestDir: "{app}" +Source: "SDL2.dll"; DestDir: "{app}" + +;[Icons] +;Name: "{group}\My App"; Filename: "{app}\myapp.exe" diff --git a/meson/manual tests/4 standalone binaries/myapp.sh b/meson/manual tests/4 standalone binaries/myapp.sh new file mode 100755 index 000000000..319148353 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/myapp.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +cd "${0%/*}" + +if [ `uname` == 'Darwin' ]; then + ./myapp +else + export LD_LIBRARY_PATH="`pwd`/lib" + bin/myapp +fi diff --git a/meson/manual tests/4 standalone binaries/osx_bundler.sh b/meson/manual tests/4 standalone binaries/osx_bundler.sh new file mode 100755 index 000000000..3bad65f76 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/osx_bundler.sh @@ -0,0 +1,6 @@ +#!/bin/sh -eu + +mkdir -p ${MESON_INSTALL_PREFIX}/Contents/Frameworks +cp -R /Library/Frameworks/SDL2.framework ${MESON_INSTALL_PREFIX}/Contents/Frameworks + +install_name_tool -change @rpath/SDL2.framework/Versions/A/SDL2 @executable_path/../Frameworks/SDL2.framework/Versions/A/SDL2 ${MESON_INSTALL_PREFIX}/Contents/MacOS/myapp diff --git a/meson/manual tests/4 standalone binaries/readme.txt b/meson/manual tests/4 standalone binaries/readme.txt new file mode 100644 index 000000000..991b5c6b8 --- /dev/null +++ b/meson/manual tests/4 standalone binaries/readme.txt @@ -0,0 +1,12 @@ +This directory shows how you can build redistributable binaries. On +OSX this menans building an app bundle and a .dmg installer. On Linux +it means building an archive that bundles its dependencies. On Windows +it means building an .exe installer. + +To build each package you run the corresponding build_ARCH.sh build +script. 
+ +On Linux you must build the package on the oldest distribution you +plan to support (Debian stable/oldstable and old CentOS are the common +choice here). + diff --git a/meson/manual tests/4 standalone binaries/template.dmg.gz b/meson/manual tests/4 standalone binaries/template.dmg.gz new file mode 100644 index 000000000..fcb6d6115 Binary files /dev/null and b/meson/manual tests/4 standalone binaries/template.dmg.gz differ diff --git a/meson/manual tests/5 rpm/lib.c b/meson/manual tests/5 rpm/lib.c new file mode 100644 index 000000000..efc230aab --- /dev/null +++ b/meson/manual tests/5 rpm/lib.c @@ -0,0 +1,6 @@ +#include"lib.h" + +char *meson_print(void) +{ + return "Hello, world!"; +} diff --git a/meson/manual tests/5 rpm/lib.h b/meson/manual tests/5 rpm/lib.h new file mode 100644 index 000000000..08fc9611c --- /dev/null +++ b/meson/manual tests/5 rpm/lib.h @@ -0,0 +1 @@ +char *meson_print(void); diff --git a/meson/manual tests/5 rpm/main.c b/meson/manual tests/5 rpm/main.c new file mode 100644 index 000000000..8b1d193ee --- /dev/null +++ b/meson/manual tests/5 rpm/main.c @@ -0,0 +1,8 @@ +#include +#include +int main(void) +{ + char *t = meson_print(); + printf("%s", t); + return 0; +} diff --git a/meson/manual tests/5 rpm/meson.build b/meson/manual tests/5 rpm/meson.build new file mode 100644 index 000000000..131da3930 --- /dev/null +++ b/meson/manual tests/5 rpm/meson.build @@ -0,0 +1,14 @@ +project('test spec', 'c') + +rpm = import('rpm') +dependency('zlib') +find_program('nonexistprog', required : false) + +lib = shared_library('mesontest_shared', ['lib.c', 'lib.h'], + version : '0.1', soversion : '0', + install : true) +executable('mesontest-bin', 'main.c', + link_with : lib, + install : true) + +rpm.generate_spec_template() diff --git a/meson/manual tests/6 hg wrap/meson.build b/meson/manual tests/6 hg wrap/meson.build new file mode 100644 index 000000000..c7ac004ca --- /dev/null +++ b/meson/manual tests/6 hg wrap/meson.build @@ -0,0 +1,10 @@ +project('Mercurial outcheckker', 'c') + +sp = subproject('samplesubproject') + +exe = executable('gitprog', 'prog.c', +include_directories : sp.get_variable('subproj_inc'), +link_with : sp.get_variable('subproj_lib'), +) + +test('maintest', exe) diff --git a/meson/manual tests/6 hg wrap/prog.c b/meson/manual tests/6 hg wrap/prog.c new file mode 100644 index 000000000..6e2c4d861 --- /dev/null +++ b/meson/manual tests/6 hg wrap/prog.c @@ -0,0 +1,6 @@ +#include"subproj.h" + +int main(void) { + subproj_function(); + return 0; +} diff --git a/meson/manual tests/6 hg wrap/subprojects/samplesubproject.wrap b/meson/manual tests/6 hg wrap/subprojects/samplesubproject.wrap new file mode 100644 index 000000000..6d3b3f2d4 --- /dev/null +++ b/meson/manual tests/6 hg wrap/subprojects/samplesubproject.wrap @@ -0,0 +1,4 @@ +[wrap-hg] +directory=samplesubproject +url=https://bitbucket.org/jpakkane/samplesubproject +revision=tip diff --git a/meson/manual tests/7 vala composite widgets/meson.build b/meson/manual tests/7 vala composite widgets/meson.build new file mode 100644 index 000000000..579ca5198 --- /dev/null +++ b/meson/manual tests/7 vala composite widgets/meson.build @@ -0,0 +1,21 @@ +project('composite', 'vala', 'c') +gnome = import('gnome') +deps = [ + dependency('glib-2.0', version : '>=2.38'), + dependency('gobject-2.0'), + dependency('gtk+-3.0'), +] +res = files('my-resources.xml') +gres = gnome.compile_resources( + 'my', res, + source_dir : '.', +) +executable( + 'demo', + sources : [ + 'mywidget.vala', + gres, + ], + dependencies : deps, + 
vala_args : ['--gresources', res], +) diff --git a/meson/manual tests/7 vala composite widgets/my-resources.xml b/meson/manual tests/7 vala composite widgets/my-resources.xml new file mode 100644 index 000000000..b5743c193 --- /dev/null +++ b/meson/manual tests/7 vala composite widgets/my-resources.xml @@ -0,0 +1,6 @@ + + + + mywidget.ui + + diff --git a/meson/manual tests/7 vala composite widgets/mywidget.ui b/meson/manual tests/7 vala composite widgets/mywidget.ui new file mode 100644 index 000000000..2d6286ca2 --- /dev/null +++ b/meson/manual tests/7 vala composite widgets/mywidget.ui @@ -0,0 +1,70 @@ + + + + + diff --git a/meson/manual tests/7 vala composite widgets/mywidget.vala b/meson/manual tests/7 vala composite widgets/mywidget.vala new file mode 100644 index 000000000..68eaecc27 --- /dev/null +++ b/meson/manual tests/7 vala composite widgets/mywidget.vala @@ -0,0 +1,41 @@ +using Gtk; + +[GtkTemplate (ui = "/org/foo/my/mywidget.ui")] +public class MyWidget : Box { + public string text { + get { return entry.text; } + set { entry.text = value; } + } + + [GtkChild] + private Entry entry; + + public MyWidget (string text) { + this.text = text; + } + + [GtkCallback] + private void on_button_clicked (Button button) { + print ("The button was clicked with entry text: %s\n", entry.text); + } + + [GtkCallback] + private void on_entry_changed (Editable editable) { + print ("The entry text changed: %s\n", entry.text); + + notify_property ("text"); + } +} + +void main(string[] args) { + Gtk.init (ref args); + var win = new Window(); + win.destroy.connect (Gtk.main_quit); + + var widget = new MyWidget ("The entry text!"); + + win.add (widget); + win.show_all (); + + Gtk.main (); +} diff --git a/meson/manual tests/8 timeout/meson.build b/meson/manual tests/8 timeout/meson.build new file mode 100644 index 000000000..8ba7d4b43 --- /dev/null +++ b/meson/manual tests/8 timeout/meson.build @@ -0,0 +1,8 @@ +project('timeout', 'c') + +# This creates a test that times out. It is a manual test +# because currently there is no test suite for test that are expected +# to fail during unit test phase. + +exe = executable('sleepprog', 'sleepprog.c') +test('timeout', exe, timeout : 1) diff --git a/meson/manual tests/8 timeout/sleepprog.c b/meson/manual tests/8 timeout/sleepprog.c new file mode 100644 index 000000000..8875e126a --- /dev/null +++ b/meson/manual tests/8 timeout/sleepprog.c @@ -0,0 +1,6 @@ +#include + +int main(void) { + sleep(1000); + return 0; +} diff --git a/meson/meson.py b/meson/meson.py new file mode 100755 index 000000000..dab08d3a8 --- /dev/null +++ b/meson/meson.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 + +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys +from pathlib import Path + +# If we're run uninstalled, add the script directory to sys.path to ensure that +# we always import the correct mesonbuild modules even if PYTHONPATH is mangled +meson_exe = Path(sys.argv[0]).resolve() +if (meson_exe.parent / 'mesonbuild').is_dir(): + sys.path.insert(0, str(meson_exe.parent)) + +from mesonbuild import mesonmain + +if __name__ == '__main__': + sys.exit(mesonmain.main()) diff --git a/meson/mesonbuild/__init__.py b/meson/mesonbuild/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/meson/mesonbuild/_pathlib.py b/meson/mesonbuild/_pathlib.py new file mode 100644 index 000000000..640b5ed21 --- /dev/null +++ b/meson/mesonbuild/_pathlib.py @@ -0,0 +1,73 @@ +# Copyright 2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +''' + This module soly exists to work around a pathlib.resolve bug on + certain Windows systems: + + https://github.com/mesonbuild/meson/issues/7295 + https://bugs.python.org/issue31842 + + It should **never** be used directly. Instead, it is automatically + used when `import pathlib` is used. This is achieved by messing with + `sys.modules['pathlib']` in mesonmain. + + Additionally, the sole purpose of this module is to work around a + python bug. This only bugfixes to pathlib functions and classes are + allowed here. Finally, this file should be removed once all upstream + python bugs are fixed and it is OK to tell our users to "just upgrade + python". +''' + +import pathlib +import os +import platform + +__all__ = [ + 'PurePath', + 'PurePosixPath', + 'PureWindowsPath', + 'Path', +] + +PurePath = pathlib.PurePath +PurePosixPath = pathlib.PurePosixPath +PureWindowsPath = pathlib.PureWindowsPath + +# Only patch on platforms where the bug occurs +if platform.system().lower() in {'windows'}: + # Can not directly inherit from pathlib.Path because the __new__ + # operator of pathlib.Path() returns a {Posix,Windows}Path object. + class Path(type(pathlib.Path())): + def resolve(self, strict: bool = False) -> 'Path': + ''' + Work around a resolve bug on certain Windows systems: + + https://github.com/mesonbuild/meson/issues/7295 + https://bugs.python.org/issue31842 + ''' + + try: + return super().resolve(strict=strict) + except OSError: + return Path(os.path.normpath(self)) +else: + Path = pathlib.Path + PosixPath = pathlib.PosixPath + WindowsPath = pathlib.WindowsPath + + __all__ += [ + 'PosixPath', + 'WindowsPath', + ] diff --git a/meson/mesonbuild/_typing.py b/meson/mesonbuild/_typing.py new file mode 100644 index 000000000..31a6e18b6 --- /dev/null +++ b/meson/mesonbuild/_typing.py @@ -0,0 +1,120 @@ +# SPDX-License-Identifer: Apache-2.0 +# Copyright 2020 The Meson development team +# Copyright © 2020-2021 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Meson specific typing helpers. + +Holds typing helper classes, such as the ImmutableProtocol classes +""" + +__all__ = [ + 'Protocol', + 'ImmutableListProtocol' +] + +import typing + +# We can change this to typing when we require python 3.8 +from typing_extensions import Protocol + + +T = typing.TypeVar('T') + + +class StringProtocol(Protocol): + def __str__(self) -> str: ... + +class SizedStringProtocol(Protocol, StringProtocol, typing.Sized): + pass + +class ImmutableListProtocol(Protocol[T]): + + """A protocol used in cases where a list is returned, but should not be + mutated. + + This provides all of the methods of a Sequence, as well as copy(). copy() + returns a list, which allows mutation as it's a copy and that's (hopefully) + safe. + + One particular case this is important is for cached values, since python is + a pass-by-reference language. + """ + + def __iter__(self) -> typing.Iterator[T]: ... + + @typing.overload + def __getitem__(self, index: int) -> T:... + @typing.overload + def __getitem__(self, index: slice) -> typing.List[T]: ... + + def __contains__(self, item: T) -> bool: ... + + def __reversed__(self) -> typing.Iterator[T]: ... + + def __len__(self) -> int: ... + + def __add__(self, other: typing.List[T]) -> typing.List[T]: ... + + def __eq__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __ge__(self, other: typing.Any) -> bool: ... + + def count(self, item: T) -> int: ... + + def index(self, item: T) -> int: ... + + def copy(self) -> typing.List[T]: ... + + +class ImmutableSetProtocol(Protocol[T]): + + """A protocol for a set that cannot be mutated. + + This provides for cases where mutation of the set is undesired. Although + this will be allowed at runtime, mypy (or another type checker), will see + any attempt to use mutative methods as an error. + """ + + def __iter__(self) -> typing.Iterator[T]: ... + + def __contains__(self, item: T) -> bool: ... + + def __len__(self) -> int: ... + + def __add__(self, other: typing.Set[T]) -> typing.Set[T]: ... + + def __eq__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __ge__(self, other: typing.Any) -> bool: ... + + def copy(self) -> typing.Set[T]: ... + + def difference(self, other: typing.Set[T]) -> typing.Set[T]: ... + + def intersection(self, other: typing.Set[T]) -> typing.Set[T]: ... + + def issubset(self, other: typing.Set[T]) -> bool: ... + + def issuperset(self, other: typing.Set[T]) -> bool: ... + + def symmetric_difference(self, other: typing.Set[T]) -> typing.Set[T]: ... + + def union(self, other: typing.Set[T]) -> typing.Set[T]: ... 
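The `ImmutableListProtocol` docstring above describes its intended use: a cached list is handed out by reference, so annotating the return type lets mypy catch in-place mutation while `copy()` remains the sanctioned escape hatch. Below is a minimal sketch of that pattern; the `CachedHolder` class and its field names are hypothetical and not part of mesonbuild, and the snippet assumes the `mesonbuild` package (and its `typing_extensions` dependency) is importable.

```python
# Hypothetical illustration of the annotation pattern described in the
# docstring above; CachedHolder and its field names are made up for this sketch.
import typing as T

from mesonbuild._typing import ImmutableListProtocol


class CachedHolder:
    def __init__(self) -> None:
        # A value computed once and handed out by reference afterwards.
        self._cached_args: T.List[str] = ['-O2', '-g']

    def get_args(self) -> ImmutableListProtocol[str]:
        # Annotating the return type as immutable tells the type checker
        # that callers must not mutate the shared cache in place.
        return self._cached_args


holder = CachedHolder()
args = holder.get_args()
private_args = args.copy() + ['-Wall']  # copy() returns a plain, mutable list
# args.append('-Wall')  # flagged by mypy: the protocol defines no append()
print(private_args)
```

The protocol changes nothing at runtime; its only effect is to give the type checker grounds to reject mutating calls on the shared reference.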
diff --git a/meson/mesonbuild/arglist.py b/meson/mesonbuild/arglist.py new file mode 100644 index 000000000..e150d39ad --- /dev/null +++ b/meson/mesonbuild/arglist.py @@ -0,0 +1,334 @@ +# Copyright 2012-2020 The Meson development team +# Copyright © 2020 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from functools import lru_cache +import collections +import enum +import os +import re +import typing as T + +from . import mesonlib + +if T.TYPE_CHECKING: + from .linkers import StaticLinker + from .compilers import Compiler + +UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str] +# execinfo is a compiler lib on FreeBSD and NetBSD +if mesonlib.is_freebsd() or mesonlib.is_netbsd(): + UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo') + + +class Dedup(enum.Enum): + + """What kind of deduplication can be done to compiler args. + + OVERRIDDEN - Whether an argument can be 'overridden' by a later argument. + For example, -DFOO defines FOO and -UFOO undefines FOO. In this case, + we can safely remove the previous occurrence and add a new one. The + same is true for include paths and library paths with -I and -L. + UNIQUE - Arguments that once specified cannot be undone, such as `-c` or + `-pipe`. New instances of these can be completely skipped. + NO_DEDUP - Whether it matters where or how many times on the command-line + a particular argument is present. This can matter for symbol + resolution in static or shared libraries, so we cannot de-dup or + reorder them. + """ + + NO_DEDUP = 0 + UNIQUE = 1 + OVERRIDDEN = 2 + + +class CompilerArgs(collections.abc.MutableSequence): + ''' + List-like class that manages a list of compiler arguments. Should be used + while constructing compiler arguments from various sources. Can be + operated with ordinary lists, so this does not need to be used + everywhere. + + All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc) + and can converted to the native type of each compiler by using the + .to_native() method to which you must pass an instance of the compiler or + the compiler class. + + New arguments added to this class (either with .append(), .extend(), or +=) + are added in a way that ensures that they override previous arguments. + For example: + + >>> a = ['-Lfoo', '-lbar'] + >>> a += ['-Lpho', '-lbaz'] + >>> print(a) + ['-Lpho', '-Lfoo', '-lbar', '-lbaz'] + + Arguments will also be de-duped if they can be de-duped safely. + + Note that because of all this, this class is not commutative and does not + preserve the order of arguments if it is safe to not. For example: + >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror'] + >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar'] + ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror'] + + ''' + # Arg prefixes that override by prepending instead of appending + prepend_prefixes = () # type: T.Tuple[str, ...] 
+ + # Arg prefixes and args that must be de-duped by returning 2 (i.e. Dedup.OVERRIDDEN) + dedup2_prefixes = () # type: T.Tuple[str, ...] + dedup2_suffixes = () # type: T.Tuple[str, ...] + dedup2_args = () # type: T.Tuple[str, ...] + + # Arg prefixes and args that must be de-duped by returning 1 (i.e. Dedup.UNIQUE) + # + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = () # type: T.Tuple[str, ...] + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...] + # Match a .so of the form path/to/libfoo.so.0.1.0 + # Only UNIX shared libraries require this. Others have a fixed extension. + dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + dedup1_args = () # type: T.Tuple[str, ...] + # In generate_link() we add external libs without de-dup, but we must + # *always* de-dup these because they're special arguments to the linker + # TODO: these should probably move too + always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type: T.Tuple[str, ...] + + def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'], + iterable: T.Optional[T.Iterable[str]] = None): + self.compiler = compiler + self._container = list(iterable) if iterable is not None else [] # type: T.List[str] + self.pre = collections.deque() # type: T.Deque[str] + self.post = collections.deque() # type: T.Deque[str] + + # Flush the saved pre and post list into the _container list + # + # This correctly deduplicates the entries according to the classification done by _can_dedup() + # Note: This function is designed to work without delete operations, as deletions would worsen the performance considerably. + def flush_pre_post(self) -> None: + new = list() # type: T.List[str] + pre_flush_set = set() # type: T.Set[str] + post_flush = collections.deque() # type: T.Deque[str] + post_flush_set = set() # type: T.Set[str] + + # The two lists are walked without any removals: pre from the front and post in reverse, so the occurrence that should win is the one kept + for a in self.pre: + dedup = self._can_dedup(a) + if a not in pre_flush_set: + new.append(a) + if dedup is Dedup.OVERRIDDEN: + pre_flush_set.add(a) + for a in reversed(self.post): + dedup = self._can_dedup(a) + if a not in post_flush_set: + post_flush.appendleft(a) + if dedup is Dedup.OVERRIDDEN: + post_flush_set.add(a) + + # pre and post will overwrite every element that is in the container; + # only copy over args that are in _container but not in the pre or post flush sets + if pre_flush_set or post_flush_set: + for a in self._container: + if a not in post_flush_set and a not in pre_flush_set: + new.append(a) + else: + new.extend(self._container) + new.extend(post_flush) + + self._container = new + self.pre.clear() + self.post.clear() + + def __iter__(self) -> T.Iterator[str]: + self.flush_pre_post() + return iter(self._container) + + @T.overload # noqa: F811 + def __getitem__(self, index: int) -> str: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811 + pass + + def __getitem__(self, index: T.Union[int, slice]) -> T.Union[str, T.MutableSequence[str]]: # noqa: F811 + self.flush_pre_post() + return self._container[index] + + @T.overload # noqa: F811 + def __setitem__(self, index: int, value: str) -> None: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811 + pass + + def __setitem__(self, index: T.Union[int, slice], value:
T.Union[str, T.Iterable[str]]) -> None: # noqa: F811 + self.flush_pre_post() + self._container[index] = value # type: ignore # TODO: fix 'Invalid index type' and 'Incompatible types in assignment' errors + + def __delitem__(self, index: T.Union[int, slice]) -> None: + self.flush_pre_post() + del self._container[index] + + def __len__(self) -> int: + return len(self._container) + len(self.pre) + len(self.post) + + def insert(self, index: int, value: str) -> None: + self.flush_pre_post() + self._container.insert(index, value) + + def copy(self) -> 'CompilerArgs': + self.flush_pre_post() + return type(self)(self.compiler, self._container.copy()) + + @classmethod + @lru_cache(maxsize=None) + def _can_dedup(cls, arg: str) -> Dedup: + """Returns whether the argument can be safely de-duped. + + In addition to the prefix/suffix rules below, we handle library arguments specially. + With GNU ld, we surround library arguments with -Wl,--start/end-group + to recursively search for symbols in the libraries. This is not needed + with other linkers. + """ + + # A standalone argument must never be deduplicated because it is + # defined by what comes _after_ it. Thus dedupping this: + # -D FOO -D BAR + # would yield either + # -D FOO BAR + # or + # FOO -D BAR + # both of which are invalid. + if arg in cls.dedup2_prefixes: + return Dedup.NO_DEDUP + if arg in cls.dedup2_args or \ + arg.startswith(cls.dedup2_prefixes) or \ + arg.endswith(cls.dedup2_suffixes): + return Dedup.OVERRIDDEN + if arg in cls.dedup1_args or \ + arg.startswith(cls.dedup1_prefixes) or \ + arg.endswith(cls.dedup1_suffixes) or \ + re.search(cls.dedup1_regex, arg): + return Dedup.UNIQUE + return Dedup.NO_DEDUP + + @classmethod + @lru_cache(maxsize=None) + def _should_prepend(cls, arg: str) -> bool: + return arg.startswith(cls.prepend_prefixes) + + def to_native(self, copy: bool = False) -> T.List[str]: + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. + self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + return self.compiler.unix_args_to_native(new._container) + + def append_direct(self, arg: str) -> None: + ''' + Append the specified argument without any reordering or de-dup except + for absolute paths to libraries, etc, which can always be de-duped + safely.
+ ''' + self.flush_pre_post() + if os.path.isabs(arg): + self.append(arg) + else: + self._container.append(arg) + + def extend_direct(self, iterable: T.Iterable[str]) -> None: + ''' + Extend using the elements in the specified iterable without any + reordering or de-dup except for absolute paths where the order of + include search directories is not relevant + ''' + self.flush_pre_post() + for elem in iterable: + self.append_direct(elem) + + def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None: + normal_flags = [] + lflags = [] + for i in iterable: + if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')): + lflags.append(i) + else: + normal_flags.append(i) + self.extend(normal_flags) + self.extend_direct(lflags) + + def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = self.copy() + new += args + return new + + def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + ''' + Add two CompilerArgs while taking into account overriding of arguments + and while preserving the order of arguments as much as possible + ''' + tmp_pre = collections.deque() # type: T.Deque[str] + if not isinstance(args, collections.abc.Iterable): + raise TypeError(f'can only concatenate Iterable[str] (not "{args}") to CompilerArgs') + for arg in args: + # If the argument can be de-duped, do it either by removing the + # previous occurrence of it and adding a new one, or not adding the + # new occurrence. + dedup = self._can_dedup(arg) + if dedup is Dedup.UNIQUE: + # Argument already exists and adding a new instance is useless + if arg in self._container or arg in self.pre or arg in self.post: + continue + if self._should_prepend(arg): + tmp_pre.appendleft(arg) + else: + self.post.append(arg) + self.pre.extendleft(tmp_pre) + #pre and post is going to be merged later before a iter call + return self + + def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = type(self)(self.compiler, args) + new += self + return new + + def __eq__(self, other: object) -> T.Union[bool]: + self.flush_pre_post() + # Only allow equality checks against other CompilerArgs and lists instances + if isinstance(other, CompilerArgs): + return self.compiler == other.compiler and self._container == other._container + elif isinstance(other, list): + return self._container == other + return NotImplemented + + def append(self, arg: str) -> None: + self.__iadd__([arg]) + + def extend(self, args: T.Iterable[str]) -> None: + self.__iadd__(args) + + def __repr__(self) -> str: + self.flush_pre_post() + return f'CompilerArgs({self.compiler!r}, {self._container!r})' diff --git a/meson/mesonbuild/ast/__init__.py b/meson/mesonbuild/ast/__init__.py new file mode 100644 index 000000000..4fb56cb86 --- /dev/null +++ b/meson/mesonbuild/ast/__init__.py @@ -0,0 +1,34 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
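Picking up the DemoCompilerArgs sketch from the arglist.py hunk above (still editorial: DummyCompiler is a made-up stand-in whose only job is to satisfy to_native(), not a real Meson compiler), the override, de-dup and *_direct behaviours of the class look like this at runtime:

    import typing as T

    class DummyCompiler:
        def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
            return list(args)          # pretend the native argument syntax is already GCC-style

    args = DemoCompilerArgs(DummyCompiler(), ['-Ifoo', '-lbar', '-c'])
    args += ['-Ibaz', '-c']            # '-Ibaz' overrides by prepending, the duplicate '-c' is skipped
    print(args.to_native(copy=True))   # ['-Ibaz', '-Ifoo', '-lbar', '-c']

    args.extend_direct(['-Iqux'])      # appended verbatim: no reordering and no de-dup
    print(list(args))                  # ['-Ibaz', '-Ifoo', '-lbar', '-c', '-Iqux']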
+ +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +__all__ = [ + 'AstConditionLevel', + 'AstInterpreter', + 'AstIDGenerator', + 'AstIndentationGenerator', + 'AstJSONPrinter', + 'AstVisitor', + 'AstPrinter', + 'IntrospectionInterpreter', + 'build_target_functions', +] + +from .interpreter import AstInterpreter +from .introspection import IntrospectionInterpreter, build_target_functions +from .visitor import AstVisitor +from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator +from .printer import AstPrinter, AstJSONPrinter diff --git a/meson/mesonbuild/ast/interpreter.py b/meson/mesonbuild/ast/interpreter.py new file mode 100644 index 000000000..19b3a1d21 --- /dev/null +++ b/meson/mesonbuild/ast/interpreter.py @@ -0,0 +1,424 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +from .visitor import AstVisitor +from .. import mparser, mesonlib +from .. import environment + +from ..interpreterbase import ( + MesonInterpreterObject, + InterpreterBase, + InvalidArguments, + BreakRequest, + ContinueRequest, + default_resolve_key, + TYPE_nvar, + TYPE_nkwargs, +) + +from ..mparser import ( + AndNode, + ArgumentNode, + ArithmeticNode, + ArrayNode, + AssignmentNode, + BaseNode, + ComparisonNode, + ElementaryNode, + EmptyNode, + ForeachClauseNode, + IdNode, + IfClauseNode, + IndexNode, + MethodNode, + NotNode, + OrNode, + PlusAssignmentNode, + TernaryNode, + UMinusNode, +) + +import os, sys +import typing as T + +class DontCareObject(MesonInterpreterObject): + pass + +class MockExecutable(MesonInterpreterObject): + pass + +class MockStaticLibrary(MesonInterpreterObject): + pass + +class MockSharedLibrary(MesonInterpreterObject): + pass + +class MockCustomTarget(MesonInterpreterObject): + pass + +class MockRunTarget(MesonInterpreterObject): + pass + +ADD_SOURCE = 0 +REMOVE_SOURCE = 1 + +_T = T.TypeVar('_T') +_V = T.TypeVar('_V') + +class AstInterpreter(InterpreterBase): + def __init__(self, source_root: str, subdir: str, subproject: str, visitors: T.Optional[T.List[AstVisitor]] = None): + super().__init__(source_root, subdir, subproject) + self.visitors = visitors if visitors is not None else [] + self.processed_buildfiles = set() # type: T.Set[str] + self.assignments = {} # type: T.Dict[str, BaseNode] + self.assign_vals = {} # type: T.Dict[str, T.Any] + self.reverse_assignment = {} # type: T.Dict[str, BaseNode] + self.funcs.update({'project': self.func_do_nothing, + 'test': self.func_do_nothing, + 'benchmark': self.func_do_nothing, + 'install_headers': self.func_do_nothing, + 'install_man': self.func_do_nothing, + 'install_data': self.func_do_nothing, + 'install_subdir': self.func_do_nothing, + 'configuration_data': self.func_do_nothing, + 'configure_file': self.func_do_nothing, + 'find_program': self.func_do_nothing, + 'include_directories': self.func_do_nothing, 
+ 'add_global_arguments': self.func_do_nothing, + 'add_global_link_arguments': self.func_do_nothing, + 'add_project_arguments': self.func_do_nothing, + 'add_project_link_arguments': self.func_do_nothing, + 'message': self.func_do_nothing, + 'generator': self.func_do_nothing, + 'error': self.func_do_nothing, + 'run_command': self.func_do_nothing, + 'assert': self.func_do_nothing, + 'subproject': self.func_do_nothing, + 'dependency': self.func_do_nothing, + 'get_option': self.func_do_nothing, + 'join_paths': self.func_do_nothing, + 'environment': self.func_do_nothing, + 'import': self.func_do_nothing, + 'vcs_tag': self.func_do_nothing, + 'add_languages': self.func_do_nothing, + 'declare_dependency': self.func_do_nothing, + 'files': self.func_do_nothing, + 'executable': self.func_do_nothing, + 'static_library': self.func_do_nothing, + 'shared_library': self.func_do_nothing, + 'library': self.func_do_nothing, + 'build_target': self.func_do_nothing, + 'custom_target': self.func_do_nothing, + 'run_target': self.func_do_nothing, + 'subdir': self.func_subdir, + 'set_variable': self.func_do_nothing, + 'get_variable': self.func_do_nothing, + 'is_disabler': self.func_do_nothing, + 'is_variable': self.func_do_nothing, + 'disabler': self.func_do_nothing, + 'gettext': self.func_do_nothing, + 'jar': self.func_do_nothing, + 'warning': self.func_do_nothing, + 'shared_module': self.func_do_nothing, + 'option': self.func_do_nothing, + 'both_libraries': self.func_do_nothing, + 'add_test_setup': self.func_do_nothing, + 'find_library': self.func_do_nothing, + 'subdir_done': self.func_do_nothing, + 'alias_target': self.func_do_nothing, + 'summary': self.func_do_nothing, + 'range': self.func_do_nothing, + }) + + def _unholder_args(self, args: _T, kwargs: _V) -> T.Tuple[_T, _V]: + return args, kwargs + + def _holderify(self, res: _T) -> _T: + return res + + def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> bool: + return True + + def load_root_meson_file(self) -> None: + super().load_root_meson_file() + for i in self.visitors: + self.ast.accept(i) + + def func_subdir(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + args = self.flatten_args(args) + if len(args) != 1 or not isinstance(args[0], str): + sys.stderr.write(f'Unable to evaluate subdir({args}) in AstInterpreter --> Skipping\n') + return + + prev_subdir = self.subdir + subdir = os.path.join(prev_subdir, args[0]) + absdir = os.path.join(self.source_root, subdir) + buildfilename = os.path.join(subdir, environment.build_filename) + absname = os.path.join(self.source_root, buildfilename) + symlinkless_dir = os.path.realpath(absdir) + build_file = os.path.join(symlinkless_dir, 'meson.build') + if build_file in self.processed_buildfiles: + sys.stderr.write('Trying to enter {} which has already been visited --> Skipping\n'.format(args[0])) + return + self.processed_buildfiles.add(build_file) + + if not os.path.isfile(absname): + sys.stderr.write(f'Unable to find build file {buildfilename} --> Skipping\n') + return + with open(absname, encoding='utf-8') as f: + code = f.read() + assert(isinstance(code, str)) + try: + codeblock = mparser.Parser(code, absname).parse() + except mesonlib.MesonException as me: + me.file = absname + raise me + + self.subdir = subdir + for i in self.visitors: + codeblock.accept(i) + self.evaluate_codeblock(codeblock) + self.subdir = prev_subdir + + def method_call(self, node: BaseNode) -> bool: + return True + + def evaluate_fstring(self, node: 
mparser.FormatStringNode) -> str: + assert(isinstance(node, mparser.FormatStringNode)) + return node.value + + def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> int: + self.evaluate_statement(cur.left) + self.evaluate_statement(cur.right) + return 0 + + def evaluate_uminusstatement(self, cur: UMinusNode) -> int: + self.evaluate_statement(cur.value) + return 0 + + def evaluate_ternary(self, node: TernaryNode) -> None: + assert(isinstance(node, TernaryNode)) + self.evaluate_statement(node.condition) + self.evaluate_statement(node.trueblock) + self.evaluate_statement(node.falseblock) + + def evaluate_dictstatement(self, node: mparser.DictNode) -> TYPE_nkwargs: + def resolve_key(node: mparser.BaseNode) -> str: + if isinstance(node, mparser.StringNode): + return node.value + return '__AST_UNKNOWN__' + arguments, kwargs = self.reduce_arguments(node.args, key_resolver=resolve_key) + assert (not arguments) + self.argument_depth += 1 + for key, value in kwargs.items(): + if isinstance(key, BaseNode): + self.evaluate_statement(key) + self.argument_depth -= 1 + return {} + + def evaluate_plusassign(self, node: PlusAssignmentNode) -> None: + assert(isinstance(node, PlusAssignmentNode)) + # Cheat by doing a reassignment + self.assignments[node.var_name] = node.value # Save a reference to the value node + if node.value.ast_id: + self.reverse_assignment[node.value.ast_id] = node + self.assign_vals[node.var_name] = self.evaluate_statement(node.value) + + def evaluate_indexing(self, node: IndexNode) -> int: + return 0 + + def unknown_function_called(self, func_name: str) -> None: + pass + + def reduce_arguments( + self, + args: mparser.ArgumentNode, + key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key, + duplicate_key_error: T.Optional[str] = None, + ) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]: + if isinstance(args, ArgumentNode): + kwargs = {} # type: T.Dict[str, TYPE_nvar] + for key, val in args.kwargs.items(): + kwargs[key_resolver(key)] = val + if args.incorrect_order(): + raise InvalidArguments('All keyword arguments must be after positional arguments.') + return self.flatten_args(args.arguments), kwargs + else: + return self.flatten_args(args), {} + + def evaluate_comparison(self, node: ComparisonNode) -> bool: + self.evaluate_statement(node.left) + self.evaluate_statement(node.right) + return False + + def evaluate_andstatement(self, cur: AndNode) -> bool: + self.evaluate_statement(cur.left) + self.evaluate_statement(cur.right) + return False + + def evaluate_orstatement(self, cur: OrNode) -> bool: + self.evaluate_statement(cur.left) + self.evaluate_statement(cur.right) + return False + + def evaluate_notstatement(self, cur: NotNode) -> bool: + self.evaluate_statement(cur.value) + return False + + def evaluate_foreach(self, node: ForeachClauseNode) -> None: + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + pass + except BreakRequest: + pass + + def evaluate_if(self, node: IfClauseNode) -> None: + for i in node.ifs: + self.evaluate_codeblock(i.block) + if not isinstance(node.elseblock, EmptyNode): + self.evaluate_codeblock(node.elseblock) + + def get_variable(self, varname: str) -> int: + return 0 + + def assignment(self, node: AssignmentNode) -> None: + assert(isinstance(node, AssignmentNode)) + self.assignments[node.var_name] = node.value # Save a reference to the value node + if node.value.ast_id: + self.reverse_assignment[node.value.ast_id] = node + self.assign_vals[node.var_name] = self.evaluate_statement(node.value) # Evaluate the value 
just in case + + def resolve_node(self, node: BaseNode, include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.Optional[T.Any]: + def quick_resolve(n: BaseNode, loop_detect: T.Optional[T.List[str]] = None) -> T.Any: + if loop_detect is None: + loop_detect = [] + if isinstance(n, IdNode): + assert isinstance(n.value, str) + if n.value in loop_detect or n.value not in self.assignments: + return [] + return quick_resolve(self.assignments[n.value], loop_detect = loop_detect + [n.value]) + elif isinstance(n, ElementaryNode): + return n.value + else: + return n + + if id_loop_detect is None: + id_loop_detect = [] + result = None + + if not isinstance(node, BaseNode): + return None + + assert node.ast_id + if node.ast_id in id_loop_detect: + return None # Loop detected + id_loop_detect += [node.ast_id] + + # Try to evaluate the value of the node + if isinstance(node, IdNode): + result = quick_resolve(node) + + elif isinstance(node, ElementaryNode): + result = node.value + + elif isinstance(node, NotNode): + result = self.resolve_node(node.value, include_unknown_args, id_loop_detect) + if isinstance(result, bool): + result = not result + + elif isinstance(node, ArrayNode): + result = [x for x in node.args.arguments] + + elif isinstance(node, ArgumentNode): + result = [x for x in node.arguments] + + elif isinstance(node, ArithmeticNode): + if node.operation != 'add': + return None # Only handle string and array concats + l = quick_resolve(node.left) + r = quick_resolve(node.right) + if isinstance(l, str) and isinstance(r, str): + result = l + r # String concatenation detected + else: + result = self.flatten_args(l, include_unknown_args, id_loop_detect) + self.flatten_args(r, include_unknown_args, id_loop_detect) + + elif isinstance(node, MethodNode): + src = quick_resolve(node.source_object) + margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect) + mkwargs = {} # type: T.Dict[str, TYPE_nvar] + try: + if isinstance(src, str): + result = self.string_method_call(src, node.name, margs, mkwargs) + elif isinstance(src, bool): + result = self.bool_method_call(src, node.name, margs, mkwargs) + elif isinstance(src, int): + result = self.int_method_call(src, node.name, margs, mkwargs) + elif isinstance(src, list): + result = self.array_method_call(src, node.name, margs, mkwargs) + elif isinstance(src, dict): + result = self.dict_method_call(src, node.name, margs, mkwargs) + except mesonlib.MesonException: + return None + + # Ensure that the result is fully resolved (no more nodes) + if isinstance(result, BaseNode): + result = self.resolve_node(result, include_unknown_args, id_loop_detect) + elif isinstance(result, list): + new_res = [] # type: T.List[TYPE_nvar] + for i in result: + if isinstance(i, BaseNode): + resolved = self.resolve_node(i, include_unknown_args, id_loop_detect) + if resolved is not None: + new_res += self.flatten_args(resolved, include_unknown_args, id_loop_detect) + else: + new_res += [i] + result = new_res + + return result + + def flatten_args(self, args_raw: T.Union[TYPE_nvar, T.Sequence[TYPE_nvar]], include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.List[TYPE_nvar]: + # Make sure we are always dealing with lists + if isinstance(args_raw, list): + args = args_raw + else: + args = [args_raw] + + flattend_args = [] # type: T.List[TYPE_nvar] + + # Resolve the contents of args + for i in args: + if isinstance(i, BaseNode): + resolved = self.resolve_node(i, include_unknown_args,
id_loop_detect) + if resolved is not None: + if not isinstance(resolved, list): + resolved = [resolved] + flattend_args += resolved + elif isinstance(i, (str, bool, int, float)) or include_unknown_args: + flattend_args += [i] + return flattend_args + + def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_nvar], include_unknown_args: bool = False) -> T.Dict[str, TYPE_nvar]: + flattend_kwargs = {} + for key, val in kwargs.items(): + if isinstance(val, BaseNode): + resolved = self.resolve_node(val, include_unknown_args) + if resolved is not None: + flattend_kwargs[key] = resolved + elif isinstance(val, (str, bool, int, float)) or include_unknown_args: + flattend_kwargs[key] = val + return flattend_kwargs diff --git a/meson/mesonbuild/ast/introspection.py b/meson/mesonbuild/ast/introspection.py new file mode 100644 index 000000000..42813db66 --- /dev/null +++ b/meson/mesonbuild/ast/introspection.py @@ -0,0 +1,330 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool + +from .interpreter import AstInterpreter +from .visitor import AstVisitor +from .. import compilers, environment, mesonlib, optinterpreter +from .. 
import coredata as cdata +from ..mesonlib import MachineChoice, OptionKey +from ..interpreterbase import InvalidArguments, TYPE_nvar +from ..build import BuildTarget, Executable, Jar, SharedLibrary, SharedModule, StaticLibrary +from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode +from ..compilers import detect_compiler_for +import typing as T +import os +import argparse + +build_target_functions = ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries'] + +class IntrospectionHelper(argparse.Namespace): + # mimic an argparse namespace + def __init__(self, cross_file: str): + super().__init__() + self.cross_file = cross_file # type: str + self.native_file = None # type: str + self.cmd_line_options = {} # type: T.Dict[str, str] + + def __eq__(self, other: object) -> bool: + return NotImplemented + +class IntrospectionInterpreter(AstInterpreter): + # Interpreter to detect the options without a build directory + # Most of the code is stolen from interpreter.Interpreter + def __init__(self, + source_root: str, + subdir: str, + backend: str, + visitors: T.Optional[T.List[AstVisitor]] = None, + cross_file: T.Optional[str] = None, + subproject: str = '', + subproject_dir: str = 'subprojects', + env: T.Optional[environment.Environment] = None): + visitors = visitors if visitors is not None else [] + super().__init__(source_root, subdir, subproject, visitors=visitors) + + options = IntrospectionHelper(cross_file) + self.cross_file = cross_file + if env is None: + self.environment = environment.Environment(source_root, None, options) + else: + self.environment = env + self.subproject_dir = subproject_dir + self.coredata = self.environment.get_coredata() + self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt') + self.backend = backend + self.default_options = {OptionKey('backend'): self.backend} + self.project_data = {} # type: T.Dict[str, T.Any] + self.targets = [] # type: T.List[T.Dict[str, T.Any]] + self.dependencies = [] # type: T.List[T.Dict[str, T.Any]] + self.project_node = None # type: BaseNode + + self.funcs.update({ + 'add_languages': self.func_add_languages, + 'dependency': self.func_dependency, + 'executable': self.func_executable, + 'jar': self.func_jar, + 'library': self.func_library, + 'project': self.func_project, + 'shared_library': self.func_shared_lib, + 'shared_module': self.func_shared_module, + 'static_library': self.func_static_lib, + 'both_libraries': self.func_both_lib, + }) + + def func_project(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + if self.project_node: + raise InvalidArguments('Second call to project()') + self.project_node = node + if len(args) < 1: + raise InvalidArguments('Not enough arguments to project(). 
Needs at least the project name.') + + proj_name = args[0] + proj_vers = kwargs.get('version', 'undefined') + proj_langs = self.flatten_args(args[1:]) + if isinstance(proj_vers, ElementaryNode): + proj_vers = proj_vers.value + if not isinstance(proj_vers, str): + proj_vers = 'undefined' + self.project_data = {'descriptive_name': proj_name, 'version': proj_vers} + + if os.path.exists(self.option_file): + oi = optinterpreter.OptionInterpreter(self.subproject) + oi.process(self.option_file) + self.coredata.update_project_options(oi.options) + + def_opts = self.flatten_args(kwargs.get('default_options', [])) + _project_default_options = mesonlib.stringlistify(def_opts) + self.project_default_options = cdata.create_options_dict(_project_default_options, self.subproject) + self.default_options.update(self.project_default_options) + self.coredata.set_default_options(self.default_options, self.subproject, self.environment) + + if not self.is_subproject() and 'subproject_dir' in kwargs: + spdirname = kwargs['subproject_dir'] + if isinstance(spdirname, StringNode): + assert isinstance(spdirname.value, str) + self.subproject_dir = spdirname.value + if not self.is_subproject(): + self.project_data['subprojects'] = [] + subprojects_dir = os.path.join(self.source_root, self.subproject_dir) + if os.path.isdir(subprojects_dir): + for i in os.listdir(subprojects_dir): + if os.path.isdir(os.path.join(subprojects_dir, i)): + self.do_subproject(i) + + self.coredata.init_backend_options(self.backend) + options = {k: v for k, v in self.environment.options.items() if k.is_backend()} + + self.coredata.set_options(options) + self._add_languages(proj_langs, MachineChoice.HOST) + self._add_languages(proj_langs, MachineChoice.BUILD) + + def do_subproject(self, dirname: str) -> None: + subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) + subpr = os.path.join(subproject_dir_abs, dirname) + try: + subi = IntrospectionInterpreter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment, visitors=self.visitors) + subi.analyze() + subi.project_data['name'] = dirname + self.project_data['subprojects'] += [subi.project_data] + except (mesonlib.MesonException, RuntimeError): + return + + def func_add_languages(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + kwargs = self.flatten_kwargs(kwargs) + if 'native' in kwargs: + native = kwargs.get('native', False) + self._add_languages(args, MachineChoice.BUILD if native else MachineChoice.HOST) + else: + for for_machine in [MachineChoice.BUILD, MachineChoice.HOST]: + self._add_languages(args, for_machine) + + def _add_languages(self, raw_langs: T.List[TYPE_nvar], for_machine: MachineChoice) -> None: + langs = [] # type: T.List[str] + for l in self.flatten_args(raw_langs): + if isinstance(l, str): + langs.append(l) + elif isinstance(l, StringNode): + langs.append(l.value) + + for lang in sorted(langs, key=compilers.sort_clink): + lang = lang.lower() + if lang not in self.coredata.compilers[for_machine]: + detect_compiler_for(self.environment, lang, for_machine) + + def func_dependency(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + args = self.flatten_args(args) + kwargs = self.flatten_kwargs(kwargs) + if not args: + return + name = args[0] + has_fallback = 'fallback' in kwargs + required = kwargs.get('required', True) + version = kwargs.get('version', []) + if not isinstance(version, list): 
+ version = [version] + if isinstance(required, ElementaryNode): + required = required.value + if not isinstance(required, bool): + required = False + self.dependencies += [{ + 'name': name, + 'required': required, + 'version': version, + 'has_fallback': has_fallback, + 'conditional': node.condition_level > 0, + 'node': node + }] + + def build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs_raw: T.Dict[str, TYPE_nvar], targetclass: T.Type[BuildTarget]) -> T.Optional[T.Dict[str, T.Any]]: + args = self.flatten_args(args) + if not args or not isinstance(args[0], str): + return None + name = args[0] + srcqueue = [node] + extra_queue = [] + + # Process the sources BEFORE flattening the kwargs, to preserve the original nodes + if 'sources' in kwargs_raw: + srcqueue += mesonlib.listify(kwargs_raw['sources']) + + if 'extra_files' in kwargs_raw: + extra_queue += mesonlib.listify(kwargs_raw['extra_files']) + + kwargs = self.flatten_kwargs(kwargs_raw, True) + + def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]: + res = [] # type: T.List[BaseNode] + while inqueue: + curr = inqueue.pop(0) + arg_node = None + assert(isinstance(curr, BaseNode)) + if isinstance(curr, FunctionNode): + arg_node = curr.args + elif isinstance(curr, ArrayNode): + arg_node = curr.args + elif isinstance(curr, IdNode): + # Try to resolve the ID and append the node to the queue + assert isinstance(curr.value, str) + var_name = curr.value + if var_name in self.assignments: + tmp_node = self.assignments[var_name] + if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)): + inqueue += [tmp_node] + elif isinstance(curr, ArithmeticNode): + inqueue += [curr.left, curr.right] + if arg_node is None: + continue + arg_nodes = arg_node.arguments.copy() + # Pop the first element if the function is a build target function + if isinstance(curr, FunctionNode) and curr.func_name in build_target_functions: + arg_nodes.pop(0) + elemetary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))] + inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))] + if elemetary_nodes: + res += [curr] + return res + + source_nodes = traverse_nodes(srcqueue) + extraf_nodes = traverse_nodes(extra_queue) + + # Make sure nothing can crash when creating the build class + kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in ['install', 'build_by_default', 'build_always']} + kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()} + kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)} + for_machine = MachineChoice.HOST + objects = [] # type: T.List[T.Any] + empty_sources = [] # type: T.List[T.Any] + # Passing the unresolved sources list causes errors + target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, objects, self.environment, kwargs_reduced) + + new_target = { + 'name': target.get_basename(), + 'id': target.get_id(), + 'type': target.get_typename(), + 'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)), + 'subdir': self.subdir, + 'build_by_default': target.build_by_default, + 'installed': target.should_install(), + 'outputs': target.get_outputs(), + 'sources': source_nodes, + 'extra_files': extraf_nodes, + 'kwargs': kwargs, + 'node': node, + } + + self.targets += [new_target] + return new_target + + def build_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: 
T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + default_library = self.coredata.get_option(OptionKey('default_library')) + if default_library == 'shared': + return self.build_target(node, args, kwargs, SharedLibrary) + elif default_library == 'static': + return self.build_target(node, args, kwargs, StaticLibrary) + elif default_library == 'both': + return self.build_target(node, args, kwargs, SharedLibrary) + return None + + def func_executable(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, Executable) + + def func_static_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, StaticLibrary) + + def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, SharedLibrary) + + def func_both_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, SharedLibrary) + + def func_shared_module(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, SharedModule) + + def func_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_library(node, args, kwargs) + + def func_jar(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, Jar) + + def func_build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + if 'target_type' not in kwargs: + return None + target_type = kwargs.pop('target_type') + if isinstance(target_type, ElementaryNode): + target_type = target_type.value + if target_type == 'executable': + return self.build_target(node, args, kwargs, Executable) + elif target_type == 'shared_library': + return self.build_target(node, args, kwargs, SharedLibrary) + elif target_type == 'static_library': + return self.build_target(node, args, kwargs, StaticLibrary) + elif target_type == 'both_libraries': + return self.build_target(node, args, kwargs, SharedLibrary) + elif target_type == 'library': + return self.build_library(node, args, kwargs) + elif target_type == 'jar': + return self.build_target(node, args, kwargs, Jar) + return None + + def is_subproject(self) -> bool: + return self.subproject != '' + + def analyze(self) -> None: + self.load_root_meson_file() + self.sanity_check_ast() + self.parse_project() + self.run() diff --git a/meson/mesonbuild/ast/postprocess.py b/meson/mesonbuild/ast/postprocess.py new file mode 100644 index 000000000..6d808be57 --- /dev/null +++ b/meson/mesonbuild/ast/postprocess.py @@ -0,0 +1,117 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool + +from . import AstVisitor +from .. import mparser +import typing as T + +class AstIndentationGenerator(AstVisitor): + def __init__(self) -> None: + self.level = 0 + + def visit_default_func(self, node: mparser.BaseNode) -> None: + # Store the current level in the node + node.level = self.level + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + self.visit_default_func(node) + node.source_object.accept(self) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.items.accept(self) + node.block.accept(self) + self.level -= 1 + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self.visit_default_func(node) + for i in node.ifs: + i.accept(self) + if node.elseblock: + self.level += 1 + node.elseblock.accept(self) + self.level -= 1 + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.condition.accept(self) + node.block.accept(self) + self.level -= 1 + +class AstIDGenerator(AstVisitor): + def __init__(self) -> None: + self.counter = {} # type: T.Dict[str, int] + + def visit_default_func(self, node: mparser.BaseNode) -> None: + name = type(node).__name__ + if name not in self.counter: + self.counter[name] = 0 + node.ast_id = name + '#' + str(self.counter[name]) + self.counter[name] += 1 + +class AstConditionLevel(AstVisitor): + def __init__(self) -> None: + self.condition_level = 0 + + def visit_default_func(self, node: mparser.BaseNode) -> None: + node.condition_level = self.condition_level + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self.visit_default_func(node) + self.condition_level += 1 + node.items.accept(self) + node.block.accept(self) + self.condition_level -= 1 + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self.visit_default_func(node) + for i in node.ifs: + i.accept(self) + if node.elseblock: + self.condition_level += 1 + node.elseblock.accept(self) + self.condition_level -= 1 + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self.visit_default_func(node) + self.condition_level += 1 + node.condition.accept(self) + node.block.accept(self) + self.condition_level -= 1 diff --git a/meson/mesonbuild/ast/printer.py b/meson/mesonbuild/ast/printer.py new file mode 100644 index 000000000..f18544983 --- /dev/null +++ b/meson/mesonbuild/ast/printer.py @@ -0,0 +1,366 @@ +# Copyright 2019 The Meson 
development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool + +from .. import mparser +from . import AstVisitor +import re +import typing as T + +arithmic_map = { + 'add': '+', + 'sub': '-', + 'mod': '%', + 'mul': '*', + 'div': '/' +} + +class AstPrinter(AstVisitor): + def __init__(self, indent: int = 2, arg_newline_cutoff: int = 5): + self.result = '' + self.indent = indent + self.arg_newline_cutoff = arg_newline_cutoff + self.ci = '' + self.is_newline = True + self.last_level = 0 + + def post_process(self) -> None: + self.result = re.sub(r'\s+\n', '\n', self.result) + + def append(self, data: str, node: mparser.BaseNode) -> None: + self.last_level = node.level + if self.is_newline: + self.result += ' ' * (node.level * self.indent) + self.result += data + self.is_newline = False + + def append_padded(self, data: str, node: mparser.BaseNode) -> None: + if self.result and self.result[-1] not in [' ', '\n']: + data = ' ' + data + self.append(data + ' ', node) + + def newline(self) -> None: + self.result += '\n' + self.is_newline = True + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.append('true' if node.value else 'false', node) + + def visit_IdNode(self, node: mparser.IdNode) -> None: + assert isinstance(node.value, str) + self.append(node.value, node) + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.append(str(node.value), node) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + assert isinstance(node.value, str) + self.append("'" + node.value + "'", node) + + def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None: + assert isinstance(node.value, str) + self.append("f'" + node.value + "'", node) + + def visit_ContinueNode(self, node: mparser.ContinueNode) -> None: + self.append('continue', node) + + def visit_BreakNode(self, node: mparser.BreakNode) -> None: + self.append('break', node) + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self.append('[', node) + node.args.accept(self) + self.append(']', node) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self.append('{', node) + node.args.accept(self) + self.append('}', node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + node.left.accept(self) + self.append_padded('or', node) + node.right.accept(self) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + node.left.accept(self) + self.append_padded('and', node) + node.right.accept(self) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + node.left.accept(self) + self.append_padded(node.ctype, node) + node.right.accept(self) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + node.left.accept(self) + self.append_padded(arithmic_map[node.operation], node) + node.right.accept(self) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + self.append_padded('not', node) + node.value.accept(self) + 
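AstPrinter here and AstJSONPrinter further down are concrete AstVisitor implementations. As a small editorial sketch of the same pattern (names are made up; it reuses the mparser.Parser(code, filename) entry point seen in AstInterpreter.func_subdir above and assumes the mesonbuild package is importable), a visitor that only collects function-call names needs a single override:

    from mesonbuild import mparser
    from mesonbuild.ast import AstVisitor

    class FunctionNameCollector(AstVisitor):
        def __init__(self) -> None:
            super().__init__()
            self.names = []                      # collected in source order

        def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
            self.names.append(node.func_name)
            super().visit_FunctionNode(node)     # keep descending into the call's arguments

    code = "project('demo', 'c')\nexecutable('app', 'main.c')\n"
    collector = FunctionNameCollector()
    mparser.Parser(code, 'meson.build').parse().accept(collector)
    print(collector.names)                       # ['project', 'executable']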
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + for i in node.lines: + i.accept(self) + self.newline() + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + node.iobject.accept(self) + self.append('[', node) + node.index.accept(self) + self.append(']', node) + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + node.source_object.accept(self) + self.append('.' + node.name + '(', node) + node.args.accept(self) + self.append(')', node) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self.append(node.func_name + '(', node) + node.args.accept(self) + self.append(')', node) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + self.append(node.var_name + ' = ', node) + node.value.accept(self) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + self.append(node.var_name + ' += ', node) + node.value.accept(self) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + varnames = [x for x in node.varnames] + self.append_padded('foreach', node) + self.append_padded(', '.join(varnames), node) + self.append_padded(':', node) + node.items.accept(self) + self.newline() + node.block.accept(self) + self.append('endforeach', node) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + prefix = '' + for i in node.ifs: + self.append_padded(prefix + 'if', node) + prefix = 'el' + i.accept(self) + if not isinstance(node.elseblock, mparser.EmptyNode): + self.append('else', node) + node.elseblock.accept(self) + self.append('endif', node) + + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: + self.append_padded('-', node) + node.value.accept(self) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + node.condition.accept(self) + self.newline() + node.block.accept(self) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + node.condition.accept(self) + self.append_padded('?', node) + node.trueblock.accept(self) + self.append_padded(':', node) + node.falseblock.accept(self) + + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + break_args = (len(node.arguments) + len(node.kwargs)) > self.arg_newline_cutoff + for i in node.arguments + list(node.kwargs.values()): + if not isinstance(i, (mparser.ElementaryNode, mparser.IndexNode)): + break_args = True + if break_args: + self.newline() + for i in node.arguments: + i.accept(self) + self.append(', ', node) + if break_args: + self.newline() + for key, val in node.kwargs.items(): + key.accept(self) + self.append_padded(':', node) + val.accept(self) + self.append(', ', node) + if break_args: + self.newline() + if break_args: + self.result = re.sub(r', \n$', '\n', self.result) + else: + self.result = re.sub(r', $', '', self.result) + +class AstJSONPrinter(AstVisitor): + def __init__(self) -> None: + self.result = {} # type: T.Dict[str, T.Any] + self.current = self.result + + def _accept(self, key: str, node: mparser.BaseNode) -> None: + old = self.current + data = {} # type: T.Dict[str, T.Any] + self.current = data + node.accept(self) + self.current = old + self.current[key] = data + + def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None: + old = self.current + datalist = [] # type: T.List[T.Dict[str, T.Any]] + for i in nodes: + self.current = {} + i.accept(self) + datalist += [self.current] + self.current = old + self.current[key] = datalist + + def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None: + old 
= self.current + self.current = data + node.accept(self) + self.current = old + + def setbase(self, node: mparser.BaseNode) -> None: + self.current['node'] = type(node).__name__ + self.current['lineno'] = node.lineno + self.current['colno'] = node.colno + self.current['end_lineno'] = node.end_lineno + self.current['end_colno'] = node.end_colno + + def visit_default_func(self, node: mparser.BaseNode) -> None: + self.setbase(node) + + def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None: + self.current['value'] = node.value + self.setbase(node) + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.gen_ElementaryNode(node) + + def visit_IdNode(self, node: mparser.IdNode) -> None: + self.gen_ElementaryNode(node) + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.gen_ElementaryNode(node) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + self.gen_ElementaryNode(node) + + def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None: + self.gen_ElementaryNode(node) + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['ctype'] = node.ctype + self.setbase(node) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['op'] = arithmic_map[node.operation] + self.setbase(node) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + self._accept_list('lines', node.lines) + self.setbase(node) + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + self._accept('object', node.iobject) + self._accept('index', node.index) + self.setbase(node) + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + self._accept('object', node.source_object) + self._accept('args', node.args) + self.current['name'] = node.name + self.setbase(node) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self._accept('args', node.args) + self.current['name'] = node.func_name + self.setbase(node) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self._accept('items', node.items) + self._accept('block', node.block) + self.current['varnames'] = node.varnames + self.setbase(node) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self._accept_list('ifs', node.ifs) + self._accept('else', node.elseblock) + self.setbase(node) + + def 
visit_UMinusNode(self, node: mparser.UMinusNode) -> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self._accept('condition', node.condition) + self._accept('block', node.block) + self.setbase(node) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + self._accept('condition', node.condition) + self._accept('true', node.trueblock) + self._accept('false', node.falseblock) + self.setbase(node) + + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + self._accept_list('positional', node.arguments) + kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]] + for key, val in node.kwargs.items(): + key_res = {} # type: T.Dict[str, T.Any] + val_res = {} # type: T.Dict[str, T.Any] + self._raw_accept(key, key_res) + self._raw_accept(val, val_res) + kwargs_list += [{'key': key_res, 'val': val_res}] + self.current['kwargs'] = kwargs_list + self.setbase(node) diff --git a/meson/mesonbuild/ast/visitor.py b/meson/mesonbuild/ast/visitor.py new file mode 100644 index 000000000..34a76a8db --- /dev/null +++ b/meson/mesonbuild/ast/visitor.py @@ -0,0 +1,142 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool + +from .. 
import mparser + +class AstVisitor: + def __init__(self) -> None: + pass + + def visit_default_func(self, node: mparser.BaseNode) -> None: + pass + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.visit_default_func(node) + + def visit_IdNode(self, node: mparser.IdNode) -> None: + self.visit_default_func(node) + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.visit_default_func(node) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + self.visit_default_func(node) + + def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None: + self.visit_default_func(node) + + def visit_ContinueNode(self, node: mparser.ContinueNode) -> None: + self.visit_default_func(node) + + def visit_BreakNode(self, node: mparser.BreakNode) -> None: + self.visit_default_func(node) + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self.visit_default_func(node) + node.args.accept(self) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self.visit_default_func(node) + node.args.accept(self) + + def visit_EmptyNode(self, node: mparser.EmptyNode) -> None: + self.visit_default_func(node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + self.visit_default_func(node) + for i in node.lines: + i.accept(self) + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + self.visit_default_func(node) + node.iobject.accept(self) + node.index.accept(self) + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + self.visit_default_func(node) + node.source_object.accept(self) + node.args.accept(self) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self.visit_default_func(node) + node.args.accept(self) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self.visit_default_func(node) + node.items.accept(self) + node.block.accept(self) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self.visit_default_func(node) + for i in node.ifs: + i.accept(self) + node.elseblock.accept(self) + + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self.visit_default_func(node) + node.condition.accept(self) + node.block.accept(self) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + self.visit_default_func(node) + node.condition.accept(self) + node.trueblock.accept(self) + node.falseblock.accept(self) + + def 
visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + self.visit_default_func(node) + for i in node.arguments: + i.accept(self) + for key, val in node.kwargs.items(): + key.accept(self) + val.accept(self) diff --git a/meson/mesonbuild/backend/__init__.py b/meson/mesonbuild/backend/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/meson/mesonbuild/backend/backends.py b/meson/mesonbuild/backend/backends.py new file mode 100644 index 000000000..aa8e844a7 --- /dev/null +++ b/meson/mesonbuild/backend/backends.py @@ -0,0 +1,1616 @@ +# Copyright 2012-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import OrderedDict +from functools import lru_cache +from itertools import chain +from pathlib import Path +import enum +import json +import os +import pickle +import re +import typing as T +import hashlib + +from .. import build +from .. import dependencies +from .. import programs +from .. import mesonlib +from .. import mlog +from ..compilers import LANGUAGES_USING_LDFLAGS, detect +from ..mesonlib import ( + File, MachineChoice, MesonException, OptionType, OrderedSet, OptionOverrideProxy, + classify_unity_sources, OptionKey, join_args +) + +if T.TYPE_CHECKING: + from .._typing import ImmutableListProtocol + from ..arglist import CompilerArgs + from ..compilers import Compiler + from ..interpreter import Interpreter, Test + from ..mesonlib import FileMode + +# Languages that can mix with C or C++ but don't support unity builds yet +# because the syntax we use for unity builds is specific to C/++/ObjC/++. +# Assembly files cannot be unitified and neither can LLVM IR files +LANGS_CANT_UNITY = ('d', 'fortran', 'vala') + +class RegenInfo: + def __init__(self, source_dir, build_dir, depfiles): + self.source_dir = source_dir + self.build_dir = build_dir + self.depfiles = depfiles + +class TestProtocol(enum.Enum): + + EXITCODE = 0 + TAP = 1 + GTEST = 2 + RUST = 3 + + @classmethod + def from_str(cls, string: str) -> 'TestProtocol': + if string == 'exitcode': + return cls.EXITCODE + elif string == 'tap': + return cls.TAP + elif string == 'gtest': + return cls.GTEST + elif string == 'rust': + return cls.RUST + raise MesonException(f'unknown test format {string}') + + def __str__(self) -> str: + cls = type(self) + if self is cls.EXITCODE: + return 'exitcode' + elif self is cls.GTEST: + return 'gtest' + elif self is cls.RUST: + return 'rust' + return 'tap' + + +class CleanTrees: + ''' + Directories outputted by custom targets that have to be manually cleaned + because on Linux `ninja clean` only deletes empty directories. 
+ ''' + def __init__(self, build_dir, trees): + self.build_dir = build_dir + self.trees = trees + +class InstallData: + def __init__(self, source_dir: str, build_dir: str, prefix: str, + strip_bin: T.List[str], install_umask: T.Union[str, int], + mesonintrospect: T.List[str], version: str): + # TODO: in python 3.8 or with typing_Extensions install_umask could be: + # `T.Union[T.Literal['preserve'], int]`, which would be more accurate. + self.source_dir = source_dir + self.build_dir = build_dir + self.prefix = prefix + self.strip_bin = strip_bin + self.install_umask = install_umask + self.targets: T.List[TargetInstallData] = [] + self.headers: T.List[InstallDataBase] = [] + self.man: T.List[InstallDataBase] = [] + self.data: T.List[InstallDataBase] = [] + self.install_scripts: T.List[ExecutableSerialisation] = [] + self.install_subdirs: T.List[SubdirInstallData] = [] + self.mesonintrospect = mesonintrospect + self.version = version + +class TargetInstallData: + def __init__(self, fname: str, outdir: str, aliases: T.Dict[str, str], strip: bool, + install_name_mappings: T.Dict, rpath_dirs_to_remove: T.Set[bytes], + install_rpath: str, install_mode: 'FileMode', subproject: str, optional: bool = False): + self.fname = fname + self.outdir = outdir + self.aliases = aliases + self.strip = strip + self.install_name_mappings = install_name_mappings + self.rpath_dirs_to_remove = rpath_dirs_to_remove + self.install_rpath = install_rpath + self.install_mode = install_mode + self.subproject = subproject + self.optional = optional + +class InstallDataBase: + def __init__(self, path: str, install_path: str, install_mode: 'FileMode', subproject: str): + self.path = path + self.install_path = install_path + self.install_mode = install_mode + self.subproject = subproject + +class SubdirInstallData(InstallDataBase): + def __init__(self, path: str, install_path: str, install_mode: 'FileMode', exclude, subproject: str): + super().__init__(path, install_path, install_mode, subproject) + self.exclude = exclude + +class ExecutableSerialisation: + def __init__(self, cmd_args, env: T.Optional[build.EnvironmentVariables] = None, exe_wrapper=None, + workdir=None, extra_paths=None, capture=None, feed=None) -> None: + self.cmd_args = cmd_args + self.env = env + if exe_wrapper is not None: + assert(isinstance(exe_wrapper, programs.ExternalProgram)) + self.exe_runner = exe_wrapper + self.workdir = workdir + self.extra_paths = extra_paths + self.capture = capture + self.feed = feed + self.pickled = False + self.skip_if_destdir = False + self.verbose = False + self.subproject = '' + +class TestSerialisation: + def __init__(self, name: str, project: str, suite: T.List[str], fname: T.List[str], + is_cross_built: bool, exe_wrapper: T.Optional[programs.ExternalProgram], + needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str], + env: build.EnvironmentVariables, should_fail: bool, + timeout: T.Optional[int], workdir: T.Optional[str], + extra_paths: T.List[str], protocol: TestProtocol, priority: int, + cmd_is_built: bool, depends: T.List[str], version: str): + self.name = name + self.project_name = project + self.suite = suite + self.fname = fname + self.is_cross_built = is_cross_built + if exe_wrapper is not None: + assert(isinstance(exe_wrapper, programs.ExternalProgram)) + self.exe_runner = exe_wrapper + self.is_parallel = is_parallel + self.cmd_args = cmd_args + self.env = env + self.should_fail = should_fail + self.timeout = timeout + self.workdir = workdir + self.extra_paths = extra_paths + self.protocol = 
protocol + self.priority = priority + self.needs_exe_wrapper = needs_exe_wrapper + self.cmd_is_built = cmd_is_built + self.depends = depends + self.version = version + + +def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']: + if backend == 'ninja': + from . import ninjabackend + return ninjabackend.NinjaBackend(build, interpreter) + elif backend == 'vs': + from . import vs2010backend + return vs2010backend.autodetect_vs_version(build, interpreter) + elif backend == 'vs2010': + from . import vs2010backend + return vs2010backend.Vs2010Backend(build, interpreter) + elif backend == 'vs2012': + from . import vs2012backend + return vs2012backend.Vs2012Backend(build, interpreter) + elif backend == 'vs2013': + from . import vs2013backend + return vs2013backend.Vs2013Backend(build, interpreter) + elif backend == 'vs2015': + from . import vs2015backend + return vs2015backend.Vs2015Backend(build, interpreter) + elif backend == 'vs2017': + from . import vs2017backend + return vs2017backend.Vs2017Backend(build, interpreter) + elif backend == 'vs2019': + from . import vs2019backend + return vs2019backend.Vs2019Backend(build, interpreter) + elif backend == 'xcode': + from . import xcodebackend + return xcodebackend.XCodeBackend(build, interpreter) + return None + +# This class contains the basic functionality that is needed by all backends. +# Feel free to move stuff in and out of it as you see fit. +class Backend: + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']): + # Make it possible to construct a dummy backend + # This is used for introspection without a build directory + if build is None: + self.environment = None + return + self.build = build + self.interpreter = interpreter + self.environment = build.environment + self.processed_targets: T.Set[str] = set() + self.name = '' + self.build_dir = self.environment.get_build_dir() + self.source_dir = self.environment.get_source_dir() + self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), + self.environment.get_build_dir()) + self.src_to_build = mesonlib.relpath(self.environment.get_build_dir(), + self.environment.get_source_dir()) + + def generate(self) -> None: + raise RuntimeError(f'generate is not implemented in {type(self).__name__}') + + def get_target_filename(self, t: T.Union[build.Target, build.CustomTargetIndex], *, warn_multi_output: bool = True): + if isinstance(t, build.CustomTarget): + if warn_multi_output and len(t.get_outputs()) != 1: + mlog.warning(f'custom_target {t.name!r} has more than one output! 
' + 'Using the first one.') + filename = t.get_outputs()[0] + elif isinstance(t, build.CustomTargetIndex): + filename = t.get_outputs()[0] + else: + assert(isinstance(t, build.BuildTarget)) + filename = t.get_filename() + return os.path.join(self.get_target_dir(t), filename) + + def get_target_filename_abs(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str: + return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target)) + + def get_base_options_for_target(self, target: build.BuildTarget) -> OptionOverrideProxy: + return OptionOverrideProxy(target.option_overrides_base, + {k: v for k, v in self.environment.coredata.options.items() + if k.type in {OptionType.BASE, OptionType.BUILTIN}}) + + def get_compiler_options_for_target(self, target: build.BuildTarget) -> OptionOverrideProxy: + comp_reg = {k: v for k, v in self.environment.coredata.options.items() if k.is_compiler()} + comp_override = target.option_overrides_compiler + return OptionOverrideProxy(comp_override, comp_reg) + + def get_option_for_target(self, option_name: 'OptionKey', target: build.BuildTarget): + if option_name in target.option_overrides_base: + override = target.option_overrides_base[option_name] + return self.environment.coredata.validate_option_value(option_name, override) + return self.environment.coredata.get_option(option_name.evolve(subproject=target.subproject)) + + def get_source_dir_include_args(self, target, compiler, *, absolute_path=False): + curdir = target.get_subdir() + if absolute_path: + lead = self.source_dir + else: + lead = self.build_to_src + tmppath = os.path.normpath(os.path.join(lead, curdir)) + return compiler.get_include_args(tmppath, False) + + def get_build_dir_include_args(self, target, compiler, *, absolute_path=False): + if absolute_path: + curdir = os.path.join(self.build_dir, target.get_subdir()) + else: + curdir = target.get_subdir() + if curdir == '': + curdir = '.' + return compiler.get_include_args(curdir, False) + + def get_target_filename_for_linking(self, target): + # On some platforms (msvc for instance), the file that is used for + # dynamic linking is not the same as the dynamic library itself. This + # file is called an import library, and we want to link against that. + # On all other platforms, we link to the library directly. 
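# A minimal sketch of the rule described in the comment above, using plain
# values rather than Meson's build.SharedLibrary API (the helper name and
# arguments here are illustrative assumptions): prefer the import library
# when one exists, otherwise link against the library file itself.
from typing import Optional

def link_filename(filename: str, import_filename: Optional[str] = None) -> str:
    return import_filename or filename

print(link_filename('foo.dll', 'foo.lib'))  # foo.lib (MSVC-style import library)
print(link_filename('libfoo.so'))           # libfoo.so (link directly)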
+ if isinstance(target, build.SharedLibrary): + link_lib = target.get_import_filename() or target.get_filename() + return os.path.join(self.get_target_dir(target), link_lib) + elif isinstance(target, build.StaticLibrary): + return os.path.join(self.get_target_dir(target), target.get_filename()) + elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)): + if not target.is_linkable_target(): + raise MesonException(f'Tried to link against custom target "{target.name}", which is not linkable.') + return os.path.join(self.get_target_dir(target), target.get_filename()) + elif isinstance(target, build.Executable): + if target.import_filename: + return os.path.join(self.get_target_dir(target), target.get_import_filename()) + else: + return None + raise AssertionError(f'BUG: Tried to link to {target!r} which is not linkable') + + @lru_cache(maxsize=None) + def get_target_dir(self, target: build.Target) -> str: + if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror': + dirname = target.get_subdir() + else: + dirname = 'meson-out' + return dirname + + def get_target_dir_relative_to(self, t, o): + '''Get a target dir relative to another target's directory''' + target_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t)) + othert_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(o)) + return os.path.relpath(target_dir, othert_dir) + + def get_target_source_dir(self, target): + # if target dir is empty, avoid extraneous trailing / from os.path.join() + target_dir = self.get_target_dir(target) + if target_dir: + return os.path.join(self.build_to_src, target_dir) + return self.build_to_src + + def get_target_private_dir(self, target: build.Target) -> str: + return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p') + + def get_target_private_dir_abs(self, target): + return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) + + @lru_cache(maxsize=None) + def get_target_generated_dir(self, target, gensrc, src): + """ + Takes a BuildTarget, a generator source (CustomTarget or GeneratedList), + and a generated source filename. + Returns the full path of the generated source relative to the build root + """ + # CustomTarget generators output to the build dir of the CustomTarget + if isinstance(gensrc, (build.CustomTarget, build.CustomTargetIndex)): + return os.path.join(self.get_target_dir(gensrc), src) + # GeneratedList generators output to the private build directory of the + # target that the GeneratedList is used in + return os.path.join(self.get_target_private_dir(target), src) + + def get_unity_source_file(self, target, suffix, number): + # There is a potential conflict here, but it is unlikely that + # anyone both enables unity builds and has a file called foo-unity.cpp. 
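# The unity naming scheme used just below, sketched with plain values: a
# target named 'foo' compiled as C++ produces foo-unity0.cpp, foo-unity1.cpp,
# and so on (the sample name and suffix are assumptions for illustration).
name, suffix = 'foo', 'cpp'
print([f'{name}-unity{number}.{suffix}' for number in range(2)])
# ['foo-unity0.cpp', 'foo-unity1.cpp']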
+ osrc = f'{target.name}-unity{number}.{suffix}' + return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc) + + def generate_unity_files(self, target, unity_src): + abs_files = [] + result = [] + compsrcs = classify_unity_sources(target.compilers.values(), unity_src) + unity_size = self.get_option_for_target(OptionKey('unity_size'), target) + + def init_language_file(suffix, unity_file_number): + unity_src = self.get_unity_source_file(target, suffix, unity_file_number) + outfileabs = unity_src.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir()) + outfileabs_tmp = outfileabs + '.tmp' + abs_files.append(outfileabs) + outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp) + if not os.path.exists(outfileabs_tmp_dir): + os.makedirs(outfileabs_tmp_dir) + result.append(unity_src) + return open(outfileabs_tmp, 'w', encoding='utf-8') + + # For each language, generate unity source files and return the list + for comp, srcs in compsrcs.items(): + files_in_current = unity_size + 1 + unity_file_number = 0 + ofile = None + for src in srcs: + if files_in_current >= unity_size: + if ofile: + ofile.close() + ofile = init_language_file(comp.get_default_suffix(), unity_file_number) + unity_file_number += 1 + files_in_current = 0 + ofile.write(f'#include<{src}>\n') + files_in_current += 1 + if ofile: + ofile.close() + + [mesonlib.replace_if_different(x, x + '.tmp') for x in abs_files] + return result + + def relpath(self, todir, fromdir): + return os.path.relpath(os.path.join('dummyprefixdir', todir), + os.path.join('dummyprefixdir', fromdir)) + + def flatten_object_list(self, target, proj_dir_to_build_root=''): + obj_list = self._flatten_object_list(target, target.get_objects(), proj_dir_to_build_root) + return list(dict.fromkeys(obj_list)) + + def _flatten_object_list(self, target, objects, proj_dir_to_build_root): + obj_list = [] + for obj in objects: + if isinstance(obj, str): + o = os.path.join(proj_dir_to_build_root, + self.build_to_src, target.get_subdir(), obj) + obj_list.append(o) + elif isinstance(obj, mesonlib.File): + if obj.is_built: + o = os.path.join(proj_dir_to_build_root, + obj.rel_to_builddir(self.build_to_src)) + obj_list.append(o) + else: + o = os.path.join(proj_dir_to_build_root, + self.build_to_src) + obj_list.append(obj.rel_to_builddir(o)) + elif isinstance(obj, build.ExtractedObjects): + if obj.recursive: + obj_list += self._flatten_object_list(obj.target, obj.objlist, proj_dir_to_build_root) + obj_list += self.determine_ext_objs(obj, proj_dir_to_build_root) + else: + raise MesonException('Unknown data type in object list.') + return obj_list + + def is_swift_target(self, target): + for s in target.sources: + if s.endswith('swift'): + return True + return False + + def determine_swift_dep_dirs(self, target): + result = [] + for l in target.link_targets: + result.append(self.get_target_private_dir_abs(l)) + return result + + def get_executable_serialisation(self, cmd, workdir=None, + extra_bdeps=None, capture=None, feed=None, + env: T.Optional[build.EnvironmentVariables] = None): + exe = cmd[0] + cmd_args = cmd[1:] + if isinstance(exe, programs.ExternalProgram): + exe_cmd = exe.get_command() + exe_for_machine = exe.for_machine + elif isinstance(exe, build.BuildTarget): + exe_cmd = [self.get_target_filename_abs(exe)] + exe_for_machine = exe.for_machine + elif isinstance(exe, build.CustomTarget): + # The output of a custom target can either be directly runnable + # or not, that is, a script, a native binary or a cross compiled + # 
binary when exe wrapper is available and when it is not. + # This implementation is not exhaustive but it works in the + # common cases. + exe_cmd = [self.get_target_filename_abs(exe)] + exe_for_machine = MachineChoice.BUILD + elif isinstance(exe, mesonlib.File): + exe_cmd = [exe.rel_to_builddir(self.environment.source_dir)] + exe_for_machine = MachineChoice.BUILD + else: + exe_cmd = [exe] + exe_for_machine = MachineChoice.BUILD + + machine = self.environment.machines[exe_for_machine] + if machine.is_windows() or machine.is_cygwin(): + extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps or []) + else: + extra_paths = [] + + is_cross_built = not self.environment.machines.matches_build_machine(exe_for_machine) + if is_cross_built and self.environment.need_exe_wrapper(): + exe_wrapper = self.environment.get_exe_wrapper() + if not exe_wrapper or not exe_wrapper.found(): + msg = 'An exe_wrapper is needed but was not found. Please define one ' \ + 'in cross file and check the command and/or add it to PATH.' + raise MesonException(msg) + else: + if exe_cmd[0].endswith('.jar'): + exe_cmd = ['java', '-jar'] + exe_cmd + elif exe_cmd[0].endswith('.exe') and not (mesonlib.is_windows() or mesonlib.is_cygwin() or mesonlib.is_wsl()): + exe_cmd = ['mono'] + exe_cmd + exe_wrapper = None + + workdir = workdir or self.environment.get_build_dir() + return ExecutableSerialisation(exe_cmd + cmd_args, env, + exe_wrapper, workdir, + extra_paths, capture, feed) + + def as_meson_exe_cmdline(self, tname, exe, cmd_args, workdir=None, + extra_bdeps=None, capture=None, feed=None, + force_serialize=False, + env: T.Optional[build.EnvironmentVariables] = None, + verbose: bool = False): + ''' + Serialize an executable for running with a generator or a custom target + ''' + cmd = [exe] + cmd_args + es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env) + es.verbose = verbose + reasons = [] + if es.extra_paths: + reasons.append('to set PATH') + + if es.exe_runner: + reasons.append('to use exe_wrapper') + + if workdir: + reasons.append('to set workdir') + + if any('\n' in c for c in es.cmd_args): + reasons.append('because command contains newlines') + + if es.env and es.env.varnames: + reasons.append('to set env') + + force_serialize = force_serialize or bool(reasons) + + if capture: + reasons.append('to capture output') + if feed: + reasons.append('to feed input') + + if not force_serialize: + if not capture and not feed: + return es.cmd_args, '' + args = [] + if capture: + args += ['--capture', capture] + if feed: + args += ['--feed', feed] + return ((self.environment.get_build_command() + + ['--internal', 'exe'] + args + ['--'] + es.cmd_args), + ', '.join(reasons)) + + if isinstance(exe, (programs.ExternalProgram, + build.BuildTarget, build.CustomTarget)): + basename = exe.name + elif isinstance(exe, mesonlib.File): + basename = os.path.basename(exe.fname) + else: + basename = os.path.basename(exe) + + # Can't just use exe.name here; it will likely be run more than once + # Take a digest of the cmd args, env, workdir, capture, and feed. This + # avoids collisions and also makes the name deterministic over + # regenerations which avoids a rebuild by Ninja because the cmdline + # stays the same. 
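# A small sketch of the digest-based naming explained above: hashing the
# serialized parameters yields a file name that only changes when those
# parameters change, so regeneration leaves the name (and Ninja's view of
# the command line) stable. The sample values are made up.
import hashlib
params = str(['mygen.py', '--out', 'x.c']) + str('/some/workdir') + str(None) + str(None)
digest = hashlib.sha1(params.encode('utf-8')).hexdigest()
print(f'meson_exe_mygen.py_{digest}.dat')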
+ data = bytes(str(es.env) + str(es.cmd_args) + str(es.workdir) + str(capture) + str(feed), + encoding='utf-8') + digest = hashlib.sha1(data).hexdigest() + scratch_file = f'meson_exe_{basename}_{digest}.dat' + exe_data = os.path.join(self.environment.get_scratch_dir(), scratch_file) + with open(exe_data, 'wb') as f: + pickle.dump(es, f) + return (self.environment.get_build_command() + ['--internal', 'exe', '--unpickle', exe_data], + ', '.join(reasons)) + + def serialize_tests(self): + test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat') + with open(test_data, 'wb') as datafile: + self.write_test_file(datafile) + benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat') + with open(benchmark_data, 'wb') as datafile: + self.write_benchmark_file(datafile) + return test_data, benchmark_data + + def determine_linker_and_stdlib_args(self, target): + ''' + If we're building a static library, there is only one static linker. + Otherwise, we query the target for the dynamic linker. + ''' + if isinstance(target, build.StaticLibrary): + return self.build.static_linker[target.for_machine], [] + l, stdlib_args = target.get_clink_dynamic_linker_and_stdlibs() + return l, stdlib_args + + @staticmethod + def _libdir_is_system(libdir, compilers, env): + libdir = os.path.normpath(libdir) + for cc in compilers.values(): + if libdir in cc.get_library_dirs(env): + return True + return False + + def get_external_rpath_dirs(self, target): + dirs = set() + args = [] + for lang in LANGUAGES_USING_LDFLAGS: + try: + args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang)) + except Exception: + pass + # Match rpath formats: + # -Wl,-rpath= + # -Wl,-rpath, + rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)') + # Match solaris style compat runpath formats: + # -Wl,-R + # -Wl,-R, + runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)') + # Match symbols formats: + # -Wl,--just-symbols= + # -Wl,--just-symbols, + symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)') + for arg in args: + rpath_match = rpath_regex.match(arg) + if rpath_match: + for dir in rpath_match.group(1).split(':'): + dirs.add(dir) + runpath_match = runpath_regex.match(arg) + if runpath_match: + for dir in runpath_match.group(1).split(':'): + # The symbols arg is an rpath if the path is a directory + if Path(dir).is_dir(): + dirs.add(dir) + symbols_match = symbols_regex.match(arg) + if symbols_match: + for dir in symbols_match.group(1).split(':'): + # Prevent usage of --just-symbols to specify rpath + if Path(dir).is_dir(): + raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.') + return dirs + + def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True): + paths = [] + for dep in target.external_deps: + if not isinstance(dep, (dependencies.ExternalLibrary, dependencies.PkgConfigDependency)): + continue + la = dep.link_args + if len(la) != 1 or not os.path.isabs(la[0]): + continue + # The only link argument is an absolute path to a library file. + libpath = la[0] + libdir = os.path.dirname(libpath) + if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment): + # No point in adding system paths. + continue + # Don't remove rpaths specified in LDFLAGS. 
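# The -Wl,-rpath pattern defined above, exercised on a few representative
# linker flags; each match may carry several colon-separated directories.
import re
rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
for arg in ('-Wl,-rpath=/opt/lib:/usr/local/lib', '-Wl,-rpath,/srv/lib', '-O2'):
    m = rpath_regex.match(arg)
    if m:
        print(arg, '->', m.group(1).split(':'))
# -Wl,-rpath=/opt/lib:/usr/local/lib -> ['/opt/lib', '/usr/local/lib']
# -Wl,-rpath,/srv/lib -> ['/srv/lib']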
+ if libdir in self.get_external_rpath_dirs(target): + continue + # Windows doesn't support rpaths, but we use this function to + # emulate rpaths by setting PATH, so also accept DLLs here + if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']: + continue + if libdir.startswith(self.environment.get_source_dir()): + rel_to_src = libdir[len(self.environment.get_source_dir()) + 1:] + assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute' + paths.append(os.path.join(self.build_to_src, rel_to_src)) + else: + paths.append(libdir) + return paths + + def determine_rpath_dirs(self, target: build.BuildTarget) -> T.Tuple[str, ...]: + if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror': + result: OrderedSet[str] = target.get_link_dep_subdirs() + else: + result = OrderedSet() + result.add('meson-out') + result.update(self.rpaths_for_bundled_shared_libraries(target)) + target.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result]) + return tuple(result) + + @staticmethod + def canonicalize_filename(fname): + for ch in ('/', '\\', ':'): + fname = fname.replace(ch, '_') + return fname + + def object_filename_from_source(self, target, source): + assert isinstance(source, mesonlib.File) + build_dir = self.environment.get_build_dir() + rel_src = source.rel_to_builddir(self.build_to_src) + + # foo.vala files compile down to foo.c and then foo.c.o, not foo.vala.o + if rel_src.endswith(('.vala', '.gs')): + # See description in generate_vala_compile for this logic. + if source.is_built: + if os.path.isabs(rel_src): + rel_src = rel_src[len(build_dir) + 1:] + rel_src = os.path.relpath(rel_src, self.get_target_private_dir(target)) + else: + rel_src = os.path.basename(rel_src) + # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix. + source = 'meson-generated_' + rel_src[:-5] + '.c' + elif source.is_built: + if os.path.isabs(rel_src): + rel_src = rel_src[len(build_dir) + 1:] + targetdir = self.get_target_private_dir(target) + # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix. + source = 'meson-generated_' + os.path.relpath(rel_src, targetdir) + else: + if os.path.isabs(rel_src): + # Use the absolute path directly to avoid file name conflicts + source = rel_src + else: + source = os.path.relpath(os.path.join(build_dir, rel_src), + os.path.join(self.environment.get_source_dir(), target.get_subdir())) + machine = self.environment.machines[target.for_machine] + return self.canonicalize_filename(source) + '.' 
+ machine.get_object_suffix() + + def determine_ext_objs(self, extobj, proj_dir_to_build_root): + result = [] + + # Merge sources and generated sources + sources = list(extobj.srclist) + for gensrc in extobj.genlist: + for s in gensrc.get_outputs(): + path = self.get_target_generated_dir(extobj.target, gensrc, s) + dirpart, fnamepart = os.path.split(path) + sources.append(File(True, dirpart, fnamepart)) + + # Filter out headers and all non-source files + filtered_sources = [] + for s in sources: + if self.environment.is_source(s) and not self.environment.is_header(s): + filtered_sources.append(s) + elif self.environment.is_object(s): + result.append(s.relative_name()) + sources = filtered_sources + + # extobj could contain only objects and no sources + if not sources: + return result + + targetdir = self.get_target_private_dir(extobj.target) + + # With unity builds, sources don't map directly to objects, + # we only support extracting all the objects in this mode, + # so just return all object files. + if self.is_unity(extobj.target): + compsrcs = classify_unity_sources(extobj.target.compilers.values(), sources) + sources = [] + unity_size = self.get_option_for_target(OptionKey('unity_size'), extobj.target) + + for comp, srcs in compsrcs.items(): + if comp.language in LANGS_CANT_UNITY: + sources += srcs + continue + for i in range(len(srcs) // unity_size + 1): + osrc = self.get_unity_source_file(extobj.target, + comp.get_default_suffix(), i) + sources.append(osrc) + + for osrc in sources: + objname = self.object_filename_from_source(extobj.target, osrc) + objpath = os.path.join(proj_dir_to_build_root, targetdir, objname) + result.append(objpath) + + return result + + def get_pch_include_args(self, compiler, target): + args = [] + pchpath = self.get_target_private_dir(target) + includeargs = compiler.get_include_args(pchpath, False) + p = target.get_pch(compiler.get_language()) + if p: + args += compiler.get_pch_use_args(pchpath, p[0]) + return includeargs + args + + def create_msvc_pch_implementation(self, target, lang, pch_header): + # We have to include the language in the file name, otherwise + # pch.c and pch.cpp will both end up as pch.obj in VS backends. + impl_name = f'meson_pch-{lang}.{lang}' + pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name) + # Make sure to prepend the build dir, since the working directory is + # not defined. Otherwise, we might create the file in the wrong path. + pch_file = os.path.join(self.build_dir, pch_rel_to_build) + os.makedirs(os.path.dirname(pch_file), exist_ok=True) + + content = f'#include "{os.path.basename(pch_header)}"' + pch_file_tmp = pch_file + '.tmp' + with open(pch_file_tmp, 'w', encoding='utf-8') as f: + f.write(content) + mesonlib.replace_if_different(pch_file, pch_file_tmp) + return pch_rel_to_build + + @staticmethod + def escape_extra_args(compiler, args): + # all backslashes in defines are doubly-escaped + extra_args = [] + for arg in args: + if arg.startswith('-D') or arg.startswith('/D'): + arg = arg.replace('\\', '\\\\') + extra_args.append(arg) + + return extra_args + + def generate_basic_compiler_args(self, target: build.BuildTarget, compiler: 'Compiler', no_warn_args: bool = False) -> 'CompilerArgs': + # Create an empty commands list, and start adding arguments from + # various sources in the order in which they must override each other + # starting from hard-coded defaults followed by build options and so on. 
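# A rough picture of the layering described above, using plain lists instead
# of Meson's CompilerArgs: later layers appear later on the command line, so
# for flags where the last occurrence wins (such as -O levels with gcc/clang)
# they override the earlier defaults. The flag values are only examples.
always_args    = ['-pipe']
buildtype_args = ['-O0', '-g']
project_args   = ['-DFOO=1']
env_args       = ['-O2']        # e.g. taken from CFLAGS
commands = always_args + buildtype_args + project_args + env_args
print(commands)  # ['-pipe', '-O0', '-g', '-DFOO=1', '-O2']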
+ commands = compiler.compiler_args() + + copt_proxy = self.get_compiler_options_for_target(target) + # First, the trivial ones that are impossible to override. + # + # Add -nostdinc/-nostdinc++ if needed; can't be overridden + commands += self.get_no_stdlib_args(target, compiler) + # Add things like /NOLOGO or -pipe; usually can't be overridden + commands += compiler.get_always_args() + # Only add warning-flags by default if the buildtype enables it, and if + # we weren't explicitly asked to not emit warnings (for Vala, f.ex) + if no_warn_args: + commands += compiler.get_no_warn_args() + else: + commands += compiler.get_warn_args(self.get_option_for_target(OptionKey('warning_level'), target)) + # Add -Werror if werror=true is set in the build options set on the + # command-line or default_options inside project(). This only sets the + # action to be done for warnings if/when they are emitted, so it's ok + # to set it after get_no_warn_args() or get_warn_args(). + if self.get_option_for_target(OptionKey('werror'), target): + commands += compiler.get_werror_args() + # Add compile args for c_* or cpp_* build options set on the + # command-line or default_options inside project(). + commands += compiler.get_option_compile_args(copt_proxy) + # Add buildtype args: optimization level, debugging, etc. + commands += compiler.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target)) + commands += compiler.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target)) + commands += compiler.get_debug_args(self.get_option_for_target(OptionKey('debug'), target)) + # Add compile args added using add_project_arguments() + commands += self.build.get_project_args(compiler, target.subproject, target.for_machine) + # Add compile args added using add_global_arguments() + # These override per-project arguments + commands += self.build.get_global_args(compiler, target.for_machine) + # Using both /ZI and /Zi at the same times produces a compiler warning. + # We do not add /ZI by default. If it is being used it is because the user has explicitly enabled it. + # /ZI needs to be removed in that case to avoid cl's warning to that effect (D9025 : overriding '/ZI' with '/Zi') + if ('/ZI' in commands) and ('/Zi' in commands): + commands.remove('/Zi') + # Compile args added from the env: CFLAGS/CXXFLAGS, etc, or the cross + # file. We want these to override all the defaults, but not the + # per-target compile args. + commands += self.environment.coredata.get_external_args(target.for_machine, compiler.get_language()) + # Always set -fPIC for shared libraries + if isinstance(target, build.SharedLibrary): + commands += compiler.get_pic_args() + # Set -fPIC for static libraries by default unless explicitly disabled + if isinstance(target, build.StaticLibrary) and target.pic: + commands += compiler.get_pic_args() + elif isinstance(target, (build.StaticLibrary, build.Executable)) and target.pie: + commands += compiler.get_pie_args() + # Add compile args needed to find external dependencies. Link args are + # added while generating the link command. + # NOTE: We must preserve the order in which external deps are + # specified, so we reverse the list before iterating over it. 
+ for dep in reversed(target.get_external_deps()): + if not dep.found(): + continue + + if compiler.language == 'vala': + if isinstance(dep, dependencies.PkgConfigDependency): + if dep.name == 'glib-2.0' and dep.version_reqs is not None: + for req in dep.version_reqs: + if req.startswith(('>=', '==')): + commands += ['--target-glib', req[2:]] + break + commands += ['--pkg', dep.name] + elif isinstance(dep, dependencies.ExternalLibrary): + commands += dep.get_link_args('vala') + else: + commands += compiler.get_dependency_compile_args(dep) + # Qt needs -fPIC for executables + # XXX: We should move to -fPIC for all executables + if isinstance(target, build.Executable): + commands += dep.get_exe_args(compiler) + # For 'automagic' deps: Boost and GTest. Also dependency('threads'). + # pkg-config puts the thread flags itself via `Cflags:` + # Fortran requires extra include directives. + if compiler.language == 'fortran': + for lt in chain(target.link_targets, target.link_whole_targets): + priv_dir = self.get_target_private_dir(lt) + commands += compiler.get_include_args(priv_dir, False) + return commands + + def build_target_link_arguments(self, compiler, deps): + args = [] + for d in deps: + if not (d.is_linkable_target()): + raise RuntimeError(f'Tried to link with a non-library target "{d.get_basename()}".') + arg = self.get_target_filename_for_linking(d) + if not arg: + continue + if compiler.get_language() == 'd': + arg = '-Wl,' + arg + else: + arg = compiler.get_linker_lib_prefix() + arg + args.append(arg) + return args + + def get_mingw_extra_paths(self, target): + paths = OrderedSet() + # The cross bindir + root = self.environment.properties[target.for_machine].get_root() + if root: + paths.add(os.path.join(root, 'bin')) + # The toolchain bindir + sys_root = self.environment.properties[target.for_machine].get_sys_root() + if sys_root: + paths.add(os.path.join(sys_root, 'bin')) + # Get program and library dirs from all target compilers + if isinstance(target, build.BuildTarget): + for cc in target.compilers.values(): + paths.update(cc.get_program_dirs(self.environment)) + paths.update(cc.get_library_dirs(self.environment)) + return list(paths) + + def determine_windows_extra_paths(self, target: T.Union[build.BuildTarget, str], extra_bdeps): + '''On Windows there is no such thing as an rpath. 
+ We must determine all locations of DLLs that this exe + links to and return them so they can be used in unit + tests.''' + result = set() + prospectives = set() + if isinstance(target, build.BuildTarget): + prospectives.update(target.get_transitive_link_deps()) + # External deps + for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False): + result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath))) + for bdep in extra_bdeps: + prospectives.add(bdep) + prospectives.update(bdep.get_transitive_link_deps()) + # Internal deps + for ld in prospectives: + if ld == '' or ld == '.': + continue + dirseg = os.path.join(self.environment.get_build_dir(), self.get_target_dir(ld)) + result.add(dirseg) + if (isinstance(target, build.BuildTarget) and + not self.environment.machines.matches_build_machine(target.for_machine)): + result.update(self.get_mingw_extra_paths(target)) + return list(result) + + def write_benchmark_file(self, datafile): + self.write_test_serialisation(self.build.get_benchmarks(), datafile) + + def write_test_file(self, datafile): + self.write_test_serialisation(self.build.get_tests(), datafile) + + def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSerialisation]: + arr = [] + for t in sorted(tests, key=lambda tst: -1 * tst.priority): + exe = t.get_exe() + if isinstance(exe, programs.ExternalProgram): + cmd = exe.get_command() + else: + cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(t.get_exe()))] + if isinstance(exe, (build.BuildTarget, programs.ExternalProgram)): + test_for_machine = exe.for_machine + else: + # E.g. an external verifier or simulator program run on a generated executable. + # Can always be run without a wrapper. + test_for_machine = MachineChoice.BUILD + + # we allow passing compiled executables to tests, which may be cross built. + # We need to consider these as well when considering whether the target is cross or not. 
+ for a in t.cmd_args: + if isinstance(a, build.BuildTarget): + if a.for_machine is MachineChoice.HOST: + test_for_machine = MachineChoice.HOST + break + + is_cross = self.environment.is_cross_build(test_for_machine) + if is_cross and self.environment.need_exe_wrapper(): + exe_wrapper = self.environment.get_exe_wrapper() + else: + exe_wrapper = None + machine = self.environment.machines[exe.for_machine] + if machine.is_windows() or machine.is_cygwin(): + extra_bdeps = [] + if isinstance(exe, build.CustomTarget): + extra_bdeps = exe.get_transitive_build_target_deps() + extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps) + else: + extra_paths = [] + + cmd_args = [] + depends = set(t.depends) + if isinstance(exe, build.Target): + depends.add(exe) + for a in t.cmd_args: + if isinstance(a, build.Target): + depends.add(a) + if isinstance(a, build.BuildTarget): + extra_paths += self.determine_windows_extra_paths(a, []) + if isinstance(a, mesonlib.File): + a = os.path.join(self.environment.get_build_dir(), a.rel_to_builddir(self.build_to_src)) + cmd_args.append(a) + elif isinstance(a, str): + cmd_args.append(a) + elif isinstance(a, build.Executable): + p = self.construct_target_rel_path(a, t.workdir) + if p == a.get_filename(): + p = './' + p + cmd_args.append(p) + elif isinstance(a, build.Target): + cmd_args.append(self.construct_target_rel_path(a, t.workdir)) + else: + raise MesonException('Bad object in test command.') + ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross, + exe_wrapper, self.environment.need_exe_wrapper(), + t.is_parallel, cmd_args, t.env, + t.should_fail, t.timeout, t.workdir, + extra_paths, t.protocol, t.priority, + isinstance(exe, build.Executable), + [x.get_id() for x in depends], + self.environment.coredata.version) + arr.append(ts) + return arr + + def write_test_serialisation(self, tests: T.List['Test'], datafile: str): + pickle.dump(self.create_test_serialisation(tests), datafile) + + def construct_target_rel_path(self, a, workdir): + if workdir is None: + return self.get_target_filename(a) + assert(os.path.isabs(workdir)) + abs_path = self.get_target_filename_abs(a) + return os.path.relpath(abs_path, workdir) + + def generate_depmf_install(self, d: InstallData) -> None: + if self.build.dep_manifest_name is None: + return + ifilename = os.path.join(self.environment.get_build_dir(), 'depmf.json') + ofilename = os.path.join(self.environment.get_prefix(), self.build.dep_manifest_name) + mfobj = {'type': 'dependency manifest', 'version': '1.0', 'projects': self.build.dep_manifest} + with open(ifilename, 'w', encoding='utf-8') as f: + f.write(json.dumps(mfobj)) + # Copy file from, to, and with mode unchanged + d.data.append(InstallDataBase(ifilename, ofilename, None, '')) + + def get_regen_filelist(self): + '''List of all files whose alteration means that the build + definition needs to be regenerated.''' + deps = [str(Path(self.build_to_src) / df) + for df in self.interpreter.get_build_def_files()] + if self.environment.is_cross_build(): + deps.extend(self.environment.coredata.cross_files) + deps.extend(self.environment.coredata.config_files) + deps.append('meson-private/coredata.dat') + self.check_clock_skew(deps) + return deps + + def generate_regen_info(self): + deps = self.get_regen_filelist() + regeninfo = RegenInfo(self.environment.get_source_dir(), + self.environment.get_build_dir(), + deps) + filename = os.path.join(self.environment.get_scratch_dir(), + 'regeninfo.dump') + with open(filename, 'wb') as f: + 
pickle.dump(regeninfo, f) + + def check_clock_skew(self, file_list): + # If a file that leads to reconfiguration has a time + # stamp in the future, it will trigger an eternal reconfigure + # loop. + import time + now = time.time() + for f in file_list: + absf = os.path.join(self.environment.get_build_dir(), f) + ftime = os.path.getmtime(absf) + delta = ftime - now + # On Windows disk time stamps sometimes point + # to the future by a minuscule amount, less than + # 0.001 seconds. I don't know why. + if delta > 0.001: + raise MesonException(f'Clock skew detected. File {absf} has a time stamp {delta:.4f}s in the future.') + + def build_target_to_cmd_array(self, bt): + if isinstance(bt, build.BuildTarget): + arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(bt))] + else: + arr = bt.get_command() + return arr + + def replace_extra_args(self, args, genlist): + final_args = [] + for a in args: + if a == '@EXTRA_ARGS@': + final_args += genlist.get_extra_args() + else: + final_args.append(a) + return final_args + + def replace_outputs(self, args, private_dir, output_list): + newargs = [] + regex = re.compile(r'@OUTPUT(\d+)@') + for arg in args: + m = regex.search(arg) + while m is not None: + index = int(m.group(1)) + src = f'@OUTPUT{index}@' + arg = arg.replace(src, os.path.join(private_dir, output_list[index])) + m = regex.search(arg) + newargs.append(arg) + return newargs + + def get_build_by_default_targets(self): + result = OrderedDict() + # Get all build and custom targets that must be built by default + for name, t in self.build.get_targets().items(): + if t.build_by_default: + result[name] = t + # Get all targets used as test executables and arguments. These must + # also be built by default. XXX: Sometime in the future these should be + # built only before running tests. + for t in self.build.get_tests(): + exe = t.exe + if isinstance(exe, (build.CustomTarget, build.BuildTarget)): + result[exe.get_id()] = exe + for arg in t.cmd_args: + if not isinstance(arg, (build.CustomTarget, build.BuildTarget)): + continue + result[arg.get_id()] = arg + for dep in t.depends: + assert isinstance(dep, (build.CustomTarget, build.BuildTarget)) + result[dep.get_id()] = dep + return result + + @lru_cache(maxsize=None) + def get_custom_target_provided_by_generated_source(self, generated_source: build.CustomTarget) -> 'ImmutableListProtocol[str]': + libs: T.List[str] = [] + for f in generated_source.get_outputs(): + if self.environment.is_library(f): + libs.append(os.path.join(self.get_target_dir(generated_source), f)) + return libs + + @lru_cache(maxsize=None) + def get_custom_target_provided_libraries(self, target: T.Union[build.BuildTarget, build.CustomTarget]) -> 'ImmutableListProtocol[str]': + libs: T.List[str] = [] + for t in target.get_generated_sources(): + if not isinstance(t, build.CustomTarget): + continue + libs.extend(self.get_custom_target_provided_by_generated_source(t)) + return libs + + def is_unity(self, target): + optval = self.get_option_for_target(OptionKey('unity'), target) + if optval == 'on' or (optval == 'subprojects' and target.subproject != ''): + return True + return False + + def get_custom_target_sources(self, target): + ''' + Custom target sources can be of various object types; strings, File, + BuildTarget, even other CustomTargets. + Returns the path to them relative to the build root directory. 
+ ''' + srcs = [] + for i in target.get_sources(): + if isinstance(i, str): + fname = [os.path.join(self.build_to_src, target.subdir, i)] + elif isinstance(i, build.BuildTarget): + fname = [self.get_target_filename(i)] + elif isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): + fname = [os.path.join(self.get_custom_target_output_dir(i), p) for p in i.get_outputs()] + elif isinstance(i, build.GeneratedList): + fname = [os.path.join(self.get_target_private_dir(target), p) for p in i.get_outputs()] + elif isinstance(i, build.ExtractedObjects): + fname = [os.path.join(self.get_target_private_dir(i.target), p) for p in i.get_outputs(self)] + else: + fname = [i.rel_to_builddir(self.build_to_src)] + if target.absolute_paths: + fname = [os.path.join(self.environment.get_build_dir(), f) for f in fname] + srcs += fname + return srcs + + def get_custom_target_depend_files(self, target, absolute_paths=False): + deps = [] + for i in target.depend_files: + if isinstance(i, mesonlib.File): + if absolute_paths: + deps.append(i.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir())) + else: + deps.append(i.rel_to_builddir(self.build_to_src)) + else: + if absolute_paths: + deps.append(os.path.join(self.environment.get_source_dir(), target.subdir, i)) + else: + deps.append(os.path.join(self.build_to_src, target.subdir, i)) + return deps + + def get_custom_target_output_dir(self, target): + # The XCode backend is special. A target foo/bar does + # not go to ${BUILDDIR}/foo/bar but instead to + # ${BUILDDIR}/${BUILDTYPE}/foo/bar. + # Currently we set the include dir to be the former, + # and not the latter. Thus we need this extra customisation + # point. If in the future we make include dirs et al match + # ${BUILDDIR}/${BUILDTYPE} instead, this becomes unnecessary. + return self.get_target_dir(target) + + @lru_cache(maxsize=None) + def get_normpath_target(self, source) -> str: + return os.path.normpath(source) + + def get_custom_target_dirs(self, target, compiler, *, absolute_path=False): + custom_target_include_dirs = [] + for i in target.get_generated_sources(): + # Generator output goes into the target private dir which is + # already in the include paths list. Only custom targets have their + # own target build dir. + if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): + continue + idir = self.get_normpath_target(self.get_custom_target_output_dir(i)) + if not idir: + idir = '.' + if absolute_path: + idir = os.path.join(self.environment.get_build_dir(), idir) + if idir not in custom_target_include_dirs: + custom_target_include_dirs.append(idir) + return custom_target_include_dirs + + def get_custom_target_dir_include_args(self, target, compiler, *, absolute_path=False): + incs = [] + for i in self.get_custom_target_dirs(target, compiler, absolute_path=absolute_path): + incs += compiler.get_include_args(i, False) + return incs + + + def eval_custom_target_command(self, target, absolute_outputs=False): + # We want the outputs to be absolute only when using the VS backend + # XXX: Maybe allow the vs backend to use relative paths too? + source_root = self.build_to_src + build_root = '.' 
+ outdir = self.get_custom_target_output_dir(target) + if absolute_outputs: + source_root = self.environment.get_source_dir() + build_root = self.environment.get_build_dir() + outdir = os.path.join(self.environment.get_build_dir(), outdir) + outputs = [] + for i in target.get_outputs(): + outputs.append(os.path.join(outdir, i)) + inputs = self.get_custom_target_sources(target) + # Evaluate the command list + cmd = [] + for i in target.command: + if isinstance(i, build.BuildTarget): + cmd += self.build_target_to_cmd_array(i) + continue + elif isinstance(i, build.CustomTarget): + # GIR scanner will attempt to execute this binary but + # it assumes that it is in path, so always give it a full path. + tmp = i.get_outputs()[0] + i = os.path.join(self.get_custom_target_output_dir(i), tmp) + elif isinstance(i, mesonlib.File): + i = i.rel_to_builddir(self.build_to_src) + if target.absolute_paths or absolute_outputs: + i = os.path.join(self.environment.get_build_dir(), i) + # FIXME: str types are blindly added ignoring 'target.absolute_paths' + # because we can't know if they refer to a file or just a string + elif isinstance(i, str): + if '@SOURCE_ROOT@' in i: + i = i.replace('@SOURCE_ROOT@', source_root) + if '@BUILD_ROOT@' in i: + i = i.replace('@BUILD_ROOT@', build_root) + if '@CURRENT_SOURCE_DIR@' in i: + i = i.replace('@CURRENT_SOURCE_DIR@', os.path.join(source_root, target.subdir)) + if '@DEPFILE@' in i: + if target.depfile is None: + msg = f'Custom target {target.name!r} has @DEPFILE@ but no depfile ' \ + 'keyword argument.' + raise MesonException(msg) + dfilename = os.path.join(outdir, target.depfile) + i = i.replace('@DEPFILE@', dfilename) + if '@PRIVATE_DIR@' in i: + if target.absolute_paths: + pdir = self.get_target_private_dir_abs(target) + else: + pdir = self.get_target_private_dir(target) + i = i.replace('@PRIVATE_DIR@', pdir) + else: + raise RuntimeError(f'Argument {i} is of unknown type {type(i)}') + cmd.append(i) + # Substitute the rest of the template strings + values = mesonlib.get_filenames_templates_dict(inputs, outputs) + cmd = mesonlib.substitute_values(cmd, values) + # This should not be necessary but removing it breaks + # building GStreamer on Windows. The underlying issue + # is problems with quoting backslashes on Windows + # which is the seventh circle of hell. The downside is + # that this breaks custom targets whose command lines + # have backslashes. If you try to fix this be sure to + # check that it does not break GST. + # + # The bug causes file paths such as c:\foo to get escaped + # into c:\\foo. + # + # Unfortunately we have not been able to come up with an + # isolated test case for this so unless you manage to come up + # with one, the only way is to test the building with Gst's + # setup. Note this in your MR or ping us and we will get it + # fixed. 
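# A tiny illustration of the workaround described above: every element of the
# command list has its backslashes turned into forward slashes, which Windows
# tools generally accept. The sample paths are made up.
cmd = ['c:\\src\\gen.py', '--out', 'c:\\build\\out.c']
cmd = [c.replace('\\', '/') for c in cmd]
print(cmd)  # ['c:/src/gen.py', '--out', 'c:/build/out.c']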
+ # + # https://github.com/mesonbuild/meson/pull/737 + cmd = [i.replace('\\', '/') for i in cmd] + return inputs, outputs, cmd + + def get_run_target_env(self, target: build.RunTarget) -> build.EnvironmentVariables: + env = target.env if target.env else build.EnvironmentVariables() + introspect_cmd = join_args(self.environment.get_build_command() + ['introspect']) + env.set('MESON_SOURCE_ROOT', [self.environment.get_source_dir()]) + env.set('MESON_BUILD_ROOT', [self.environment.get_build_dir()]) + env.set('MESON_SUBDIR', [target.subdir]) + env.set('MESONINTROSPECT', [introspect_cmd]) + return env + + def run_postconf_scripts(self) -> None: + from ..scripts.meson_exe import run_exe + introspect_cmd = join_args(self.environment.get_build_command() + ['introspect']) + env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(), + 'MESON_BUILD_ROOT': self.environment.get_build_dir(), + 'MESONINTROSPECT': introspect_cmd, + } + + for s in self.build.postconf_scripts: + name = ' '.join(s.cmd_args) + mlog.log(f'Running postconf script {name!r}') + run_exe(s, env) + + def create_install_data(self) -> InstallData: + strip_bin = self.environment.lookup_binary_entry(MachineChoice.HOST, 'strip') + if strip_bin is None: + if self.environment.is_cross_build(): + mlog.warning('Cross file does not specify strip binary, result will not be stripped.') + else: + # TODO go through all candidates, like others + strip_bin = [detect.defaults['strip'][0]] + d = InstallData(self.environment.get_source_dir(), + self.environment.get_build_dir(), + self.environment.get_prefix(), + strip_bin, + self.environment.coredata.get_option(OptionKey('install_umask')), + self.environment.get_build_command() + ['introspect'], + self.environment.coredata.version) + self.generate_depmf_install(d) + self.generate_target_install(d) + self.generate_header_install(d) + self.generate_man_install(d) + self.generate_data_install(d) + self.generate_custom_install_script(d) + self.generate_subdir_install(d) + return d + + def create_install_data_files(self): + install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat') + with open(install_data_file, 'wb') as ofile: + pickle.dump(self.create_install_data(), ofile) + + def generate_target_install(self, d: InstallData) -> None: + for t in self.build.get_targets().values(): + if not t.should_install(): + continue + outdirs, custom_install_dir = t.get_install_dir(self.environment) + # Sanity-check the outputs and install_dirs + num_outdirs, num_out = len(outdirs), len(t.get_outputs()) + if num_outdirs != 1 and num_outdirs != num_out: + m = 'Target {!r} has {} outputs: {!r}, but only {} "install_dir"s were found.\n' \ + "Pass 'false' for outputs that should not be installed and 'true' for\n" \ + 'using the default installation directory for an output.' + raise MesonException(m.format(t.name, num_out, t.get_outputs(), num_outdirs)) + install_mode = t.get_custom_install_mode() + # Install the target output(s) + if isinstance(t, build.BuildTarget): + # In general, stripping static archives is tricky and full of pitfalls. + # Wholesale stripping of static archives with a command such as + # + # strip libfoo.a + # + # is broken, as GNU's strip will remove *every* symbol in a static + # archive. One solution to this nonintuitive behaviour would be + # to only strip local/debug symbols. Unfortunately, strip arguments + # are not specified by POSIX and therefore not portable. GNU's `-g` + # option (i.e. remove debug symbols) is equivalent to Apple's `-S`. 
+ # + # TODO: Create GNUStrip/AppleStrip/etc. hierarchy for more + # fine-grained stripping of static archives. + should_strip = not isinstance(t, build.StaticLibrary) and self.get_option_for_target(OptionKey('strip'), t) + assert isinstance(should_strip, bool), 'for mypy' + # Install primary build output (library/executable/jar, etc) + # Done separately because of strip/aliases/rpath + if outdirs[0] is not False: + mappings = t.get_link_deps_mapping(d.prefix, self.environment) + i = TargetInstallData(self.get_target_filename(t), outdirs[0], + t.get_aliases(), should_strip, mappings, + t.rpath_dirs_to_remove, + t.install_rpath, install_mode, t.subproject) + d.targets.append(i) + + if isinstance(t, (build.SharedLibrary, build.SharedModule, build.Executable)): + # On toolchains/platforms that use an import library for + # linking (separate from the shared library with all the + # code), we need to install that too (dll.a/.lib). + if t.get_import_filename(): + if custom_install_dir: + # If the DLL is installed into a custom directory, + # install the import library into the same place so + # it doesn't go into a surprising place + implib_install_dir = outdirs[0] + else: + implib_install_dir = self.environment.get_import_lib_dir() + # Install the import library; may not exist for shared modules + i = TargetInstallData(self.get_target_filename_for_linking(t), + implib_install_dir, {}, False, {}, set(), '', install_mode, + t.subproject, optional=isinstance(t, build.SharedModule)) + d.targets.append(i) + + if not should_strip and t.get_debug_filename(): + debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename()) + i = TargetInstallData(debug_file, outdirs[0], + {}, False, {}, set(), '', + install_mode, t.subproject, + optional=True) + d.targets.append(i) + # Install secondary outputs. Only used for Vala right now. + if num_outdirs > 1: + for output, outdir in zip(t.get_outputs()[1:], outdirs[1:]): + # User requested that we not install this output + if outdir is False: + continue + f = os.path.join(self.get_target_dir(t), output) + i = TargetInstallData(f, outdir, {}, False, {}, set(), None, + install_mode, t.subproject) + d.targets.append(i) + elif isinstance(t, build.CustomTarget): + # If only one install_dir is specified, assume that all + # outputs will be installed into it. This is for + # backwards-compatibility and because it makes sense to + # avoid repetition since this is a common use-case. 
+ # + # To selectively install only some outputs, pass `false` as + # the install_dir for the corresponding output by index + if num_outdirs == 1 and num_out > 1: + for output in t.get_outputs(): + f = os.path.join(self.get_target_dir(t), output) + i = TargetInstallData(f, outdirs[0], {}, False, {}, set(), None, install_mode, + t.subproject, optional=not t.build_by_default) + d.targets.append(i) + else: + for output, outdir in zip(t.get_outputs(), outdirs): + # User requested that we not install this output + if outdir is False: + continue + f = os.path.join(self.get_target_dir(t), output) + i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode, + t.subproject, optional=not t.build_by_default) + d.targets.append(i) + + def generate_custom_install_script(self, d: InstallData) -> None: + d.install_scripts = self.build.install_scripts + + def generate_header_install(self, d: InstallData) -> None: + incroot = self.environment.get_includedir() + headers = self.build.get_headers() + + srcdir = self.environment.get_source_dir() + builddir = self.environment.get_build_dir() + for h in headers: + outdir = h.get_custom_install_dir() + if outdir is None: + subdir = h.get_install_subdir() + if subdir is None: + outdir = incroot + else: + outdir = os.path.join(incroot, subdir) + + for f in h.get_sources(): + if not isinstance(f, File): + raise MesonException(f'Invalid header type {f!r} can\'t be installed') + abspath = f.absolute_path(srcdir, builddir) + i = InstallDataBase(abspath, outdir, h.get_custom_install_mode(), h.subproject) + d.headers.append(i) + + def generate_man_install(self, d: InstallData) -> None: + manroot = self.environment.get_mandir() + man = self.build.get_man() + for m in man: + for f in m.get_sources(): + num = f.split('.')[-1] + subdir = m.get_custom_install_dir() + if subdir is None: + if m.locale: + subdir = os.path.join(manroot, m.locale, 'man' + num) + else: + subdir = os.path.join(manroot, 'man' + num) + fname = f.fname + if m.locale: # strip locale from file name + fname = fname.replace(f'.{m.locale}', '') + srcabs = f.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) + dstabs = os.path.join(subdir, os.path.basename(fname)) + i = InstallDataBase(srcabs, dstabs, m.get_custom_install_mode(), m.subproject) + d.man.append(i) + + def generate_data_install(self, d: InstallData): + data = self.build.get_data() + srcdir = self.environment.get_source_dir() + builddir = self.environment.get_build_dir() + for de in data: + assert(isinstance(de, build.Data)) + subdir = de.install_dir + if not subdir: + subdir = os.path.join(self.environment.get_datadir(), self.interpreter.build.project_name) + for src_file, dst_name in zip(de.sources, de.rename): + assert(isinstance(src_file, mesonlib.File)) + dst_abs = os.path.join(subdir, dst_name) + i = InstallDataBase(src_file.absolute_path(srcdir, builddir), dst_abs, de.install_mode, de.subproject) + d.data.append(i) + + def generate_subdir_install(self, d: InstallData) -> None: + for sd in self.build.get_install_subdirs(): + if sd.from_source_dir: + from_dir = self.environment.get_source_dir() + else: + from_dir = self.environment.get_build_dir() + src_dir = os.path.join(from_dir, + sd.source_subdir, + sd.installable_subdir).rstrip('/') + dst_dir = os.path.join(self.environment.get_prefix(), + sd.install_dir) + if not sd.strip_directory: + dst_dir = os.path.join(dst_dir, os.path.basename(src_dir)) + i = SubdirInstallData(src_dir, dst_dir, sd.install_mode, sd.exclude, sd.subproject) + 
d.install_subdirs.append(i) + + def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]: + ''' + Returns a list of source dicts with the following format for a given target: + [ + { + "language": "", + "compiler": ["result", "of", "comp.get_exelist()"], + "parameters": ["list", "of", "compiler", "parameters], + "sources": ["list", "of", "all", "", "source", "files"], + "generated_sources": ["list", "of", "generated", "source", "files"] + } + ] + + This is a limited fallback / reference implementation. The backend should override this method. + ''' + if isinstance(target, (build.CustomTarget, build.BuildTarget)): + source_list_raw = target.sources + source_list = [] + for j in source_list_raw: + if isinstance(j, mesonlib.File): + source_list += [j.absolute_path(self.source_dir, self.build_dir)] + elif isinstance(j, str): + source_list += [os.path.join(self.source_dir, j)] + elif isinstance(j, (build.CustomTarget, build.BuildTarget)): + source_list += [os.path.join(self.build_dir, j.get_subdir(), o) for o in j.get_outputs()] + source_list = list(map(lambda x: os.path.normpath(x), source_list)) + + compiler = [] + if isinstance(target, build.CustomTarget): + tmp_compiler = target.command + if not isinstance(compiler, list): + tmp_compiler = [compiler] + for j in tmp_compiler: + if isinstance(j, mesonlib.File): + compiler += [j.absolute_path(self.source_dir, self.build_dir)] + elif isinstance(j, str): + compiler += [j] + elif isinstance(j, (build.BuildTarget, build.CustomTarget)): + compiler += j.get_outputs() + else: + raise RuntimeError(f'Type "{type(j).__name__}" is not supported in get_introspection_data. This is a bug') + + return [{ + 'language': 'unknown', + 'compiler': compiler, + 'parameters': [], + 'sources': source_list, + 'generated_sources': [] + }] + + return [] + + def get_devenv(self) -> build.EnvironmentVariables: + env = build.EnvironmentVariables() + extra_paths = set() + library_paths = set() + for t in self.build.get_targets().values(): + cross_built = not self.environment.machines.matches_build_machine(t.for_machine) + can_run = not cross_built or not self.environment.need_exe_wrapper() + in_default_dir = t.should_install() and not t.get_install_dir(self.environment)[1] + if not can_run or not in_default_dir: + continue + tdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t)) + if isinstance(t, build.Executable): + # Add binaries that are going to be installed in bindir into PATH + # so they get used by default instead of searching on system when + # in developer environment. + extra_paths.add(tdir) + if mesonlib.is_windows() or mesonlib.is_cygwin(): + # On windows we cannot rely on rpath to run executables from build + # directory. We have to add in PATH the location of every DLL needed. + extra_paths.update(self.determine_windows_extra_paths(t, [])) + elif isinstance(t, build.SharedLibrary): + # Add libraries that are going to be installed in libdir into + # LD_LIBRARY_PATH. This allows running system applications using + # that library. 
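+ # Illustrative sketch (hypothetical layout, not taken from this file):
+ # with build/tools/foo (an executable) and build/src/libbar.so (a shared
+ # library), the returned environment would roughly prepend build/tools
+ # to PATH and build/src to LD_LIBRARY_PATH (DYLD_LIBRARY_PATH on macOS;
+ # on Windows/Cygwin the library dir is added to PATH instead).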
+ library_paths.add(tdir) + if mesonlib.is_windows() or mesonlib.is_cygwin(): + extra_paths.update(library_paths) + elif mesonlib.is_osx(): + env.prepend('DYLD_LIBRARY_PATH', list(library_paths)) + else: + env.prepend('LD_LIBRARY_PATH', list(library_paths)) + env.prepend('PATH', list(extra_paths)) + return env diff --git a/meson/mesonbuild/backend/ninjabackend.py b/meson/mesonbuild/backend/ninjabackend.py new file mode 100644 index 000000000..ca8379161 --- /dev/null +++ b/meson/mesonbuild/backend/ninjabackend.py @@ -0,0 +1,3352 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import typing as T +import os +import re +import pickle +import shlex +import subprocess +from collections import OrderedDict +from enum import Enum, unique +import itertools +from textwrap import dedent +from pathlib import PurePath, Path +from functools import lru_cache + +from . import backends +from .. import modules +from .. import environment, mesonlib +from .. import build +from .. import mlog +from .. import compilers +from ..arglist import CompilerArgs +from ..compilers import ( + Compiler, CCompiler, + FortranCompiler, + mixins, + PGICCompiler, + VisualStudioLikeCompiler, +) +from ..linkers import ArLinker, RSPFileSyntax +from ..mesonlib import ( + File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine, + ProgressBar, quote_arg +) +from ..mesonlib import get_compiler_for_source, has_path_sep, OptionKey +from .backends import CleanTrees +from ..build import GeneratedList, InvalidArguments, ExtractedObjects +from ..interpreter import Interpreter +from ..mesonmain import need_setup_vsenv + +if T.TYPE_CHECKING: + from .._typing import ImmutableListProtocol + from ..linkers import StaticLinker + from ..compilers.cs import CsCompiler + + +FORTRAN_INCLUDE_PAT = r"^\s*#?include\s*['\"](\w+\.\w+)['\"]" +FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$" +FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)" +FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)" + +def cmd_quote(s): + # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks + + # backslash escape any existing double quotes + # any existing backslashes preceding a quote are doubled + s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s) + # any terminal backslashes likewise need doubling + s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s) + # and double quote + s = f'"{s}"' + + return s + +def gcc_rsp_quote(s): + # see: the function buildargv() in libiberty + # + # this differs from sh-quoting in that a backslash *always* escapes the + # following character, even inside single quotes. 
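+ # Illustrative example (added for clarity): gcc_rsp_quote(r'a\b c')
+ # returns the text 'a\\b c', single quotes included -- the backslash is
+ # doubled first and the result is then sh-quoted, matching how
+ # buildargv() re-parses the response file.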
+ + s = s.replace('\\', '\\\\') + + return shlex.quote(s) + +# How ninja executes command lines differs between Unix and Windows +# (see https://ninja-build.org/manual.html#ref_rule_command) +if mesonlib.is_windows(): + quote_func = cmd_quote + execute_wrapper = ['cmd', '/c'] # unused + rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&'] +else: + quote_func = quote_arg + execute_wrapper = [] + rmfile_prefix = ['rm', '-f', '{}', '&&'] + + +def get_rsp_threshold(): + '''Return a conservative estimate of the commandline size in bytes + above which a response file should be used. May be overridden for + debugging by setting environment variable MESON_RSP_THRESHOLD.''' + + if mesonlib.is_windows(): + # Usually 32k, but some projects might use cmd.exe, + # and that has a limit of 8k. + limit = 8192 + else: + # On Linux, ninja always passes the commandline as a single + # big string to /bin/sh, and the kernel limits the size of a + # single argument; see MAX_ARG_STRLEN + limit = 131072 + # Be conservative + limit = limit / 2 + return int(os.environ.get('MESON_RSP_THRESHOLD', limit)) + +# a conservative estimate of the command-line length limit +rsp_threshold = get_rsp_threshold() + +# ninja variables whose value should remain unquoted. The value of these ninja +# variables (or variables we use them in) is interpreted directly by ninja +# (e.g. the value of the depfile variable is a pathname that ninja will read +# from, etc.), so it must not be shell quoted. +raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep', 'dyndep'} + +NINJA_QUOTE_BUILD_PAT = re.compile(r"[$ :\n]") +NINJA_QUOTE_VAR_PAT = re.compile(r"[$ \n]") + +def ninja_quote(text: str, is_build_line=False) -> str: + if is_build_line: + quote_re = NINJA_QUOTE_BUILD_PAT + else: + quote_re = NINJA_QUOTE_VAR_PAT + # Fast path for when no quoting is necessary + if not quote_re.search(text): + return text + if '\n' in text: + errmsg = f'''Ninja does not support newlines in rules. 
The content was: + +{text} + +Please report this error with a test case to the Meson bug tracker.''' + raise MesonException(errmsg) + return quote_re.sub(r'$\g<0>', text) + +class TargetDependencyScannerInfo: + def __init__(self, private_dir: str, source2object: T.Dict[str, str]): + self.private_dir = private_dir + self.source2object = source2object + +@unique +class Quoting(Enum): + both = 0 + notShell = 1 + notNinja = 2 + none = 3 + +class NinjaCommandArg: + def __init__(self, s, quoting = Quoting.both): + self.s = s + self.quoting = quoting + + def __str__(self): + return self.s + + @staticmethod + def list(l, q): + return [NinjaCommandArg(i, q) for i in l] + +class NinjaComment: + def __init__(self, comment): + self.comment = comment + + def write(self, outfile): + for l in self.comment.split('\n'): + outfile.write('# ') + outfile.write(l) + outfile.write('\n') + outfile.write('\n') + +class NinjaRule: + def __init__(self, rule, command, args, description, + rspable = False, deps = None, depfile = None, extra = None, + rspfile_quote_style: RSPFileSyntax = RSPFileSyntax.GCC): + + def strToCommandArg(c): + if isinstance(c, NinjaCommandArg): + return c + + # deal with common cases here, so we don't have to explicitly + # annotate the required quoting everywhere + if c == '&&': + # shell constructs shouldn't be shell quoted + return NinjaCommandArg(c, Quoting.notShell) + if c.startswith('$'): + var = re.search(r'\$\{?(\w*)\}?', c).group(1) + if var not in raw_names: + # ninja variables shouldn't be ninja quoted, and their value + # is already shell quoted + return NinjaCommandArg(c, Quoting.none) + else: + # shell quote the use of ninja variables whose value must + # not be shell quoted (as it also used by ninja) + return NinjaCommandArg(c, Quoting.notNinja) + + return NinjaCommandArg(c) + + self.name = rule + self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile + self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used + self.description = description + self.deps = deps # depstyle 'gcc' or 'msvc' + self.depfile = depfile + self.extra = extra + self.rspable = rspable # if a rspfile can be used + self.refcount = 0 + self.rsprefcount = 0 + self.rspfile_quote_style = rspfile_quote_style + + if self.depfile == '$DEPFILE': + self.depfile += '_UNQUOTED' + + @staticmethod + def _quoter(x, qf = quote_func): + if isinstance(x, NinjaCommandArg): + if x.quoting == Quoting.none: + return x.s + elif x.quoting == Quoting.notNinja: + return qf(x.s) + elif x.quoting == Quoting.notShell: + return ninja_quote(x.s) + # fallthrough + return ninja_quote(qf(str(x))) + + def write(self, outfile): + if self.rspfile_quote_style is RSPFileSyntax.MSVC: + rspfile_quote_func = cmd_quote + else: + rspfile_quote_func = gcc_rsp_quote + + def rule_iter(): + if self.refcount: + yield '' + if self.rsprefcount: + yield '_RSP' + + for rsp in rule_iter(): + outfile.write(f'rule {self.name}{rsp}\n') + if rsp == '_RSP': + outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command]))) + outfile.write(' rspfile = $out.rsp\n') + outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args]))) + else: + outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)]))) + if self.deps: + outfile.write(f' deps = {self.deps}\n') + if self.depfile: + outfile.write(f' depfile = {self.depfile}\n') + outfile.write(f' description = 
{self.description}\n') + if self.extra: + for l in self.extra.split('\n'): + outfile.write(' ') + outfile.write(l) + outfile.write('\n') + outfile.write('\n') + + def length_estimate(self, infiles, outfiles, elems): + # determine variables + # this order of actions only approximates ninja's scoping rules, as + # documented at: https://ninja-build.org/manual.html#ref_scope + ninja_vars = {} + for e in elems: + (name, value) = e + ninja_vars[name] = value + ninja_vars['deps'] = self.deps + ninja_vars['depfile'] = self.depfile + ninja_vars['in'] = infiles + ninja_vars['out'] = outfiles + + # expand variables in command + command = ' '.join([self._quoter(x) for x in self.command + self.args]) + estimate = len(command) + for m in re.finditer(r'(\${\w+}|\$\w+)?[^$]*', command): + if m.start(1) != -1: + estimate -= m.end(1) - m.start(1) + 1 + chunk = m.group(1) + if chunk[1] == '{': + chunk = chunk[2:-1] + else: + chunk = chunk[1:] + chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty + estimate += len(' '.join(chunk)) + + # determine command length + return estimate + +class NinjaBuildElement: + def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None): + self.implicit_outfilenames = implicit_outs or [] + if isinstance(outfilenames, str): + self.outfilenames = [outfilenames] + else: + self.outfilenames = outfilenames + assert(isinstance(rulename, str)) + self.rulename = rulename + if isinstance(infilenames, str): + self.infilenames = [infilenames] + else: + self.infilenames = infilenames + self.deps = OrderedSet() + self.orderdeps = OrderedSet() + self.elems = [] + self.all_outputs = all_outputs + + def add_dep(self, dep): + if isinstance(dep, list): + self.deps.update(dep) + else: + self.deps.add(dep) + + def add_orderdep(self, dep): + if isinstance(dep, list): + self.orderdeps.update(dep) + else: + self.orderdeps.add(dep) + + def add_item(self, name, elems): + # Always convert from GCC-style argument naming to the naming used by the + # current compiler. Also filter system include paths, deduplicate, etc. 
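+ # Hypothetical example: a CompilerArgs value holding ['-L/opt/lib',
+ # '-lfoo'] would typically be rewritten by to_native() into the MSVC
+ # spelling ('/LIBPATH:/opt/lib', 'foo.lib') when the rule's compiler is
+ # MSVC-like, while GCC-like compilers keep the GCC-style arguments.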
+ if isinstance(elems, CompilerArgs): + elems = elems.to_native() + if isinstance(elems, str): + elems = [elems] + self.elems.append((name, elems)) + + if name == 'DEPFILE': + self.elems.append((name + '_UNQUOTED', elems)) + + def _should_use_rspfile(self): + # 'phony' is a rule built-in to ninja + if self.rulename == 'phony': + return False + + if not self.rule.rspable: + return False + + infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames]) + outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames]) + + return self.rule.length_estimate(infilenames, + outfilenames, + self.elems) >= rsp_threshold + + def count_rule_references(self): + if self.rulename != 'phony': + if self._should_use_rspfile(): + self.rule.rsprefcount += 1 + else: + self.rule.refcount += 1 + + def write(self, outfile): + self.check_outputs() + ins = ' '.join([ninja_quote(i, True) for i in self.infilenames]) + outs = ' '.join([ninja_quote(i, True) for i in self.outfilenames]) + implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames]) + if implicit_outs: + implicit_outs = ' | ' + implicit_outs + use_rspfile = self._should_use_rspfile() + if use_rspfile: + rulename = self.rulename + '_RSP' + mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames) + else: + rulename = self.rulename + line = f'build {outs}{implicit_outs}: {rulename} {ins}' + if len(self.deps) > 0: + line += ' | ' + ' '.join([ninja_quote(x, True) for x in sorted(self.deps)]) + if len(self.orderdeps) > 0: + line += ' || ' + ' '.join([ninja_quote(x, True) for x in sorted(self.orderdeps)]) + line += '\n' + # This is the only way I could find to make this work on all + # platforms including Windows command shell. Slash is a dir separator + # on Windows, too, so all characters are unambiguous and, more importantly, + # do not require quoting, unless explicitly specified, which is necessary for + # the csc compiler. + line = line.replace('\\', '/') + if mesonlib.is_windows(): + # Support network paths as backslash, otherwise they are interpreted as + # arguments for compile/link commands when using MSVC + line = ' '.join( + (l.replace('//', '\\\\', 1) if l.startswith('//') else l) + for l in line.split(' ') + ) + outfile.write(line) + + if use_rspfile: + if self.rule.rspfile_quote_style is RSPFileSyntax.MSVC: + qf = cmd_quote + else: + qf = gcc_rsp_quote + else: + qf = quote_func + + for e in self.elems: + (name, elems) = e + should_quote = name not in raw_names + line = f' {name} = ' + newelems = [] + for i in elems: + if not should_quote or i == '&&': # Hackety hack hack + newelems.append(ninja_quote(i)) + else: + newelems.append(ninja_quote(qf(i))) + line += ' '.join(newelems) + line += '\n' + outfile.write(line) + outfile.write('\n') + + def check_outputs(self): + for n in self.outfilenames: + if n in self.all_outputs: + raise MesonException(f'Multiple producers for Ninja target "{n}". 
Please rename your targets.') + self.all_outputs[n] = True + +class NinjaBackend(backends.Backend): + + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'ninja' + self.ninja_filename = 'build.ninja' + self.fortran_deps = {} + self.all_outputs = {} + self.introspection_data = {} + self.created_llvm_ir_rule = PerMachine(False, False) + + def create_target_alias(self, to_target): + # We need to use aliases for targets that might be used as directory + # names to workaround a Ninja bug that breaks `ninja -t clean`. + # This is used for 'reserved' targets such as 'test', 'install', + # 'benchmark', etc, and also for RunTargets. + # https://github.com/mesonbuild/meson/issues/1644 + if not to_target.startswith('meson-'): + raise AssertionError(f'Invalid usage of create_target_alias with {to_target!r}') + from_target = to_target[len('meson-'):] + elem = NinjaBuildElement(self.all_outputs, from_target, 'phony', to_target) + self.add_build(elem) + + def detect_vs_dep_prefix(self, tempfilename): + '''VS writes its dependency in a locale dependent format. + Detect the search prefix to use.''' + # TODO don't hard-code host + for compiler in self.environment.coredata.compilers.host.values(): + # Have to detect the dependency format + + # IFort on windows is MSVC like, but doesn't have /showincludes + if isinstance(compiler, FortranCompiler): + continue + if isinstance(compiler, PGICCompiler) and mesonlib.is_windows(): + # for the purpose of this function, PGI doesn't act enough like MSVC + return open(tempfilename, 'a', encoding='utf-8') + if isinstance(compiler, VisualStudioLikeCompiler): + break + else: + # None of our compilers are MSVC, we're done. + return open(tempfilename, 'a', encoding='utf-8') + filename = os.path.join(self.environment.get_scratch_dir(), + 'incdetect.c') + with open(filename, 'w', encoding='utf-8') as f: + f.write(dedent('''\ + #include + int dummy; + ''')) + + # The output of cl dependency information is language + # and locale dependent. Any attempt at converting it to + # Python strings leads to failure. We _must_ do this detection + # in raw byte mode and write the result in raw bytes. + pc = subprocess.Popen(compiler.get_exelist() + + ['/showIncludes', '/c', 'incdetect.c'], + cwd=self.environment.get_scratch_dir(), + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (stdout, stderr) = pc.communicate() + + # We want to match 'Note: including file: ' in the line + # 'Note: including file: d:\MyDir\include\stdio.h', however + # different locales have different messages with a different + # number of colons. Match up to the the drive name 'd:\'. + # When used in cross compilation, the path separator is a + # forward slash rather than a backslash so handle both; i.e. + # the path is /MyDir/include/stdio.h. + # With certain cross compilation wrappings of MSVC, the paths + # use backslashes, but without the leading drive name, so + # allow the path to start with any path separator, i.e. + # \MyDir\include\stdio.h. + matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|[\\\/]).*stdio.h$") + + def detect_prefix(out): + for line in re.split(rb'\r?\n', out): + match = matchre.match(line) + if match: + with open(tempfilename, 'ab') as binfile: + binfile.write(b'msvc_deps_prefix = ' + match.group(1) + b'\n') + return open(tempfilename, 'a', encoding='utf-8') + return None + + # Some cl wrappers (e.g. 
Squish Coco) output dependency info + # to stderr rather than stdout + result = detect_prefix(stdout) or detect_prefix(stderr) + if result: + return result + + raise MesonException('Could not determine vs dep dependency prefix string.') + + def generate(self): + ninja = environment.detect_ninja_command_and_version(log=True) + if need_setup_vsenv: + builddir = Path(self.environment.get_build_dir()) + builddir = builddir.relative_to(Path.cwd()) + meson_command = mesonlib.join_args(mesonlib.get_meson_command()) + mlog.log() + mlog.log('Visual Studio environment is needed to run Ninja. It is recommended to use Meson wrapper:') + mlog.log(f'{meson_command} compile -C {builddir}') + if ninja is None: + raise MesonException('Could not detect Ninja v1.8.2 or newer') + (self.ninja_command, self.ninja_version) = ninja + outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename) + tempfilename = outfilename + '~' + with open(tempfilename, 'w', encoding='utf-8') as outfile: + outfile.write(f'# This is the build file for project "{self.build.get_project()}"\n') + outfile.write('# It is autogenerated by the Meson build system.\n') + outfile.write('# Do not edit by hand.\n\n') + outfile.write('ninja_required_version = 1.8.2\n\n') + + num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + if num_pools > 0: + outfile.write(f'''pool link_pool + depth = {num_pools} + +''') + + with self.detect_vs_dep_prefix(tempfilename) as outfile: + self.generate_rules() + + self.build_elements = [] + self.generate_phony() + self.add_build_comment(NinjaComment('Build rules for targets')) + for t in ProgressBar(self.build.get_targets().values(), desc='Generating targets'): + self.generate_target(t) + self.add_build_comment(NinjaComment('Test rules')) + self.generate_tests() + self.add_build_comment(NinjaComment('Install rules')) + self.generate_install() + self.generate_dist() + key = OptionKey('b_coverage') + if (key in self.environment.coredata.options and + self.environment.coredata.options[key].value): + self.add_build_comment(NinjaComment('Coverage rules')) + self.generate_coverage_rules() + self.add_build_comment(NinjaComment('Suffix')) + self.generate_utils() + self.generate_ending() + + self.write_rules(outfile) + self.write_builds(outfile) + + default = 'default all\n\n' + outfile.write(default) + # Only overwrite the old build file after the new one has been + # fully created. 
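+ # (os.replace() swaps the file in with a single rename -- atomic on
+ # POSIX -- so ninja never observes a partially written build.ninja.)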
+ os.replace(tempfilename, outfilename) + mlog.cmd_ci_include(outfilename) # For CI debugging + self.generate_compdb() + + # http://clang.llvm.org/docs/JSONCompilationDatabase.html + def generate_compdb(self): + rules = [] + # TODO: Rather than an explicit list here, rules could be marked in the + # rule store as being wanted in compdb + for for_machine in MachineChoice: + for lang in self.environment.coredata.compilers[for_machine]: + rules += [f"{rule}{ext}" for rule in [self.get_compiler_rule_name(lang, for_machine)] + for ext in ['', '_RSP']] + rules += [f"{rule}{ext}" for rule in [self.get_pch_rule_name(lang, for_machine)] + for ext in ['', '_RSP']] + compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else [] + ninja_compdb = self.ninja_command + ['-t', 'compdb'] + compdb_options + rules + builddir = self.environment.get_build_dir() + try: + jsondb = subprocess.check_output(ninja_compdb, cwd=builddir) + with open(os.path.join(builddir, 'compile_commands.json'), 'wb') as f: + f.write(jsondb) + except Exception: + mlog.warning('Could not create compilation database.') + + # Get all generated headers. Any source file might need them so + # we need to add an order dependency to them. + def get_generated_headers(self, target): + if hasattr(target, 'cached_generated_headers'): + return target.cached_generated_headers + header_deps = [] + # XXX: Why don't we add deps to CustomTarget headers here? + for genlist in target.get_generated_sources(): + if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)): + continue + for src in genlist.get_outputs(): + if self.environment.is_header(src): + header_deps.append(self.get_target_generated_dir(target, genlist, src)) + if 'vala' in target.compilers and not isinstance(target, build.Executable): + vala_header = File.from_built_file(self.get_target_dir(target), target.vala_header) + header_deps.append(vala_header) + # Recurse and find generated headers + for dep in itertools.chain(target.link_targets, target.link_whole_targets): + if isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): + header_deps += self.get_generated_headers(dep) + target.cached_generated_headers = header_deps + return header_deps + + def get_target_generated_sources(self, target: build.BuildTarget) -> T.MutableMapping[str, File]: + """ + Returns a dictionary with the keys being the path to the file + (relative to the build directory) of that type and the value + being the GeneratorList or CustomTarget that generated it. 
+ """ + srcs: T.MutableMapping[str, File] = OrderedDict() + for gensrc in target.get_generated_sources(): + for s in gensrc.get_outputs(): + f = self.get_target_generated_dir(target, gensrc, s) + srcs[f] = s + return srcs + + def get_target_sources(self, target: build.BuildTarget) -> T.MutableMapping[str, File]: + srcs: T.MutableMapping[str, File] = OrderedDict() + for s in target.get_sources(): + # BuildTarget sources are always mesonlib.File files which are + # either in the source root, or generated with configure_file and + # in the build root + if not isinstance(s, File): + raise InvalidArguments(f'All sources in target {s!r} must be of type mesonlib.File') + f = s.rel_to_builddir(self.build_to_src) + srcs[f] = s + return srcs + + def get_target_source_can_unity(self, target, source): + if isinstance(source, File): + source = source.fname + if self.environment.is_llvm_ir(source) or \ + self.environment.is_assembly(source): + return False + suffix = os.path.splitext(source)[1][1:].lower() + for lang in backends.LANGS_CANT_UNITY: + if lang not in target.compilers: + continue + if suffix in target.compilers[lang].file_suffixes: + return False + return True + + def create_target_source_introspection(self, target: build.Target, comp: compilers.Compiler, parameters, sources, generated_sources): + ''' + Adds the source file introspection information for a language of a target + + Internal introspection storage formart: + self.introspection_data = { + '': { + : { + 'language: 'lang', + 'compiler': ['comp', 'exe', 'list'], + 'parameters': ['UNIQUE', 'parameter', 'list'], + 'sources': [], + 'generated_sources': [], + } + } + } + ''' + tid = target.get_id() + lang = comp.get_language() + tgt = self.introspection_data[tid] + # Find an existing entry or create a new one + id_hash = (lang, tuple(parameters)) + src_block = tgt.get(id_hash, None) + if src_block is None: + # Convert parameters + if isinstance(parameters, CompilerArgs): + parameters = parameters.to_native(copy=True) + parameters = comp.compute_parameters_with_absolute_paths(parameters, self.build_dir) + # The new entry + src_block = { + 'language': lang, + 'compiler': comp.get_exelist(), + 'parameters': parameters, + 'sources': [], + 'generated_sources': [], + } + tgt[id_hash] = src_block + # Make source files absolute + sources = [x.absolute_path(self.source_dir, self.build_dir) if isinstance(x, File) else os.path.normpath(os.path.join(self.build_dir, x)) + for x in sources] + generated_sources = [x.absolute_path(self.source_dir, self.build_dir) if isinstance(x, File) else os.path.normpath(os.path.join(self.build_dir, x)) + for x in generated_sources] + # Add the source files + src_block['sources'] += sources + src_block['generated_sources'] += generated_sources + + def generate_target(self, target): + try: + if isinstance(target, build.BuildTarget): + os.makedirs(self.get_target_private_dir_abs(target)) + except FileExistsError: + pass + if isinstance(target, build.CustomTarget): + self.generate_custom_target(target) + if isinstance(target, build.RunTarget): + self.generate_run_target(target) + compiled_sources = [] + source2object = {} + name = target.get_id() + if name in self.processed_targets: + return + self.processed_targets.add(name) + # Initialize an empty introspection source list + self.introspection_data[name] = {} + # Generate rules for all dependency targets + self.process_target_dependencies(target) + # If target uses a language that cannot link to C objects, + # just generate for that language and return. 
+ if isinstance(target, build.Jar): + self.generate_jar_target(target) + return + if target.uses_rust(): + self.generate_rust_target(target) + return + if 'cs' in target.compilers: + self.generate_cs_target(target) + return + if 'swift' in target.compilers: + self.generate_swift_target(target) + return + + # Pre-existing target C/C++ sources to be built; dict of full path to + # source relative to build root and the original File object. + target_sources: T.MutableMapping[str, File] + + # GeneratedList and CustomTarget sources to be built; dict of the full + # path to source relative to build root and the generating target/list + generated_sources: T.MutableMapping[str, File] + + # List of sources that have been transpiled from a DSL (like Vala) into + # a language that is haneled below, such as C or C++ + transpiled_sources: T.List[str] + + if 'vala' in target.compilers: + # Sources consumed by valac are filtered out. These only contain + # C/C++ sources, objects, generated libs, and unknown sources now. + target_sources, generated_sources, \ + transpiled_sources = self.generate_vala_compile(target) + elif 'cython' in target.compilers: + target_sources, generated_sources, \ + transpiled_sources = self.generate_cython_transpile(target) + else: + target_sources = self.get_target_sources(target) + generated_sources = self.get_target_generated_sources(target) + transpiled_sources = [] + self.scan_fortran_module_outputs(target) + # Generate rules for GeneratedLists + self.generate_generator_list_rules(target) + + # Generate rules for building the remaining source files in this target + outname = self.get_target_filename(target) + obj_list = [] + is_unity = self.is_unity(target) + header_deps = [] + unity_src = [] + unity_deps = [] # Generated sources that must be built before compiling a Unity target. + header_deps += self.get_generated_headers(target) + + if is_unity: + # Warn about incompatible sources if a unity build is enabled + langs = set(target.compilers.keys()) + langs_cant = langs.intersection(backends.LANGS_CANT_UNITY) + if langs_cant: + langs_are = langs = ', '.join(langs_cant).upper() + langs_are += ' are' if len(langs_cant) > 1 else ' is' + msg = f'{langs_are} not supported in Unity builds yet, so {langs} ' \ + f'sources in the {target.name!r} target will be compiled normally' + mlog.log(mlog.red('FIXME'), msg) + + # Get a list of all generated headers that will be needed while building + # this target's sources (generated sources and pre-existing sources). + # This will be set as dependencies of all the target's sources. At the + # same time, also deal with generated sources that need to be compiled. + generated_source_files = [] + for rel_src in generated_sources.keys(): + dirpart, fnamepart = os.path.split(rel_src) + raw_src = File(True, dirpart, fnamepart) + if self.environment.is_source(rel_src) and not self.environment.is_header(rel_src): + if is_unity and self.get_target_source_can_unity(target, rel_src): + unity_deps.append(raw_src) + abs_src = os.path.join(self.environment.get_build_dir(), rel_src) + unity_src.append(abs_src) + else: + generated_source_files.append(raw_src) + elif self.environment.is_object(rel_src): + obj_list.append(rel_src) + elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src): + pass + else: + # Assume anything not specifically a source file is a header. This is because + # people generate files with weird suffixes (.inc, .fh) that they then include + # in their source files. 
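+ # Hypothetical example: a custom_target() output such as version.inc
+ # lands in header_deps here and becomes an order-only dependency of
+ # every compilation in this target, so it exists before it is included.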
+ header_deps.append(raw_src) + # These are the generated source files that need to be built for use by + # this target. We create the Ninja build file elements for this here + # because we need `header_deps` to be fully generated in the above loop. + for src in generated_source_files: + if self.environment.is_llvm_ir(src): + o, s = self.generate_llvm_ir_compile(target, src) + else: + o, s = self.generate_single_compile(target, src, True, + order_deps=header_deps) + compiled_sources.append(s) + source2object[s] = o + obj_list.append(o) + + use_pch = self.environment.coredata.options.get(OptionKey('b_pch')) + if use_pch and target.has_pch(): + pch_objects = self.generate_pch(target, header_deps=header_deps) + else: + pch_objects = [] + + # Generate compilation targets for C sources generated from Vala + # sources. This can be extended to other $LANG->C compilers later if + # necessary. This needs to be separate for at least Vala + # + # Do not try to unity-build the generated c files from vala, as these + # often contain duplicate symbols and will fail to compile properly + vala_generated_source_files = [] + for src in transpiled_sources: + dirpart, fnamepart = os.path.split(src) + raw_src = File(True, dirpart, fnamepart) + # Generated targets are ordered deps because the must exist + # before the sources compiling them are used. After the first + # compile we get precise dependency info from dep files. + # This should work in all cases. If it does not, then just + # move them from orderdeps to proper deps. + if self.environment.is_header(src): + header_deps.append(raw_src) + else: + # We gather all these and generate compile rules below + # after `header_deps` (above) is fully generated + vala_generated_source_files.append(raw_src) + for src in vala_generated_source_files: + # Passing 'vala' here signifies that we want the compile + # arguments to be specialized for C code generated by + # valac. For instance, no warnings should be emitted. 
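+ # (The 'vala' marker below is presumably what lets generate_single_compile
+ # omit the usual warning flags for these files, per the comment above.)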
+ o, s = self.generate_single_compile(target, src, 'vala', [], header_deps) + obj_list.append(o) + + # Generate compile targets for all the pre-existing sources for this target + for src in target_sources.values(): + if not self.environment.is_header(src): + if self.environment.is_llvm_ir(src): + o, s = self.generate_llvm_ir_compile(target, src) + obj_list.append(o) + elif is_unity and self.get_target_source_can_unity(target, src): + abs_src = os.path.join(self.environment.get_build_dir(), + src.rel_to_builddir(self.build_to_src)) + unity_src.append(abs_src) + else: + o, s = self.generate_single_compile(target, src, False, [], header_deps) + obj_list.append(o) + compiled_sources.append(s) + source2object[s] = o + + obj_list += self.flatten_object_list(target) + if is_unity: + for src in self.generate_unity_files(target, unity_src): + o, s = self.generate_single_compile(target, src, True, unity_deps + header_deps) + obj_list.append(o) + compiled_sources.append(s) + source2object[s] = o + linker, stdlib_args = self.determine_linker_and_stdlib_args(target) + if isinstance(target, build.StaticLibrary) and target.prelink: + final_obj_list = self.generate_prelink(target, obj_list) + else: + final_obj_list = obj_list + elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args) + self.generate_dependency_scan_target(target, compiled_sources, source2object) + self.generate_shlib_aliases(target, self.get_target_dir(target)) + self.add_build(elem) + + def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool: + if mesonlib.version_compare(self.ninja_version, '<1.10.0'): + return False + if 'fortran' in target.compilers: + return True + if 'cpp' not in target.compilers: + return False + # Currently only the preview version of Visual Studio is supported. + cpp = target.compilers['cpp'] + if cpp.get_id() != 'msvc': + return False + cppversion = self.environment.coredata.options[OptionKey('std', machine=target.for_machine, lang='cpp')].value + if cppversion not in ('latest', 'c++latest', 'vc++latest'): + return False + if not mesonlib.current_vs_supports_modules(): + return False + if mesonlib.version_compare(cpp.version, '<19.28.28617'): + return False + return True + + def generate_dependency_scan_target(self, target, compiled_sources, source2object): + if not self.should_use_dyndeps_for_target(target): + return + depscan_file = self.get_dep_scan_file_for(target) + pickle_base = target.name + '.dat' + pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/') + pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/') + rule_name = 'depscan' + scan_sources = self.select_sources_to_scan(compiled_sources) + elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, scan_sources) + elem.add_item('picklefile', pickle_file) + scaninfo = TargetDependencyScannerInfo(self.get_target_private_dir(target), source2object) + with open(pickle_abs, 'wb') as p: + pickle.dump(scaninfo, p) + self.add_build(elem) + + def select_sources_to_scan(self, compiled_sources): + # in practice pick up C++ and Fortran files. If some other language + # requires scanning (possibly Java to deal with inner class files) + # then add them here. 
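+ # Illustrative example: out of ['main.c', 'mod.f90', 'util.cpp'] only
+ # 'mod.f90' and 'util.cpp' would be selected for dependency scanning.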
+ all_suffixes = set(compilers.lang_suffixes['cpp']) | set(compilers.lang_suffixes['fortran']) + selected_sources = [] + for source in compiled_sources: + ext = os.path.splitext(source)[1][1:].lower() + if ext in all_suffixes: + selected_sources.append(source) + return selected_sources + + def process_target_dependencies(self, target): + for t in target.get_dependencies(): + if t.get_id() not in self.processed_targets: + self.generate_target(t) + + def custom_target_generator_inputs(self, target): + for s in target.sources: + if isinstance(s, build.GeneratedList): + self.generate_genlist_for_target(s, target) + + def unwrap_dep_list(self, target): + deps = [] + for i in target.get_dependencies(): + # FIXME, should not grab element at zero but rather expand all. + if isinstance(i, list): + i = i[0] + # Add a dependency on all the outputs of this target + for output in i.get_outputs(): + deps.append(os.path.join(self.get_target_dir(i), output)) + return deps + + def generate_custom_target(self, target): + self.custom_target_generator_inputs(target) + (srcs, ofilenames, cmd) = self.eval_custom_target_command(target) + deps = self.unwrap_dep_list(target) + deps += self.get_custom_target_depend_files(target) + if target.build_always_stale: + deps.append('PHONY') + if target.depfile is None: + rulename = 'CUSTOM_COMMAND' + else: + rulename = 'CUSTOM_COMMAND_DEP' + elem = NinjaBuildElement(self.all_outputs, ofilenames, rulename, srcs) + elem.add_dep(deps) + for d in target.extra_depends: + # Add a dependency on all the outputs of this target + for output in d.get_outputs(): + elem.add_dep(os.path.join(self.get_target_dir(d), output)) + + cmd, reason = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:], + extra_bdeps=target.get_transitive_build_target_deps(), + capture=ofilenames[0] if target.capture else None, + feed=srcs[0] if target.feed else None, + env=target.env) + if reason: + cmd_type = f' (wrapped by meson {reason})' + else: + cmd_type = '' + if target.depfile is not None: + depfile = target.get_dep_outname(elem.infilenames) + rel_dfile = os.path.join(self.get_target_dir(target), depfile) + abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + os.makedirs(abs_pdir, exist_ok=True) + elem.add_item('DEPFILE', rel_dfile) + if target.console: + elem.add_item('pool', 'console') + elem.add_item('COMMAND', cmd) + elem.add_item('description', f'Generating {target.name} with a custom command{cmd_type}') + self.add_build(elem) + self.processed_targets.add(target.get_id()) + + def build_run_target_name(self, target): + if target.subproject != '': + subproject_prefix = f'{target.subproject}@@' + else: + subproject_prefix = '' + return f'{subproject_prefix}{target.name}' + + def generate_run_target(self, target): + target_name = self.build_run_target_name(target) + if not target.command: + # This is an alias target, it has no command, it just depends on + # other targets. 
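+ # (This phony edge is the shape produced by alias_target(): it simply
+ # forwards to the dependencies attached via add_dep() further below.)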
+ elem = NinjaBuildElement(self.all_outputs, target_name, 'phony', []) + else: + target_env = self.get_run_target_env(target) + _, _, cmd = self.eval_custom_target_command(target) + meson_exe_cmd, reason = self.as_meson_exe_cmdline(target_name, target.command[0], cmd[1:], + force_serialize=True, env=target_env, + verbose=True) + cmd_type = f' (wrapped by meson {reason})' + internal_target_name = f'meson-{target_name}' + elem = NinjaBuildElement(self.all_outputs, internal_target_name, 'CUSTOM_COMMAND', []) + elem.add_item('COMMAND', meson_exe_cmd) + elem.add_item('description', f'Running external command {target.name}{cmd_type}') + elem.add_item('pool', 'console') + # Alias that runs the target defined above with the name the user specified + self.create_target_alias(internal_target_name) + deps = self.unwrap_dep_list(target) + deps += self.get_custom_target_depend_files(target) + elem.add_dep(deps) + self.add_build(elem) + self.processed_targets.add(target.get_id()) + + def generate_coverage_command(self, elem, outputs): + targets = self.build.get_targets().values() + use_llvm_cov = False + for target in targets: + if not hasattr(target, 'compilers'): + continue + for compiler in target.compilers.values(): + if compiler.get_id() == 'clang' and not compiler.info.is_darwin(): + use_llvm_cov = True + break + elem.add_item('COMMAND', self.environment.get_build_command() + + ['--internal', 'coverage'] + + outputs + + [self.environment.get_source_dir(), + os.path.join(self.environment.get_source_dir(), + self.build.get_subproject_dir()), + self.environment.get_build_dir(), + self.environment.get_log_dir()] + + (['--use_llvm_cov'] if use_llvm_cov else [])) + + def generate_coverage_rules(self): + e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, []) + e.add_item('description', 'Generates coverage reports') + self.add_build(e) + # Alias that runs the target defined above + self.create_target_alias('meson-coverage') + self.generate_coverage_legacy_rules() + + def generate_coverage_legacy_rules(self): + e = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, ['--xml']) + e.add_item('description', 'Generates XML coverage report') + self.add_build(e) + # Alias that runs the target defined above + self.create_target_alias('meson-coverage-xml') + + e = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, ['--text']) + e.add_item('description', 'Generates text coverage report') + self.add_build(e) + # Alias that runs the target defined above + self.create_target_alias('meson-coverage-text') + + e = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, ['--html']) + e.add_item('description', 'Generates HTML coverage report') + self.add_build(e) + # Alias that runs the target defined above + self.create_target_alias('meson-coverage-html') + + def generate_install(self): + self.create_install_data_files() + elem = NinjaBuildElement(self.all_outputs, 'meson-install', 'CUSTOM_COMMAND', 'PHONY') + elem.add_dep('all') + elem.add_item('DESC', 'Installing files.') + elem.add_item('COMMAND', self.environment.get_build_command() + ['install', '--no-rebuild']) + elem.add_item('pool', 'console') + self.add_build(elem) + # Alias that runs the target defined above + self.create_target_alias('meson-install') + + def generate_tests(self): + 
self.serialize_tests() + cmd = self.environment.get_build_command(True) + ['test', '--no-rebuild'] + if not self.environment.coredata.get_option(OptionKey('stdsplit')): + cmd += ['--no-stdsplit'] + if self.environment.coredata.get_option(OptionKey('errorlogs')): + cmd += ['--print-errorlogs'] + elem = NinjaBuildElement(self.all_outputs, 'meson-test', 'CUSTOM_COMMAND', ['all', 'PHONY']) + elem.add_item('COMMAND', cmd) + elem.add_item('DESC', 'Running all tests.') + elem.add_item('pool', 'console') + self.add_build(elem) + # Alias that runs the above-defined meson-test target + self.create_target_alias('meson-test') + + # And then benchmarks. + cmd = self.environment.get_build_command(True) + [ + 'test', '--benchmark', '--logbase', + 'benchmarklog', '--num-processes=1', '--no-rebuild'] + elem = NinjaBuildElement(self.all_outputs, 'meson-benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY']) + elem.add_item('COMMAND', cmd) + elem.add_item('DESC', 'Running benchmark suite.') + elem.add_item('pool', 'console') + self.add_build(elem) + # Alias that runs the above-defined meson-benchmark target + self.create_target_alias('meson-benchmark') + + def generate_rules(self): + self.rules = [] + self.ruledict = {} + + self.add_rule_comment(NinjaComment('Rules for module scanning.')) + self.generate_scanner_rules() + self.add_rule_comment(NinjaComment('Rules for compiling.')) + self.generate_compile_rules() + self.add_rule_comment(NinjaComment('Rules for linking.')) + self.generate_static_link_rules() + self.generate_dynamic_link_rules() + self.add_rule_comment(NinjaComment('Other rules')) + # Ninja errors out if you have deps = gcc but no depfile, so we must + # have two rules for custom commands. + self.add_rule(NinjaRule('CUSTOM_COMMAND', ['$COMMAND'], [], '$DESC', + extra='restat = 1')) + self.add_rule(NinjaRule('CUSTOM_COMMAND_DEP', ['$COMMAND'], [], '$DESC', + deps='gcc', depfile='$DEPFILE', + extra='restat = 1')) + + c = self.environment.get_build_command() + \ + ['--internal', + 'regenerate', + self.environment.get_source_dir(), + self.environment.get_build_dir(), + '--backend', + 'ninja'] + self.add_rule(NinjaRule('REGENERATE_BUILD', + c, [], + 'Regenerating build files.', + extra='generator = 1')) + + def add_rule_comment(self, comment): + self.rules.append(comment) + + def add_build_comment(self, comment): + self.build_elements.append(comment) + + def add_rule(self, rule): + if rule.name in self.ruledict: + raise MesonException(f'Tried to add rule {rule.name} twice.') + self.rules.append(rule) + self.ruledict[rule.name] = rule + + def add_build(self, build): + self.build_elements.append(build) + + if build.rulename != 'phony': + # reference rule + if build.rulename in self.ruledict: + build.rule = self.ruledict[build.rulename] + else: + mlog.warning(f"build statement for {build.outfilenames} references non-existent rule {build.rulename}") + + def write_rules(self, outfile): + for b in self.build_elements: + if isinstance(b, NinjaBuildElement): + b.count_rule_references() + + for r in self.rules: + r.write(outfile) + + def write_builds(self, outfile): + for b in ProgressBar(self.build_elements, desc='Writing build.ninja'): + b.write(outfile) + + def generate_phony(self): + self.add_build_comment(NinjaComment('Phony build target, always out of date')) + elem = NinjaBuildElement(self.all_outputs, 'PHONY', 'phony', '') + self.add_build(elem) + + def generate_jar_target(self, target): + fname = target.get_filename() + outname_rel = os.path.join(self.get_target_dir(target), fname) + src_list = 
target.get_sources() + class_list = [] + compiler = target.compilers['java'] + c = 'c' + m = 'm' + e = '' + f = 'f' + main_class = target.get_main_class() + if main_class != '': + e = 'e' + + # Add possible java generated files to src list + generated_sources = self.get_target_generated_sources(target) + gen_src_list = [] + for rel_src in generated_sources.keys(): + dirpart, fnamepart = os.path.split(rel_src) + raw_src = File(True, dirpart, fnamepart) + if rel_src.endswith('.java'): + gen_src_list.append(raw_src) + + compile_args = self.determine_single_java_compile_args(target, compiler) + for src in src_list + gen_src_list: + plain_class_path = self.generate_single_java_compile(src, target, compiler, compile_args) + class_list.append(plain_class_path) + class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list] + manifest_path = os.path.join(self.get_target_private_dir(target), 'META-INF', 'MANIFEST.MF') + manifest_fullpath = os.path.join(self.environment.get_build_dir(), manifest_path) + os.makedirs(os.path.dirname(manifest_fullpath), exist_ok=True) + with open(manifest_fullpath, 'w', encoding='utf-8') as manifest: + if any(target.link_targets): + manifest.write('Class-Path: ') + cp_paths = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] + manifest.write(' '.join(cp_paths)) + manifest.write('\n') + jar_rule = 'java_LINKER' + commands = [c + m + e + f] + commands.append(manifest_path) + if e != '': + commands.append(main_class) + commands.append(self.get_target_filename(target)) + # Java compilation can produce an arbitrary number of output + # class files for a single source file. Thus tell jar to just + # grab everything in the final package. + commands += ['-C', self.get_target_private_dir(target), '.'] + elem = NinjaBuildElement(self.all_outputs, outname_rel, jar_rule, []) + elem.add_dep(class_dep_list) + elem.add_item('ARGS', commands) + self.add_build(elem) + # Create introspection information + self.create_target_source_introspection(target, compiler, compile_args, src_list, gen_src_list) + + def generate_cs_resource_tasks(self, target): + args = [] + deps = [] + for r in target.resources: + rel_sourcefile = os.path.join(self.build_to_src, target.subdir, r) + if r.endswith('.resources'): + a = '-resource:' + rel_sourcefile + elif r.endswith('.txt') or r.endswith('.resx'): + ofilebase = os.path.splitext(os.path.basename(r))[0] + '.resources' + ofilename = os.path.join(self.get_target_private_dir(target), ofilebase) + elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile) + elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename]) + elem.add_item('DESC', f'Compiling resource {rel_sourcefile}') + self.add_build(elem) + deps.append(ofilename) + a = '-resource:' + ofilename + else: + raise InvalidArguments(f'Unknown resource file {r}.') + args.append(a) + return args, deps + + def generate_cs_target(self, target: build.BuildTarget): + buildtype = self.get_option_for_target(OptionKey('buildtype'), target) + fname = target.get_filename() + outname_rel = os.path.join(self.get_target_dir(target), fname) + src_list = target.get_sources() + compiler = target.compilers['cs'] + rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list] + deps = [] + commands = compiler.compiler_args(target.extra_args.get('cs', [])) + commands += compiler.get_buildtype_args(buildtype) + commands += 
compiler.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target)) + commands += compiler.get_debug_args(self.get_option_for_target(OptionKey('debug'), target)) + if isinstance(target, build.Executable): + commands.append('-target:exe') + elif isinstance(target, build.SharedLibrary): + commands.append('-target:library') + else: + raise MesonException('Unknown C# target type.') + (resource_args, resource_deps) = self.generate_cs_resource_tasks(target) + commands += resource_args + deps += resource_deps + commands += compiler.get_output_args(outname_rel) + for l in target.link_targets: + lname = os.path.join(self.get_target_dir(l), l.get_filename()) + commands += compiler.get_link_args(lname) + deps.append(lname) + if '-g' in commands: + outputs = [outname_rel, outname_rel + '.mdb'] + else: + outputs = [outname_rel] + generated_sources = self.get_target_generated_sources(target) + generated_rel_srcs = [] + for rel_src in generated_sources.keys(): + if rel_src.lower().endswith('.cs'): + generated_rel_srcs.append(os.path.normpath(rel_src)) + deps.append(os.path.normpath(rel_src)) + + for dep in target.get_external_deps(): + commands.extend_direct(dep.get_link_args()) + commands += self.build.get_project_args(compiler, target.subproject, target.for_machine) + commands += self.build.get_global_args(compiler, target.for_machine) + + elem = NinjaBuildElement(self.all_outputs, outputs, self.get_compiler_rule_name('cs', target.for_machine), rel_srcs + generated_rel_srcs) + elem.add_dep(deps) + elem.add_item('ARGS', commands) + self.add_build(elem) + + self.generate_generator_list_rules(target) + self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs) + + def determine_single_java_compile_args(self, target, compiler): + args = [] + args += compiler.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target)) + args += self.build.get_global_args(compiler, target.for_machine) + args += self.build.get_project_args(compiler, target.subproject, target.for_machine) + args += target.get_java_args() + args += compiler.get_output_args(self.get_target_private_dir(target)) + args += target.get_classpath_args() + curdir = target.get_subdir() + sourcepath = os.path.join(self.build_to_src, curdir) + os.pathsep + sourcepath += os.path.normpath(curdir) + os.pathsep + for i in target.include_dirs: + for idir in i.get_incdirs(): + sourcepath += os.path.join(self.build_to_src, i.curdir, idir) + os.pathsep + args += ['-sourcepath', sourcepath] + return args + + def generate_single_java_compile(self, src, target, compiler, args): + deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] + generated_sources = self.get_target_generated_sources(target) + for rel_src in generated_sources.keys(): + if rel_src.endswith('.java'): + deps.append(rel_src) + rel_src = src.rel_to_builddir(self.build_to_src) + plain_class_path = src.fname[:-4] + 'class' + rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path) + element = NinjaBuildElement(self.all_outputs, rel_obj, self.compiler_to_rule_name(compiler), rel_src) + element.add_dep(deps) + element.add_item('ARGS', args) + self.add_build(element) + return plain_class_path + + def generate_java_link(self): + rule = 'java_LINKER' + command = ['jar', '$ARGS'] + description = 'Creating JAR $out' + self.add_rule(NinjaRule(rule, command, [], description)) + + def determine_dep_vapis(self, target): + """ + Peek into the sources of BuildTargets we're 
linking with, and if any of + them was built with Vala, assume that it also generated a .vapi file of + the same name as the BuildTarget and return the path to it relative to + the build directory. + """ + result = OrderedSet() + for dep in itertools.chain(target.link_targets, target.link_whole_targets): + if not dep.is_linkable_target(): + continue + for i in dep.sources: + if hasattr(i, 'fname'): + i = i.fname + if i.endswith('vala'): + vapiname = dep.vala_vapi + fullname = os.path.join(self.get_target_dir(dep), vapiname) + result.add(fullname) + break + return list(result) + + def split_vala_sources(self, t: build.BuildTarget) -> \ + T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], + T.Tuple[T.MutableMapping[str, File], T.MutableMapping]]: + """ + Splits the target's sources into .vala, .gs, .vapi, and other sources. + Handles both pre-existing and generated sources. + + Returns a tuple (vala, vapi, others) each of which is a dictionary with + the keys being the path to the file (relative to the build directory) + and the value being the object that generated or represents the file. + """ + vala: T.MutableMapping[str, File] = OrderedDict() + vapi: T.MutableMapping[str, File] = OrderedDict() + others: T.MutableMapping[str, File] = OrderedDict() + othersgen: T.MutableMapping[str, File] = OrderedDict() + # Split pre-existing sources + for s in t.get_sources(): + # BuildTarget sources are always mesonlib.File files which are + # either in the source root, or generated with configure_file and + # in the build root + if not isinstance(s, File): + raise InvalidArguments(f'All sources in target {t!r} must be of type mesonlib.File, not {s!r}') + f = s.rel_to_builddir(self.build_to_src) + if s.endswith(('.vala', '.gs')): + srctype = vala + elif s.endswith('.vapi'): + srctype = vapi + else: + srctype = others + srctype[f] = s + # Split generated sources + for gensrc in t.get_generated_sources(): + for s in gensrc.get_outputs(): + f = self.get_target_generated_dir(t, gensrc, s) + if s.endswith(('.vala', '.gs')): + srctype = vala + elif s.endswith('.vapi'): + srctype = vapi + # Generated non-Vala (C/C++) sources. Won't be used for + # generating the Vala compile rule below. + else: + srctype = othersgen + # Duplicate outputs are disastrous + if f in srctype and srctype[f] is not gensrc: + msg = 'Duplicate output {0!r} from {1!r} {2!r}; ' \ + 'conflicts with {0!r} from {4!r} {3!r}' \ + ''.format(f, type(gensrc).__name__, gensrc.name, + srctype[f].name, type(srctype[f]).__name__) + raise InvalidArguments(msg) + # Store 'somefile.vala': GeneratedList (or CustomTarget) + srctype[f] = gensrc + return vala, vapi, (others, othersgen) + + def generate_vala_compile(self, target: build.BuildTarget) -> \ + T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], T.List[str]]: + """Vala is compiled into C. 
Set up all necessary build steps here.""" + (vala_src, vapi_src, other_src) = self.split_vala_sources(target) + extra_dep_files = [] + if not vala_src: + raise InvalidArguments(f'Vala library {target.name!r} has no Vala or Genie source files.') + + valac = target.compilers['vala'] + c_out_dir = self.get_target_private_dir(target) + # C files generated by valac + vala_c_src: T.List[str] = [] + # Files generated by valac + valac_outputs: T.List = [] + # All sources that are passed to valac on the commandline + all_files = list(vapi_src) + # Passed as --basedir + srcbasedir = os.path.join(self.build_to_src, target.get_subdir()) + for (vala_file, gensrc) in vala_src.items(): + all_files.append(vala_file) + # Figure out where the Vala compiler will write the compiled C file + # + # If the Vala file is in a subdir of the build dir (in our case + # because it was generated/built by something else), and is also + # a subdir of --basedir (because the builddir is in the source + # tree, and the target subdir is the source root), the subdir + # components from the source root till the private builddir will be + # duplicated inside the private builddir. Otherwise, just the + # basename will be used. + # + # If the Vala file is outside the build directory, the paths from + # the --basedir till the subdir will be duplicated inside the + # private builddir. + if isinstance(gensrc, (build.CustomTarget, build.GeneratedList)) or gensrc.is_built: + vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c' + # Check if the vala file is in a subdir of --basedir + abs_srcbasedir = os.path.join(self.environment.get_source_dir(), target.get_subdir()) + abs_vala_file = os.path.join(self.environment.get_build_dir(), vala_file) + if PurePath(os.path.commonpath((abs_srcbasedir, abs_vala_file))) == PurePath(abs_srcbasedir): + vala_c_subdir = PurePath(abs_vala_file).parent.relative_to(abs_srcbasedir) + vala_c_file = os.path.join(str(vala_c_subdir), vala_c_file) + else: + path_to_target = os.path.join(self.build_to_src, target.get_subdir()) + if vala_file.startswith(path_to_target): + vala_c_file = os.path.splitext(os.path.relpath(vala_file, path_to_target))[0] + '.c' + else: + vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c' + # All this will be placed inside the c_out_dir + vala_c_file = os.path.join(c_out_dir, vala_c_file) + vala_c_src.append(vala_c_file) + valac_outputs.append(vala_c_file) + + args = self.generate_basic_compiler_args(target, valac) + args += valac.get_colorout_args(self.environment.coredata.options.get(OptionKey('b_colorout')).value) + # Tell Valac to output everything in our private directory. Sadly this + # means it will also preserve the directory components of Vala sources + # found inside the build tree (generated sources). + args += ['--directory', c_out_dir] + args += ['--basedir', srcbasedir] + if target.is_linkable_target(): + # Library name + args += ['--library', target.name] + # Outputted header + hname = os.path.join(self.get_target_dir(target), target.vala_header) + args += ['--header', hname] + if self.is_unity(target): + # Without this the declarations will get duplicated in the .c + # files and cause a build failure when all of them are + # #include-d in one .c file. + # https://github.com/mesonbuild/meson/issues/1969 + args += ['--use-header'] + valac_outputs.append(hname) + # Outputted vapi file + vapiname = os.path.join(self.get_target_dir(target), target.vala_vapi) + # Force valac to write the vapi and gir files in the target build dir. 
+ # Without this, it will write it inside c_out_dir + args += ['--vapi', os.path.join('..', target.vala_vapi)] + valac_outputs.append(vapiname) + target.outputs += [target.vala_header, target.vala_vapi] + # Install header and vapi to default locations if user requests this + if len(target.install_dir) > 1 and target.install_dir[1] is True: + target.install_dir[1] = self.environment.get_includedir() + if len(target.install_dir) > 2 and target.install_dir[2] is True: + target.install_dir[2] = os.path.join(self.environment.get_datadir(), 'vala', 'vapi') + # Generate GIR if requested + if isinstance(target.vala_gir, str): + girname = os.path.join(self.get_target_dir(target), target.vala_gir) + args += ['--gir', os.path.join('..', target.vala_gir)] + valac_outputs.append(girname) + target.outputs.append(target.vala_gir) + # Install GIR to default location if requested by user + if len(target.install_dir) > 3 and target.install_dir[3] is True: + target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0') + # Detect gresources and add --gresources arguments for each + for gensrc in other_src[1].values(): + if isinstance(gensrc, modules.GResourceTarget): + gres_xml, = self.get_custom_target_sources(gensrc) + args += ['--gresources=' + gres_xml] + extra_args = [] + + for a in target.extra_args.get('vala', []): + if isinstance(a, File): + relname = a.rel_to_builddir(self.build_to_src) + extra_dep_files.append(relname) + extra_args.append(relname) + else: + extra_args.append(a) + dependency_vapis = self.determine_dep_vapis(target) + extra_dep_files += dependency_vapis + args += extra_args + element = NinjaBuildElement(self.all_outputs, valac_outputs, + self.compiler_to_rule_name(valac), + all_files + dependency_vapis) + element.add_item('ARGS', args) + element.add_dep(extra_dep_files) + self.add_build(element) + self.create_target_source_introspection(target, valac, args, all_files, []) + return other_src[0], other_src[1], vala_c_src + + def generate_cython_transpile(self, target: build.BuildTarget) -> \ + T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], T.List[str]]: + """Generate rules for transpiling Cython files to C or C++ + + XXX: Currently only C is handled. + """ + static_sources: T.MutableMapping[str, File] = OrderedDict() + generated_sources: T.MutableMapping[str, File] = OrderedDict() + cython_sources: T.List[str] = [] + + cython = target.compilers['cython'] + + opt_proxy = self.get_compiler_options_for_target(target) + + args: T.List[str] = [] + args += cython.get_always_args() + args += cython.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target)) + args += cython.get_debug_args(self.get_option_for_target(OptionKey('debug'), target)) + args += cython.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target)) + args += cython.get_option_compile_args(opt_proxy) + args += self.build.get_global_args(cython, target.for_machine) + args += self.build.get_project_args(cython, target.subproject, target.for_machine) + + for src in target.get_sources(): + if src.endswith('.pyx'): + output = os.path.join(self.get_target_private_dir(target), f'{src}.c') + args = args.copy() + args += cython.get_output_args(output) + element = NinjaBuildElement( + self.all_outputs, [output], + self.compiler_to_rule_name(cython), + [src.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())]) + element.add_item('ARGS', args) + self.add_build(element) + # TODO: introspection? 
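# To make the output-path computation above easier to follow, here is a tiny
# self-contained sketch of the same idea (all concrete names below are
# hypothetical examples, not values used elsewhere in this file):
import os
_sketch_private_dir = 'python/mod.p'          # assumed target private dir
_sketch_output = os.path.join(_sketch_private_dir, 'fast.pyx' + '.c')
# _sketch_output is 'python/mod.p/fast.pyx.c' (on POSIX); a path of this shape
# is what cython.get_output_args() receives for the transpiled C file.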
+ cython_sources.append(output) + else: + static_sources[src.rel_to_builddir(self.build_to_src)] = src + + for gen in target.get_generated_sources(): + for ssrc in gen.get_outputs(): + if isinstance(gen, GeneratedList): + ssrc = os.path.join(self.get_target_private_dir(target) , ssrc) + else: + ssrc = os.path.join(gen.get_subdir(), ssrc) + if ssrc.endswith('.pyx'): + args = args.copy() + output = os.path.join(self.get_target_private_dir(target), f'{ssrc}.c') + args += cython.get_output_args(output) + element = NinjaBuildElement( + self.all_outputs, [output], + self.compiler_to_rule_name(cython), + [ssrc]) + element.add_item('ARGS', args) + self.add_build(element) + # TODO: introspection? + cython_sources.append(output) + else: + generated_sources[ssrc] = mesonlib.File.from_built_file(gen.get_subdir(), ssrc) + + return static_sources, generated_sources, cython_sources + + def generate_rust_target(self, target: build.BuildTarget) -> None: + rustc = target.compilers['rust'] + # Rust compiler takes only the main file as input and + # figures out what other files are needed via import + # statements and magic. + base_proxy = self.get_base_options_for_target(target) + args = rustc.compiler_args() + # Compiler args for compiling this target + args += compilers.get_base_compile_args(base_proxy, rustc) + self.generate_generator_list_rules(target) + + # dependencies need to cause a relink, they're not just for odering + deps = [os.path.join(t.subdir, t.get_filename()) for t in target.link_targets] + + orderdeps: T.List[str] = [] + + main_rust_file = None + for i in target.get_sources(): + if not rustc.can_compile(i): + raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.') + if main_rust_file is None: + main_rust_file = i.rel_to_builddir(self.build_to_src) + for g in target.get_generated_sources(): + for i in g.get_outputs(): + if not rustc.can_compile(i): + raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.') + if isinstance(g, GeneratedList): + fname = os.path.join(self.get_target_private_dir(target), i) + else: + fname = os.path.join(g.get_subdir(), i) + if main_rust_file is None: + main_rust_file = fname + orderdeps.append(fname) + if main_rust_file is None: + raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. 
Please report') + target_name = os.path.join(target.subdir, target.get_filename()) + if isinstance(target, build.Executable): + cratetype = 'bin' + elif hasattr(target, 'rust_crate_type'): + cratetype = target.rust_crate_type + elif isinstance(target, build.SharedLibrary): + cratetype = 'dylib' + elif isinstance(target, build.StaticLibrary): + cratetype = 'rlib' + else: + raise InvalidArguments('Unknown target type for rustc.') + args.extend(['--crate-type', cratetype]) + + # If we're dynamically linking, add those arguments + # + # Rust is super annoying, calling -C link-arg foo does not work, it has + # to be -C link-arg=foo + if cratetype in {'bin', 'dylib'}: + args.extend(rustc.get_linker_always_args()) + + opt_proxy = self.get_compiler_options_for_target(target) + + args += ['--crate-name', target.name] + args += rustc.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target)) + args += rustc.get_debug_args(self.get_option_for_target(OptionKey('debug'), target)) + args += rustc.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target)) + args += rustc.get_option_compile_args(opt_proxy) + args += self.build.get_global_args(rustc, target.for_machine) + args += self.build.get_project_args(rustc, target.subproject, target.for_machine) + depfile = os.path.join(target.subdir, target.name + '.d') + args += ['--emit', f'dep-info={depfile}', '--emit', 'link'] + args += target.get_extra_args('rust') + args += rustc.get_output_args(os.path.join(target.subdir, target.get_filename())) + args += self.environment.coredata.get_external_args(target.for_machine, rustc.language) + linkdirs = mesonlib.OrderedSet() + external_deps = target.external_deps.copy() + for d in target.link_targets: + linkdirs.add(d.subdir) + if d.uses_rust(): + # specify `extern CRATE_NAME=OUTPUT_FILE` for each Rust + # dependency, so that collisions with libraries in rustc's + # sysroot don't cause ambiguity + args += ['--extern', '{}={}'.format(d.name, os.path.join(d.subdir, d.filename))] + elif d.typename == 'static library': + # Rustc doesn't follow Meson's convention that static libraries + # are called .a, and import libraries are .lib, so we have to + # manually handle that. + if rustc.linker.id in {'link', 'lld-link'}: + args += ['-C', f'link-arg={self.get_target_filename_for_linking(d)}'] + else: + args += ['-l', f'static={d.name}'] + external_deps.extend(d.external_deps) + else: + # Rust uses -l for non rust dependencies, but we still need to + # add dylib=foo + args += ['-l', f'dylib={d.name}'] + for e in external_deps: + for a in e.get_link_args(): + if a.endswith(('.dll', '.so', '.dylib')): + dir_, lib = os.path.split(a) + linkdirs.add(dir_) + lib, ext = os.path.splitext(lib) + if lib.startswith('lib'): + lib = lib[3:] + args.extend(['-l', f'dylib={lib}']) + elif a.startswith('-L'): + args.append(a) + elif a.startswith('-l'): + _type = 'static' if e.static else 'dylib' + args.extend(['-l', f'{_type}={a[2:]}']) + for d in linkdirs: + if d == '': + d = '.' + args += ['-L', d] + has_shared_deps = any(isinstance(dep, build.SharedLibrary) for dep in target.get_dependencies()) + if isinstance(target, build.SharedLibrary) or has_shared_deps: + # add prefer-dynamic if any of the Rust libraries we link + # against are dynamic, otherwise we'll end up with + # multiple implementations of crates + args += ['-C', 'prefer-dynamic'] + + # build the usual rpath arguments as well... 
+ + # Set runtime-paths so we can run executables without needing to set + # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows. + if has_path_sep(target.name): + # Target names really should not have slashes in them, but + # unfortunately we did not check for that and some downstream projects + # now have them. Once slashes are forbidden, remove this bit. + target_slashname_workaround_dir = os.path.join(os.path.dirname(target.name), + self.get_target_dir(target)) + else: + target_slashname_workaround_dir = self.get_target_dir(target) + rpath_args, target.rpath_dirs_to_remove = ( + rustc.build_rpath_args(self.environment, + self.environment.get_build_dir(), + target_slashname_workaround_dir, + self.determine_rpath_dirs(target), + target.build_rpath, + target.install_rpath)) + # ... but then add rustc's sysroot to account for rustup + # installations + for rpath_arg in rpath_args: + args += ['-C', 'link-arg=' + rpath_arg + ':' + os.path.join(rustc.get_sysroot(), 'lib')] + compiler_name = self.get_compiler_rule_name('rust', target.for_machine) + element = NinjaBuildElement(self.all_outputs, target_name, compiler_name, main_rust_file) + if orderdeps: + element.add_orderdep(orderdeps) + if deps: + element.add_dep(deps) + element.add_item('ARGS', args) + element.add_item('targetdep', depfile) + element.add_item('cratetype', cratetype) + self.add_build(element) + if isinstance(target, build.SharedLibrary): + self.generate_shsym(target) + self.create_target_source_introspection(target, rustc, args, [main_rust_file], []) + + @staticmethod + def get_rule_suffix(for_machine: MachineChoice) -> str: + return PerMachine('_FOR_BUILD', '')[for_machine] + + @classmethod + def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice) -> str: + return '{}_COMPILER{}'.format(lang, cls.get_rule_suffix(for_machine)) + + @classmethod + def get_pch_rule_name(cls, lang: str, for_machine: MachineChoice) -> str: + return '{}_PCH{}'.format(lang, cls.get_rule_suffix(for_machine)) + + @classmethod + def compiler_to_rule_name(cls, compiler: Compiler) -> str: + return cls.get_compiler_rule_name(compiler.get_language(), compiler.for_machine) + + @classmethod + def compiler_to_pch_rule_name(cls, compiler: Compiler) -> str: + return cls.get_pch_rule_name(compiler.get_language(), compiler.for_machine) + + def swift_module_file_name(self, target): + return os.path.join(self.get_target_private_dir(target), + self.target_swift_modulename(target) + '.swiftmodule') + + def target_swift_modulename(self, target): + return target.name + + def determine_swift_dep_modules(self, target): + result = [] + for l in target.link_targets: + if self.is_swift_target(l): + result.append(self.swift_module_file_name(l)) + return result + + def get_swift_link_deps(self, target): + result = [] + for l in target.link_targets: + result.append(self.get_target_filename(l)) + return result + + def split_swift_generated_sources(self, target): + all_srcs = self.get_target_generated_sources(target) + srcs = [] + others = [] + for i in all_srcs: + if i.endswith('.swift'): + srcs.append(i) + else: + others.append(i) + return srcs, others + + def generate_swift_target(self, target): + module_name = self.target_swift_modulename(target) + swiftc = target.compilers['swift'] + abssrc = [] + relsrc = [] + abs_headers = [] + header_imports = [] + for i in target.get_sources(): + if swiftc.can_compile(i): + rels = i.rel_to_builddir(self.build_to_src) + abss = os.path.normpath(os.path.join(self.environment.get_build_dir(), rels)) + 
relsrc.append(rels) + abssrc.append(abss) + elif self.environment.is_header(i): + relh = i.rel_to_builddir(self.build_to_src) + absh = os.path.normpath(os.path.join(self.environment.get_build_dir(), relh)) + abs_headers.append(absh) + header_imports += swiftc.get_header_import_args(absh) + else: + raise InvalidArguments(f'Swift target {target.get_basename()} contains a non-swift source file.') + os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True) + compile_args = swiftc.get_compile_only_args() + compile_args += swiftc.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target)) + compile_args += swiftc.get_debug_args(self.get_option_for_target(OptionKey('debug'), target)) + compile_args += swiftc.get_module_args(module_name) + compile_args += self.build.get_project_args(swiftc, target.subproject, target.for_machine) + compile_args += self.build.get_global_args(swiftc, target.for_machine) + for i in reversed(target.get_include_dirs()): + basedir = i.get_curdir() + for d in i.get_incdirs(): + if d not in ('', '.'): + expdir = os.path.join(basedir, d) + else: + expdir = basedir + srctreedir = os.path.normpath(os.path.join(self.environment.get_build_dir(), self.build_to_src, expdir)) + sargs = swiftc.get_include_args(srctreedir, False) + compile_args += sargs + link_args = swiftc.get_output_args(os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))) + link_args += self.build.get_project_link_args(swiftc, target.subproject, target.for_machine) + link_args += self.build.get_global_link_args(swiftc, target.for_machine) + rundir = self.get_target_private_dir(target) + out_module_name = self.swift_module_file_name(target) + in_module_files = self.determine_swift_dep_modules(target) + abs_module_dirs = self.determine_swift_dep_dirs(target) + module_includes = [] + for x in abs_module_dirs: + module_includes += swiftc.get_include_args(x, False) + link_deps = self.get_swift_link_deps(target) + abs_link_deps = [os.path.join(self.environment.get_build_dir(), x) for x in link_deps] + for d in target.link_targets: + reldir = self.get_target_dir(d) + if reldir == '': + reldir = '.' + link_args += ['-L', os.path.normpath(os.path.join(self.environment.get_build_dir(), reldir))] + (rel_generated, _) = self.split_swift_generated_sources(target) + abs_generated = [os.path.join(self.environment.get_build_dir(), x) for x in rel_generated] + # We need absolute paths because swiftc needs to be invoked in a subdir + # and this is the easiest way about it. + objects = [] # Relative to swift invocation dir + rel_objects = [] # Relative to build.ninja + for i in abssrc + abs_generated: + base = os.path.basename(i) + oname = os.path.splitext(base)[0] + '.o' + objects.append(oname) + rel_objects.append(os.path.join(self.get_target_private_dir(target), oname)) + + rulename = self.get_compiler_rule_name('swift', target.for_machine) + + # Swiftc does not seem to be able to emit objects and module files in one go. 
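# Rough sketch of the two build statements this ends up emitting for one
# target (module name, paths and file names here are hypothetical; the real
# values come from the target and its private dir):
_sketch_swift_edges = '''
build sub/app.p/a.o sub/app.p/b.o: swift_COMPILER /abs/src/a.swift /abs/src/b.swift
build sub/app.p/App.swiftmodule: swift_COMPILER /abs/src/a.swift /abs/src/b.swift
'''
# The same Swift sources are passed to the swift_COMPILER rule twice: once
# with compile-only arguments to produce object files, and once with the
# module-generation arguments to produce the .swiftmodule.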
+ elem = NinjaBuildElement(self.all_outputs, rel_objects, rulename, abssrc) + elem.add_dep(in_module_files + rel_generated) + elem.add_dep(abs_headers) + elem.add_item('ARGS', compile_args + header_imports + abs_generated + module_includes) + elem.add_item('RUNDIR', rundir) + self.add_build(elem) + elem = NinjaBuildElement(self.all_outputs, out_module_name, + self.get_compiler_rule_name('swift', target.for_machine), + abssrc) + elem.add_dep(in_module_files + rel_generated) + elem.add_item('ARGS', compile_args + abs_generated + module_includes + swiftc.get_mod_gen_args()) + elem.add_item('RUNDIR', rundir) + self.add_build(elem) + if isinstance(target, build.StaticLibrary): + elem = self.generate_link(target, self.get_target_filename(target), + rel_objects, self.build.static_linker[target.for_machine]) + self.add_build(elem) + elif isinstance(target, build.Executable): + elem = NinjaBuildElement(self.all_outputs, self.get_target_filename(target), rulename, []) + elem.add_dep(rel_objects) + elem.add_dep(link_deps) + elem.add_item('ARGS', link_args + swiftc.get_std_exe_link_args() + objects + abs_link_deps) + elem.add_item('RUNDIR', rundir) + self.add_build(elem) + else: + raise MesonException('Swift supports only executable and static library targets.') + # Introspection information + self.create_target_source_introspection(target, swiftc, compile_args + header_imports + module_includes, relsrc, rel_generated) + + def _rsp_options(self, tool: T.Union['Compiler', 'StaticLinker', 'DynamicLinker']) -> T.Dict[str, T.Union[bool, RSPFileSyntax]]: + """Helper method to get rsp options. + + rsp_file_syntax() is only guaranteed to be implemented if + can_linker_accept_rsp() returns True. + """ + options = dict(rspable=tool.can_linker_accept_rsp()) + if options['rspable']: + options['rspfile_quote_style'] = tool.rsp_file_syntax() + return options + + def generate_static_link_rules(self): + num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + if 'java' in self.environment.coredata.compilers.host: + self.generate_java_link() + for for_machine in MachineChoice: + static_linker = self.build.static_linker[for_machine] + if static_linker is None: + continue + rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine)) + cmdlist = [] + args = ['$in'] + # FIXME: Must normalize file names with pathlib.Path before writing + # them out to fix this properly on Windows. See: + # https://github.com/mesonbuild/meson/issues/1517 + # https://github.com/mesonbuild/meson/issues/1526 + if isinstance(static_linker, ArLinker) and not mesonlib.is_windows(): + # `ar` has no options to overwrite archives. It always appends, + # which is never what we want. Delete an existing library first if + # it exists. 
https://github.com/mesonbuild/meson/issues/1355 + cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix] + cmdlist += static_linker.get_exelist() + cmdlist += ['$LINK_ARGS'] + cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none) + description = 'Linking static target $out' + if num_pools > 0: + pool = 'pool = link_pool' + else: + pool = None + + options = self._rsp_options(static_linker) + self.add_rule(NinjaRule(rule, cmdlist, args, description, **options, extra=pool)) + + def generate_dynamic_link_rules(self): + num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + for for_machine in MachineChoice: + complist = self.environment.coredata.compilers[for_machine] + for langname, compiler in complist.items(): + if langname in {'java', 'vala', 'rust', 'cs', 'cython'}: + continue + rule = '{}_LINKER{}'.format(langname, self.get_rule_suffix(for_machine)) + command = compiler.get_linker_exelist() + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS'] + description = 'Linking target $out' + if num_pools > 0: + pool = 'pool = link_pool' + else: + pool = None + + options = self._rsp_options(compiler) + self.add_rule(NinjaRule(rule, command, args, description, **options, extra=pool)) + + args = self.environment.get_build_command() + \ + ['--internal', + 'symbolextractor', + self.environment.get_build_dir(), + '$in', + '$IMPLIB', + '$out'] + symrule = 'SHSYM' + symcmd = args + ['$CROSS'] + syndesc = 'Generating symbol file $out' + synstat = 'restat = 1' + self.add_rule(NinjaRule(symrule, symcmd, [], syndesc, extra=synstat)) + + def generate_java_compile_rule(self, compiler): + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Java object $in' + self.add_rule(NinjaRule(rule, command, [], description)) + + def generate_cs_compile_rule(self, compiler: 'CsCompiler') -> None: + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + args = ['$ARGS', '$in'] + description = 'Compiling C Sharp target $out' + self.add_rule(NinjaRule(rule, command, args, description, + rspable=mesonlib.is_windows(), + rspfile_quote_style=compiler.rsp_file_syntax())) + + def generate_vala_compile_rules(self, compiler): + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Vala source $in' + self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1')) + + def generate_cython_compile_rules(self, compiler: 'Compiler') -> None: + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Cython source $in' + self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1')) + + def generate_rust_compile_rules(self, compiler): + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Rust source $in' + depfile = '$targetdep' + depstyle = 'gcc' + self.add_rule(NinjaRule(rule, command, [], description, deps=depstyle, + depfile=depfile)) + + def generate_swift_compile_rules(self, compiler): + rule = self.compiler_to_rule_name(compiler) + full_exe = self.environment.get_build_command() + [ + '--internal', + 'dirchanger', + '$RUNDIR', + ] + invoc = full_exe + compiler.get_exelist() + command = invoc + ['$ARGS', '$in'] + description = 'Compiling Swift source $in' + 
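# For reference, the assembled rule command has roughly this shape (the meson
# and swiftc paths are hypothetical; $RUNDIR, $ARGS and $in are Ninja
# variables substituted per build statement):
_sketch_swift_rule_cmd = [
    '/usr/bin/meson', '--internal', 'dirchanger', '$RUNDIR',  # chdir first
    'swiftc', '$ARGS', '$in',                                 # then compile there
]
# This is why generate_swift_target() sets RUNDIR on each element: the
# compiler is always run from the target's private directory.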
self.add_rule(NinjaRule(rule, command, [], description)) + + def use_dyndeps_for_fortran(self) -> bool: + '''Use the new Ninja feature for scanning dependencies during build, + rather than up front. Remove this and all old scanning code once Ninja + minimum version is bumped to 1.10.''' + return mesonlib.version_compare(self.ninja_version, '>=1.10.0') + + def generate_fortran_dep_hack(self, crstr: str) -> None: + if self.use_dyndeps_for_fortran(): + return + rule = f'FORTRAN_DEP_HACK{crstr}' + if mesonlib.is_windows(): + cmd = ['cmd', '/C'] + else: + cmd = ['true'] + self.add_rule_comment(NinjaComment('''Workaround for these issues: +https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8 +https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) + self.add_rule(NinjaRule(rule, cmd, [], 'Dep hack', extra='restat = 1')) + + def generate_llvm_ir_compile_rule(self, compiler): + if self.created_llvm_ir_rule[compiler.for_machine]: + return + rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) + command = compiler.get_exelist() + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] + description = 'Compiling LLVM IR object $in' + + options = self._rsp_options(compiler) + + self.add_rule(NinjaRule(rule, command, args, description, **options)) + self.created_llvm_ir_rule[compiler.for_machine] = True + + def generate_compile_rule_for(self, langname, compiler): + if langname == 'java': + if self.environment.machines.matches_build_machine(compiler.for_machine): + self.generate_java_compile_rule(compiler) + return + if langname == 'cs': + if self.environment.machines.matches_build_machine(compiler.for_machine): + self.generate_cs_compile_rule(compiler) + return + if langname == 'vala': + self.generate_vala_compile_rules(compiler) + return + if langname == 'rust': + self.generate_rust_compile_rules(compiler) + return + if langname == 'swift': + if self.environment.machines.matches_build_machine(compiler.for_machine): + self.generate_swift_compile_rules(compiler) + return + if langname == 'cython': + self.generate_cython_compile_rules(compiler) + return + crstr = self.get_rule_suffix(compiler.for_machine) + if langname == 'fortran': + self.generate_fortran_dep_hack(crstr) + rule = self.get_compiler_rule_name(langname, compiler.for_machine) + depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none) + command = compiler.get_exelist() + args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] + description = f'Compiling {compiler.get_display_language()} object $out' + if isinstance(compiler, VisualStudioLikeCompiler): + deps = 'msvc' + depfile = None + else: + deps = 'gcc' + depfile = '$DEPFILE' + options = self._rsp_options(compiler) + self.add_rule(NinjaRule(rule, command, args, description, **options, + deps=deps, depfile=depfile)) + + def generate_pch_rule_for(self, langname, compiler): + if langname != 'c' and langname != 'cpp': + return + rule = self.compiler_to_pch_rule_name(compiler) + depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') + + if isinstance(compiler, VisualStudioLikeCompiler): + output = [] + else: + output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in'] + description = 'Precompiling header $in' + if 
isinstance(compiler, VisualStudioLikeCompiler): + deps = 'msvc' + depfile = None + else: + deps = 'gcc' + depfile = '$DEPFILE' + self.add_rule(NinjaRule(rule, command, [], description, deps=deps, + depfile=depfile)) + + + def generate_scanner_rules(self): + rulename = 'depscan' + if rulename in self.ruledict: + # Scanning command is the same for native and cross compilation. + return + command = self.environment.get_build_command() + \ + ['--internal', 'depscan'] + args = ['$picklefile', '$out', '$in'] + description = 'Module scanner.' + rule = NinjaRule(rulename, command, args, description) + self.add_rule(rule) + + + def generate_compile_rules(self): + for for_machine in MachineChoice: + clist = self.environment.coredata.compilers[for_machine] + for langname, compiler in clist.items(): + if compiler.get_id() == 'clang': + self.generate_llvm_ir_compile_rule(compiler) + self.generate_compile_rule_for(langname, compiler) + self.generate_pch_rule_for(langname, compiler) + + def generate_generator_list_rules(self, target): + # CustomTargets have already written their rules and + # CustomTargetIndexes don't actually get generated, so write rules for + # GeneratedLists here + for genlist in target.get_generated_sources(): + if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)): + continue + self.generate_genlist_for_target(genlist, target) + + def replace_paths(self, target, args, override_subdir=None): + if override_subdir: + source_target_dir = os.path.join(self.build_to_src, override_subdir) + else: + source_target_dir = self.get_target_source_dir(target) + relout = self.get_target_private_dir(target) + args = [x.replace("@SOURCE_DIR@", self.build_to_src).replace("@BUILD_DIR@", relout) + for x in args] + args = [x.replace("@CURRENT_SOURCE_DIR@", source_target_dir) for x in args] + args = [x.replace("@SOURCE_ROOT@", self.build_to_src).replace("@BUILD_ROOT@", '.') + for x in args] + args = [x.replace('\\', '/') for x in args] + return args + + def generate_genlist_for_target(self, genlist, target): + generator = genlist.get_generator() + subdir = genlist.subdir + exe = generator.get_exe() + exe_arr = self.build_target_to_cmd_array(exe) + infilelist = genlist.get_inputs() + outfilelist = genlist.get_outputs() + extra_dependencies = self.get_custom_target_depend_files(genlist) + for i in range(len(infilelist)): + curfile = infilelist[i] + if len(generator.outputs) == 1: + sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i]) + else: + sole_output = f'{curfile}' + infilename = curfile.rel_to_builddir(self.build_to_src) + base_args = generator.get_arglist(infilename) + outfiles = genlist.get_outputs_for(curfile) + outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles] + if generator.depfile is None: + rulename = 'CUSTOM_COMMAND' + args = base_args + else: + rulename = 'CUSTOM_COMMAND_DEP' + depfilename = generator.get_dep_outname(infilename) + depfile = os.path.join(self.get_target_private_dir(target), depfilename) + args = [x.replace('@DEPFILE@', depfile) for x in base_args] + args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output) + for x in args] + args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist) + # We have consumed output files, so drop them from the list of remaining outputs. 
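# Small worked example of the consumption referred to above (file names are
# invented): a generator() with outputs ['@BASENAME@.c', '@BASENAME@.h']
# applied to the inputs [a.idl, b.idl] starts out with
_sketch_outfilelist = ['a.c', 'a.h', 'b.c', 'b.h']
# and each iteration of the loop slices off len(generator.outputs) == 2
# entries, so after processing a.idl the remaining list is:
_sketch_remaining = _sketch_outfilelist[2:]   # ['b.c', 'b.h']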
+            if len(generator.outputs) > 1:
+                outfilelist = outfilelist[len(generator.outputs):]
+            args = self.replace_paths(target, args, override_subdir=subdir)
+            cmdlist = exe_arr + self.replace_extra_args(args, genlist)
+            cmdlist, reason = self.as_meson_exe_cmdline('generator ' + cmdlist[0],
+                                                        cmdlist[0], cmdlist[1:],
+                                                        capture=outfiles[0] if generator.capture else None)
+            abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+            os.makedirs(abs_pdir, exist_ok=True)
+
+            elem = NinjaBuildElement(self.all_outputs, outfiles, rulename, infilename)
+            elem.add_dep([self.get_target_filename(x) for x in generator.depends])
+            if generator.depfile is not None:
+                elem.add_item('DEPFILE', depfile)
+            if len(extra_dependencies) > 0:
+                elem.add_dep(extra_dependencies)
+
+            if len(generator.outputs) == 1:
+                what = f'{sole_output!r}'
+            else:
+                # since there are multiple outputs, we log the source that caused the rebuild
+                what = f'from {sole_output!r}.'
+            if reason:
+                reason = f' (wrapped by meson {reason})'
+            elem.add_item('DESC', f'Generating {what}{reason}.')
+
+            if isinstance(exe, build.BuildTarget):
+                elem.add_dep(self.get_target_filename(exe))
+            elem.add_item('COMMAND', cmdlist)
+            self.add_build(elem)
+
+    def scan_fortran_module_outputs(self, target):
+        """
+        Find all modules and submodules made available in a Fortran code file.
+        """
+        if self.use_dyndeps_for_fortran():
+            return
+        compiler = None
+        # TODO other compilers
+        for lang, c in self.environment.coredata.compilers.host.items():
+            if lang == 'fortran':
+                compiler = c
+                break
+        if compiler is None:
+            self.fortran_deps[target.get_basename()] = {}
+            return
+
+        modre = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE)
+        submodre = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+        module_files = {}
+        submodule_files = {}
+        for s in target.get_sources():
+            # FIXME, does not work for Fortran sources generated by
+            # custom_target() and generator() as those are run after
+            # the configuration (configure_file() is OK)
+            if not compiler.can_compile(s):
+                continue
+            filename = s.absolute_path(self.environment.get_source_dir(),
+                                       self.environment.get_build_dir())
+            # Fortran keywords must be ASCII.
+            with open(filename, encoding='ascii', errors='ignore') as f:
+                for line in f:
+                    modmatch = modre.match(line)
+                    if modmatch is not None:
+                        modname = modmatch.group(1).lower()
+                        if modname in module_files:
+                            raise InvalidArguments(
+                                f'Namespace collision: module {modname} defined in '
+                                f'two files {module_files[modname]} and {s}.')
+                        module_files[modname] = s
+                    else:
+                        submodmatch = submodre.match(line)
+                        if submodmatch is not None:
+                            # '_' is arbitrarily used to distinguish submod from mod.
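# Worked example of the key construction below (module names are invented),
# assuming the submodule regex captures the parenthesised ancestor list as
# group(1) and the submodule name as group(2), which is what the code relies
# on: a source line such as
#     submodule (parent_mod) child_impl
# produces
_sketch_submod_key = 'parent_mod'.lower() + '_' + 'child_impl'.lower()
# i.e. 'parent_mod_child_impl'; the inserted '_' is what informally keeps
# submodule keys apart from plain module names once both dicts are merged
# into fortran_deps.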
+                            parents = submodmatch.group(1).lower().split(':')
+                            submodname = parents[0] + '_' + submodmatch.group(2).lower()
+
+                            if submodname in submodule_files:
+                                raise InvalidArguments(
+                                    f'Namespace collision: submodule {submodname} defined in '
+                                    f'two files {submodule_files[submodname]} and {s}.')
+                            submodule_files[submodname] = s
+
+        self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files}
+
+    def get_fortran_deps(self, compiler: FortranCompiler, src: Path, target) -> T.List[str]:
+        """
+        Find all modules and submodules needed by a Fortran target
+        """
+        if self.use_dyndeps_for_fortran():
+            return []
+
+        dirname = Path(self.get_target_private_dir(target))
+        tdeps = self.fortran_deps[target.get_basename()]
+        srcdir = Path(self.source_dir)
+
+        mod_files = _scan_fortran_file_deps(src, srcdir, dirname, tdeps, compiler)
+        return mod_files
+
+    def get_no_stdlib_args(self, target, compiler):
+        if compiler.language in self.build.stdlibs[target.for_machine]:
+            return compiler.get_no_stdinc_args()
+        return []
+
+    def get_no_stdlib_link_args(self, target, linker):
+        if hasattr(linker, 'language') and linker.language in self.build.stdlibs[target.for_machine]:
+            return linker.get_no_stdlib_link_args()
+        return []
+
+    def get_compile_debugfile_args(self, compiler, target, objfile):
+        # The way MSVC uses PDB files is documented exactly nowhere so
+        # the following is what we have been able to decipher via
+        # reverse engineering.
+        #
+        # Each object file gets the path of its PDB file written
+        # inside it. This can be either the final PDB (for, say,
+        # foo.exe) or an object pdb (for foo.obj). If the former, then
+        # each compilation step locks the pdb file for writing, which
+        # is a bottleneck and object files from one target can not be
+        # used in a different target. The latter seems to be the
+        # sensible one (and what Unix does) but there is a catch. If
+        # you try to use precompiled headers MSVC will error out
+        # because both source and pch pdbs go in the same file and
+        # they must be the same.
+        #
+        # This means:
+        #
+        # - pch files must be compiled anew for every object file (negating
+        #   the entire point of having them in the first place)
+        # - when using pch, output must go to the target pdb
+        #
+        # Since both of these are broken in some way, use the one that
+        # works for each target. This unfortunately means that you
+        # can't combine pch and object extraction in a single target.
+        #
+        # PDB files also lead to filename collisions. A target foo.exe
+        # has a corresponding foo.pdb. A shared library foo.dll _also_
+        # has a pdb file called foo.pdb. So will a static library
+        # foo.lib, which clobbers both foo.pdb _and_ the dll file's
+        # export library called foo.lib (by default, currently we name
+        # them libfoo.a to avoid this issue). You can give the files
+        # unique names such as foo_exe.pdb but VC also generates a
+        # bunch of other files which take their names from the target
+        # basename (i.e. "foo") and stomp on each other.
+        #
+        # CMake solves this problem by doing two things. First of all
+        # static libraries do not generate pdb files at
+        # all. Presumably you don't need them and VC is smart enough
+        # to look up the original data when linking (speculation, not
+        # tested). The second solution is that you can only have a
+        # target named "foo" as an exe, shared lib _or_ static
+        # lib. This makes filename collisions not happen. The downside
+        # is that you can't have an executable foo that uses a shared
+        # library libfoo.so, which is a common idiom on Unix.
+ # + # If you feel that the above is completely wrong and all of + # this is actually doable, please send patches. + + if target.has_pch(): + tfilename = self.get_target_filename_abs(target) + return compiler.get_compile_debugfile_args(tfilename, pch=True) + else: + return compiler.get_compile_debugfile_args(objfile, pch=False) + + def get_link_debugfile_name(self, linker, target, outname): + return linker.get_link_debugfile_name(outname) + + def get_link_debugfile_args(self, linker, target, outname): + return linker.get_link_debugfile_args(outname) + + def generate_llvm_ir_compile(self, target, src): + base_proxy = self.get_base_options_for_target(target) + compiler = get_compiler_for_source(target.compilers.values(), src) + commands = compiler.compiler_args() + # Compiler args for compiling this target + commands += compilers.get_base_compile_args(base_proxy, compiler) + if isinstance(src, File): + if src.is_built: + src_filename = os.path.join(src.subdir, src.fname) + else: + src_filename = src.fname + elif os.path.isabs(src): + src_filename = os.path.basename(src) + else: + src_filename = src + obj_basename = self.canonicalize_filename(src_filename) + rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) + rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix() + commands += self.get_compile_debugfile_args(compiler, target, rel_obj) + if isinstance(src, File) and src.is_built: + rel_src = src.fname + elif isinstance(src, File): + rel_src = src.rel_to_builddir(self.build_to_src) + else: + raise InvalidArguments(f'Invalid source type: {src!r}') + # Write the Ninja build command + compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) + element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) + element.add_item('ARGS', commands) + self.add_build(element) + return (rel_obj, rel_src) + + @lru_cache(maxsize=None) + def generate_inc_dir(self, compiler: 'Compiler', d: str, basedir: str, is_system: bool) -> \ + T.Tuple['ImmutableListProtocol[str]', 'ImmutableListProtocol[str]']: + # Avoid superfluous '/.' at the end of paths when d is '.' + if d not in ('', '.'): + expdir = os.path.normpath(os.path.join(basedir, d)) + else: + expdir = basedir + srctreedir = os.path.normpath(os.path.join(self.build_to_src, expdir)) + sargs = compiler.get_include_args(srctreedir, is_system) + # There may be include dirs where a build directory has not been + # created for some source dir. For example if someone does this: + # + # inc = include_directories('foo/bar/baz') + # + # But never subdir()s into the actual dir. + if os.path.isdir(os.path.join(self.environment.get_build_dir(), expdir)): + bargs = compiler.get_include_args(expdir, is_system) + else: + bargs = [] + return (sargs, bargs) + + def _generate_single_compile(self, target: build.BuildTarget, compiler: 'Compiler', + is_generated: bool = False) -> 'CompilerArgs': + commands = self._generate_single_compile_base_args(target, compiler) + commands += self._generate_single_compile_target_args(target, compiler, is_generated) + return commands + + def _generate_single_compile_base_args(self, target: build.BuildTarget, compiler: 'Compiler') -> 'CompilerArgs': + base_proxy = self.get_base_options_for_target(target) + # Create an empty commands list, and start adding arguments from + # various sources in the order in which they must override each other + commands = compiler.compiler_args() + # Start with symbol visibility. 
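# For orientation: on GCC/Clang-like compilers the visibility setting
# typically expands to -fvisibility flags, roughly as sketched below. The
# exact mapping lives in the compiler mixins, not here; treat these values
# as illustrative only.
_sketch_visibility_args = {
    '': [],                                                        # unset: no extra flags
    'hidden': ['-fvisibility=hidden'],
    'inlineshidden': ['-fvisibility=hidden', '-fvisibility-inlines-hidden'],
}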
+ commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility) + # Add compiler args for compiling this target derived from 'base' build + # options passed on the command-line, in default_options, etc. + # These have the lowest priority. + commands += compilers.get_base_compile_args(base_proxy, + compiler) + return commands + + @lru_cache(maxsize=None) + def _generate_single_compile_target_args(self, target: build.BuildTarget, compiler: 'Compiler', + is_generated: bool = False) -> 'ImmutableListProtocol[str]': + # The code generated by valac is usually crap and has tons of unused + # variables and such, so disable warnings for Vala C sources. + no_warn_args = (is_generated == 'vala') + # Add compiler args and include paths from several sources; defaults, + # build options, external dependencies, etc. + commands = self.generate_basic_compiler_args(target, compiler, no_warn_args) + # Add custom target dirs as includes automatically, but before + # target-specific include directories. + if target.implicit_include_directories: + commands += self.get_custom_target_dir_include_args(target, compiler) + # Add include dirs from the `include_directories:` kwarg on the target + # and from `include_directories:` of internal deps of the target. + # + # Target include dirs should override internal deps include dirs. + # This is handled in BuildTarget.process_kwargs() + # + # Include dirs from internal deps should override include dirs from + # external deps and must maintain the order in which they are specified. + # Hence, we must reverse the list so that the order is preserved. + for i in reversed(target.get_include_dirs()): + basedir = i.get_curdir() + # We should iterate include dirs in reversed orders because + # -Ipath will add to begin of array. And without reverse + # flags will be added in reversed order. + for d in reversed(i.get_incdirs()): + # Add source subdir first so that the build subdir overrides it + (compile_obj, includeargs) = self.generate_inc_dir(compiler, d, basedir, i.is_system) + commands += compile_obj + commands += includeargs + for d in i.get_extra_build_dirs(): + commands += compiler.get_include_args(d, i.is_system) + # Add per-target compile args, f.ex, `c_args : ['-DFOO']`. We set these + # near the end since these are supposed to override everything else. + commands += self.escape_extra_args(compiler, + target.get_extra_args(compiler.get_language())) + + # D specific additional flags + if compiler.language == 'd': + commands += compiler.get_feature_args(target.d_features, self.build_to_src) + + # Add source dir and build dir. Project-specific and target-specific + # include paths must override per-target compile args, include paths + # from external dependencies, internal dependencies, and from + # per-target `include_directories:` + # + # We prefer headers in the build dir over the source dir since, for + # instance, the user might have an srcdir == builddir Autotools build + # in their source tree. Many projects that are moving to Meson have + # both Meson and Autotools in parallel as part of the transition. + if target.implicit_include_directories: + commands += self.get_source_dir_include_args(target, compiler) + if target.implicit_include_directories: + commands += self.get_build_dir_include_args(target, compiler) + # Finally add the private dir for the target to the include path. This + # must override everything else and must be the final path added. 
+ commands += compiler.get_include_args(self.get_target_private_dir(target), False) + return commands + + def generate_single_compile(self, target, src, is_generated=False, header_deps=None, order_deps=None): + """ + Compiles C/C++, ObjC/ObjC++, Fortran, and D sources + """ + header_deps = header_deps if header_deps is not None else [] + order_deps = order_deps if order_deps is not None else [] + + if isinstance(src, str) and src.endswith('.h'): + raise AssertionError(f'BUG: sources should not contain headers {src!r}') + + compiler = get_compiler_for_source(target.compilers.values(), src) + commands = self._generate_single_compile_base_args(target, compiler) + + # Include PCH header as first thing as it must be the first one or it will be + # ignored by gcc https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100462 + if self.environment.coredata.options.get(OptionKey('b_pch')) and is_generated != 'pch': + commands += self.get_pch_include_args(compiler, target) + + commands += self._generate_single_compile_target_args(target, compiler, is_generated) + commands = commands.compiler.compiler_args(commands) + + # Create introspection information + if is_generated is False: + self.create_target_source_introspection(target, compiler, commands, [src], []) + else: + self.create_target_source_introspection(target, compiler, commands, [], [src]) + + build_dir = self.environment.get_build_dir() + if isinstance(src, File): + rel_src = src.rel_to_builddir(self.build_to_src) + if os.path.isabs(rel_src): + # Source files may not be from the source directory if they originate in source-only libraries, + # so we can't assert that the absolute path is anywhere in particular. + if src.is_built: + assert rel_src.startswith(build_dir) + rel_src = rel_src[len(build_dir) + 1:] + elif is_generated: + raise AssertionError(f'BUG: broken generated source file handling for {src!r}') + else: + raise InvalidArguments(f'Invalid source type: {src!r}') + obj_basename = self.object_filename_from_source(target, src) + rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) + dep_file = compiler.depfile_for_object(rel_obj) + + # Add MSVC debug file generation compile flags: /Fd /FS + commands += self.get_compile_debugfile_args(compiler, target, rel_obj) + + # PCH handling + if self.environment.coredata.options.get(OptionKey('b_pch')): + pchlist = target.get_pch(compiler.language) + else: + pchlist = [] + if not pchlist: + pch_dep = [] + elif compiler.id == 'intel': + pch_dep = [] + else: + arr = [] + i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0])) + arr.append(i) + pch_dep = arr + + compiler_name = self.compiler_to_rule_name(compiler) + extra_deps = [] + if compiler.get_language() == 'fortran': + # Can't read source file to scan for deps if it's generated later + # at build-time. Skip scanning for deps, and just set the module + # outdir argument instead. + # https://github.com/mesonbuild/meson/issues/1348 + if not is_generated: + abs_src = Path(build_dir) / rel_src + extra_deps += self.get_fortran_deps(compiler, abs_src, target) + if not self.use_dyndeps_for_fortran(): + # Dependency hack. 
Remove once multiple outputs in Ninja is fixed: + # https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8 + for modname, srcfile in self.fortran_deps[target.get_basename()].items(): + modfile = os.path.join(self.get_target_private_dir(target), + compiler.module_name_to_filename(modname)) + + if srcfile == src: + crstr = self.get_rule_suffix(target.for_machine) + depelem = NinjaBuildElement(self.all_outputs, + modfile, + 'FORTRAN_DEP_HACK' + crstr, + rel_obj) + self.add_build(depelem) + commands += compiler.get_module_outdir_args(self.get_target_private_dir(target)) + + element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) + self.add_header_deps(target, element, header_deps) + for d in extra_deps: + element.add_dep(d) + for d in order_deps: + if isinstance(d, File): + d = d.rel_to_builddir(self.build_to_src) + elif not self.has_dir_part(d): + d = os.path.join(self.get_target_private_dir(target), d) + element.add_orderdep(d) + element.add_dep(pch_dep) + for i in self.get_fortran_orderdeps(target, compiler): + element.add_orderdep(i) + element.add_item('DEPFILE', dep_file) + element.add_item('ARGS', commands) + + self.add_dependency_scanner_entries_to_element(target, compiler, element, src) + self.add_build(element) + assert(isinstance(rel_obj, str)) + assert(isinstance(rel_src, str)) + return (rel_obj, rel_src.replace('\\', '/')) + + def add_dependency_scanner_entries_to_element(self, target, compiler, element, src): + if not self.should_use_dyndeps_for_target(target): + return + extension = os.path.splitext(src.fname)[1][1:] + if not (extension.lower() in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']): + return + dep_scan_file = self.get_dep_scan_file_for(target) + element.add_item('dyndep', dep_scan_file) + element.add_orderdep(dep_scan_file) + + def get_dep_scan_file_for(self, target): + return os.path.join(self.get_target_private_dir(target), 'depscan.dd') + + def add_header_deps(self, target, ninja_element, header_deps): + for d in header_deps: + if isinstance(d, File): + d = d.rel_to_builddir(self.build_to_src) + elif not self.has_dir_part(d): + d = os.path.join(self.get_target_private_dir(target), d) + ninja_element.add_dep(d) + + def has_dir_part(self, fname): + # FIXME FIXME: The usage of this is a terrible and unreliable hack + if isinstance(fname, File): + return fname.subdir != '' + return has_path_sep(fname) + + # Fortran is a bit weird (again). When you link against a library, just compiling a source file + # requires the mod files that are output when single files are built. To do this right we would need to + # scan all inputs and write out explicit deps for each file. That is stoo slow and too much effort so + # instead just have an ordered dependency on the library. This ensures all required mod files are created. + # The real deps are then detected via dep file generation from the compiler. This breaks on compilers that + # produce incorrect dep files but such is life. 
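# In the generated build.ninja this ordering shows up as an order-only ('||')
# input on the compile edge, e.g. (target, object and library names invented):
_sketch_fortran_edge = (
    'build sub/app.p/main.f90.o: fortran_COMPILER ../sub/main.f90 || libphysics.so\n'
)
# The library only gates when main.f90 may be compiled (its .mod files must
# exist by then); rebuilds are still driven by the compiler-generated depfile.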
+ def get_fortran_orderdeps(self, target, compiler): + if compiler.language != 'fortran': + return [] + return [ + os.path.join(self.get_target_dir(lt), lt.get_filename()) + for lt in itertools.chain(target.link_targets, target.link_whole_targets) + ] + + def generate_msvc_pch_command(self, target, compiler, pch): + header = pch[0] + pchname = compiler.get_pch_name(header) + dst = os.path.join(self.get_target_private_dir(target), pchname) + + commands = [] + commands += self.generate_basic_compiler_args(target, compiler) + + if len(pch) == 1: + # Auto generate PCH. + source = self.create_msvc_pch_implementation(target, compiler.get_language(), pch[0]) + pch_header_dir = os.path.dirname(os.path.join(self.build_to_src, target.get_source_subdir(), header)) + commands += compiler.get_include_args(pch_header_dir, False) + else: + source = os.path.join(self.build_to_src, target.get_source_subdir(), pch[1]) + + just_name = os.path.basename(header) + (objname, pch_args) = compiler.gen_pch_args(just_name, source, dst) + commands += pch_args + commands += self._generate_single_compile(target, compiler) + commands += self.get_compile_debugfile_args(compiler, target, objname) + dep = dst + '.' + compiler.get_depfile_suffix() + return commands, dep, dst, [objname], source + + def generate_gcc_pch_command(self, target, compiler, pch): + commands = self._generate_single_compile(target, compiler) + if pch.split('.')[-1] == 'h' and compiler.language == 'cpp': + # Explicitly compile pch headers as C++. If Clang is invoked in C++ mode, it actually warns if + # this option is not set, and for gcc it also makes sense to use it. + commands += ['-x', 'c++-header'] + dst = os.path.join(self.get_target_private_dir(target), + os.path.basename(pch) + '.' + compiler.get_pch_suffix()) + dep = dst + '.' + compiler.get_depfile_suffix() + return commands, dep, dst, [] # Gcc does not create an object file during pch generation. + + def generate_pch(self, target, header_deps=None): + header_deps = header_deps if header_deps is not None else [] + pch_objects = [] + for lang in ['c', 'cpp']: + pch = target.get_pch(lang) + if not pch: + continue + if not has_path_sep(pch[0]) or not has_path_sep(pch[-1]): + msg = f'Precompiled header of {target.get_basename()!r} must not be in the same ' \ + 'directory as source, please put it in a subdirectory.' 
+ raise InvalidArguments(msg) + compiler = target.compilers[lang] + if isinstance(compiler, VisualStudioLikeCompiler): + (commands, dep, dst, objs, src) = self.generate_msvc_pch_command(target, compiler, pch) + extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) + elif compiler.id == 'intel': + # Intel generates on target generation + continue + else: + src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) + (commands, dep, dst, objs) = self.generate_gcc_pch_command(target, compiler, pch[0]) + extradep = None + pch_objects += objs + rulename = self.compiler_to_pch_rule_name(compiler) + elem = NinjaBuildElement(self.all_outputs, dst, rulename, src) + if extradep is not None: + elem.add_dep(extradep) + self.add_header_deps(target, elem, header_deps) + elem.add_item('ARGS', commands) + elem.add_item('DEPFILE', dep) + self.add_build(elem) + return pch_objects + + def get_target_shsym_filename(self, target): + # Always name the .symbols file after the primary build output because it always exists + targetdir = self.get_target_private_dir(target) + return os.path.join(targetdir, target.get_filename() + '.symbols') + + def generate_shsym(self, target): + target_file = self.get_target_filename(target) + symname = self.get_target_shsym_filename(target) + elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file) + # The library we will actually link to, which is an import library on Windows (not the DLL) + elem.add_item('IMPLIB', self.get_target_filename_for_linking(target)) + if self.environment.is_cross_build(): + elem.add_item('CROSS', '--cross-host=' + self.environment.machines[target.for_machine].system) + self.add_build(elem) + + def get_import_filename(self, target): + return os.path.join(self.get_target_dir(target), target.import_filename) + + def get_target_type_link_args(self, target, linker): + commands = [] + if isinstance(target, build.Executable): + # Currently only used with the Swift compiler to add '-emit-executable' + commands += linker.get_std_exe_link_args() + # If export_dynamic, add the appropriate linker arguments + if target.export_dynamic: + commands += linker.gen_export_dynamic_link_args(self.environment) + # If implib, and that's significant on this platform (i.e. 
Windows using either GCC or Visual Studio) + if target.import_filename: + commands += linker.gen_import_library_args(self.get_import_filename(target)) + if target.pie: + commands += linker.get_pie_link_args() + elif isinstance(target, build.SharedLibrary): + if isinstance(target, build.SharedModule): + options = self.environment.coredata.options + commands += linker.get_std_shared_module_link_args(options) + else: + commands += linker.get_std_shared_lib_link_args() + # All shared libraries are PIC + commands += linker.get_pic_args() + # Add -Wl,-soname arguments on Linux, -install_name on OS X + commands += linker.get_soname_args( + self.environment, target.prefix, target.name, target.suffix, + target.soversion, target.darwin_versions, + isinstance(target, build.SharedModule)) + # This is only visited when building for Windows using either GCC or Visual Studio + if target.vs_module_defs and hasattr(linker, 'gen_vs_module_defs_args'): + commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src)) + # This is only visited when building for Windows using either GCC or Visual Studio + if target.import_filename: + commands += linker.gen_import_library_args(self.get_import_filename(target)) + elif isinstance(target, build.StaticLibrary): + commands += linker.get_std_link_args() + else: + raise RuntimeError('Unknown build target type.') + return commands + + def get_target_type_link_args_post_dependencies(self, target, linker): + commands = [] + if isinstance(target, build.Executable): + # If gui_app is significant on this platform, add the appropriate linker arguments. + # Unfortunately this can't be done in get_target_type_link_args, because some misguided + # libraries (such as SDL2) add -mwindows to their link flags. + m = self.environment.machines[target.for_machine] + + if m.is_windows() or m.is_cygwin(): + if target.gui_app is not None: + commands += linker.get_gui_app_args(target.gui_app) + else: + commands += linker.get_win_subsystem_args(target.win_subsystem) + return commands + + def get_link_whole_args(self, linker, target): + use_custom = False + if isinstance(linker, mixins.visualstudio.MSVCCompiler): + # Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2 + # (incidentally, the "linker" here actually refers to cl.exe) + if mesonlib.version_compare(linker.version, '<19.00.23918'): + use_custom = True + + if use_custom: + objects_from_static_libs: T.List[ExtractedObjects] = [] + for dep in target.link_whole_targets: + l = dep.extract_all_objects(False) + objects_from_static_libs += self.determine_ext_objs(l, '') + objects_from_static_libs.extend(self.flatten_object_list(dep)) + + return objects_from_static_libs + else: + target_args = self.build_target_link_arguments(linker, target.link_whole_targets) + return linker.get_link_whole_for(target_args) if target_args else [] + + @lru_cache(maxsize=None) + def guess_library_absolute_path(self, linker, libname, search_dirs, patterns) -> Path: + for d in search_dirs: + for p in patterns: + trial = CCompiler._get_trials_from_pattern(p, d, libname) + if not trial: + continue + trial = CCompiler._get_file_from_list(self.environment, trial) + if not trial: + continue + # Return the first result + return trial + + def guess_external_link_dependencies(self, linker, target, commands, internal): + # Ideally the linker would generate dependency information that could be used. 
+ # But that has 2 problems: + # * currently ld can not create dependency information in a way that ninja can use: + # https://sourceware.org/bugzilla/show_bug.cgi?id=22843 + # * Meson optimizes libraries from the same build using the symbol extractor. + # Just letting ninja use ld generated dependencies would undo this optimization. + search_dirs = OrderedSet() + libs = OrderedSet() + absolute_libs = [] + + build_dir = self.environment.get_build_dir() + # the following loop sometimes consumes two items from command in one pass + it = iter(linker.native_args_to_unix(commands)) + for item in it: + if item in internal and not item.startswith('-'): + continue + + if item.startswith('-L'): + if len(item) > 2: + path = item[2:] + else: + try: + path = next(it) + except StopIteration: + mlog.warning("Generated linker command has -L argument without following path") + break + if not os.path.isabs(path): + path = os.path.join(build_dir, path) + search_dirs.add(path) + elif item.startswith('-l'): + if len(item) > 2: + lib = item[2:] + else: + try: + lib = next(it) + except StopIteration: + mlog.warning("Generated linker command has '-l' argument without following library name") + break + libs.add(lib) + elif os.path.isabs(item) and self.environment.is_library(item) and os.path.isfile(item): + absolute_libs.append(item) + + guessed_dependencies = [] + # TODO The get_library_naming requirement currently excludes link targets that use d or fortran as their main linker + try: + static_patterns = linker.get_library_naming(self.environment, LibType.STATIC, strict=True) + shared_patterns = linker.get_library_naming(self.environment, LibType.SHARED, strict=True) + search_dirs = tuple(search_dirs) + tuple(linker.get_library_dirs(self.environment)) + for libname in libs: + # be conservative and record most likely shared and static resolution, because we don't know exactly + # which one the linker will prefer + staticlibs = self.guess_library_absolute_path(linker, libname, + search_dirs, static_patterns) + sharedlibs = self.guess_library_absolute_path(linker, libname, + search_dirs, shared_patterns) + if staticlibs: + guessed_dependencies.append(staticlibs.resolve().as_posix()) + if sharedlibs: + guessed_dependencies.append(sharedlibs.resolve().as_posix()) + except (mesonlib.MesonException, AttributeError) as e: + if 'get_library_naming' not in str(e): + raise + + return guessed_dependencies + absolute_libs + + def generate_prelink(self, target, obj_list): + assert(isinstance(target, build.StaticLibrary)) + prelink_name = os.path.join(self.get_target_private_dir(target), target.name + '-prelink.o') + elem = NinjaBuildElement(self.all_outputs, [prelink_name], 'CUSTOM_COMMAND', obj_list) + + prelinker = target.get_prelinker() + cmd = prelinker.exelist[:] + cmd += prelinker.get_prelink_args(prelink_name, obj_list) + + cmd = self.replace_paths(target, cmd) + elem.add_item('COMMAND', cmd) + elem.add_item('description', f'Prelinking {prelink_name}.') + self.add_build(elem) + return [prelink_name] + + def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.Union['Compiler', 'StaticLinker'], extra_args=None, stdlib_args=None): + extra_args = extra_args if extra_args is not None else [] + stdlib_args = stdlib_args if stdlib_args is not None else [] + implicit_outs = [] + if isinstance(target, build.StaticLibrary): + linker_base = 'STATIC' + else: + linker_base = linker.get_language() # Fixme. 
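+        # The rule name assembled just below is e.g. 'c_LINKER' for a C executable or
+        # shared library and 'STATIC_LINKER' for a static library; get_rule_suffix()
+        # appends a machine suffix so build-machine targets use their own rules when
+        # cross compiling.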
+ if isinstance(target, build.SharedLibrary): + self.generate_shsym(target) + crstr = self.get_rule_suffix(target.for_machine) + linker_rule = linker_base + '_LINKER' + crstr + # Create an empty commands list, and start adding link arguments from + # various sources in the order in which they must override each other + # starting from hard-coded defaults followed by build options and so on. + # + # Once all the linker options have been passed, we will start passing + # libraries and library paths from internal and external sources. + commands = linker.compiler_args() + # First, the trivial ones that are impossible to override. + # + # Add linker args for linking this target derived from 'base' build + # options passed on the command-line, in default_options, etc. + # These have the lowest priority. + if isinstance(target, build.StaticLibrary): + commands += linker.get_base_link_args(self.get_base_options_for_target(target)) + else: + commands += compilers.get_base_link_args(self.get_base_options_for_target(target), + linker, + isinstance(target, build.SharedModule)) + # Add -nostdlib if needed; can't be overridden + commands += self.get_no_stdlib_link_args(target, linker) + # Add things like /NOLOGO; usually can't be overridden + commands += linker.get_linker_always_args() + # Add buildtype linker args: optimization level, etc. + commands += linker.get_buildtype_linker_args(self.get_option_for_target(OptionKey('buildtype'), target)) + # Add /DEBUG and the pdb filename when using MSVC + if self.get_option_for_target(OptionKey('debug'), target): + commands += self.get_link_debugfile_args(linker, target, outname) + debugfile = self.get_link_debugfile_name(linker, target, outname) + if debugfile is not None: + implicit_outs += [debugfile] + # Add link args specific to this BuildTarget type, such as soname args, + # PIC, import library generation, etc. + commands += self.get_target_type_link_args(target, linker) + # Archives that are copied wholesale in the result. Must be before any + # other link targets so missing symbols from whole archives are found in those. + if not isinstance(target, build.StaticLibrary): + commands += self.get_link_whole_args(linker, target) + + if not isinstance(target, build.StaticLibrary): + # Add link args added using add_project_link_arguments() + commands += self.build.get_project_link_args(linker, target.subproject, target.for_machine) + # Add link args added using add_global_link_arguments() + # These override per-project link arguments + commands += self.build.get_global_link_args(linker, target.for_machine) + # Link args added from the env: LDFLAGS. We want these to override + # all the defaults but not the per-target link args. + commands += self.environment.coredata.get_external_link_args(target.for_machine, linker.get_language()) + + # Now we will add libraries and library paths from various sources + + # Set runtime-paths so we can run executables without needing to set + # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows. + if has_path_sep(target.name): + # Target names really should not have slashes in them, but + # unfortunately we did not check for that and some downstream projects + # now have them. Once slashes are forbidden, remove this bit. 
+ target_slashname_workaround_dir = os.path.join( + os.path.dirname(target.name), + self.get_target_dir(target)) + else: + target_slashname_workaround_dir = self.get_target_dir(target) + (rpath_args, target.rpath_dirs_to_remove) = ( + linker.build_rpath_args(self.environment, + self.environment.get_build_dir(), + target_slashname_workaround_dir, + self.determine_rpath_dirs(target), + target.build_rpath, + target.install_rpath)) + commands += rpath_args + + # Add link args to link to all internal libraries (link_with:) and + # internal dependencies needed by this target. + if linker_base == 'STATIC': + # Link arguments of static libraries are not put in the command + # line of the library. They are instead appended to the command + # line where the static library is used. + dependencies = [] + else: + dependencies = target.get_dependencies() + internal = self.build_target_link_arguments(linker, dependencies) + commands += internal + # Only non-static built targets need link args and link dependencies + if not isinstance(target, build.StaticLibrary): + # For 'automagic' deps: Boost and GTest. Also dependency('threads'). + # pkg-config puts the thread flags itself via `Cflags:` + + commands += linker.get_target_link_args(target) + # External deps must be last because target link libraries may depend on them. + for dep in target.get_external_deps(): + # Extend without reordering or de-dup to preserve `-L -l` sets + # https://github.com/mesonbuild/meson/issues/1718 + commands.extend_preserving_lflags(linker.get_dependency_link_args(dep)) + for d in target.get_dependencies(): + if isinstance(d, build.StaticLibrary): + for dep in d.get_external_deps(): + commands.extend_preserving_lflags(linker.get_dependency_link_args(dep)) + + # Add link args specific to this BuildTarget type that must not be overridden by dependencies + commands += self.get_target_type_link_args_post_dependencies(target, linker) + + # Add link args for c_* or cpp_* build options. Currently this only + # adds c_winlibs and cpp_winlibs when building for Windows. This needs + # to be after all internal and external libraries so that unresolved + # symbols from those can be found here. This is needed when the + # *_winlibs that we want to link to are static mingw64 libraries. + if isinstance(linker, Compiler): + # The static linker doesn't know what language it is building, so we + # don't know what option. Fortunately, it doesn't care to see the + # language-specific options either. + # + # We shouldn't check whether we are making a static library, because + # in the LTO case we do use a real compiler here. + commands += linker.get_option_link_args(self.environment.coredata.options) + + dep_targets = [] + dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal)) + + # Add libraries generated by custom targets + custom_target_libraries = self.get_custom_target_provided_libraries(target) + commands += extra_args + commands += custom_target_libraries + commands += stdlib_args # Standard library arguments go last, because they never depend on anything. 
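+        # Everything gathered above becomes a single link edge in build.ninja, roughly
+        # (output and input names invented for the example):
+        #
+        #     build prog: c_LINKER prog.p/main.c.o | subdir/libbar.so.symbols
+        #         LINK_ARGS = ...the 'commands' list above...
+        #
+        # The implicit inputs after '|' come from the dep_targets collected below,
+        # e.g. the .symbols files of internal shared libraries, so relinking is only
+        # forced when a library's exported symbols actually change.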
+ dep_targets.extend([self.get_dependency_filename(t) for t in dependencies]) + dep_targets.extend([self.get_dependency_filename(t) + for t in target.link_depends]) + elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs) + elem.add_dep(dep_targets + custom_target_libraries) + elem.add_item('LINK_ARGS', commands) + return elem + + def get_dependency_filename(self, t): + if isinstance(t, build.SharedLibrary): + return self.get_target_shsym_filename(t) + elif isinstance(t, mesonlib.File): + if t.is_built: + return t.relative_name() + else: + return t.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir()) + return self.get_target_filename(t) + + def generate_shlib_aliases(self, target, outdir): + aliases = target.get_aliases() + for alias, to in aliases.items(): + aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias) + try: + os.remove(aliasfile) + except Exception: + pass + try: + os.symlink(to, aliasfile) + except NotImplementedError: + mlog.debug("Library versioning disabled because symlinks are not supported.") + except OSError: + mlog.debug("Library versioning disabled because we do not have symlink creation privileges.") + + def generate_custom_target_clean(self, trees): + e = NinjaBuildElement(self.all_outputs, 'meson-clean-ctlist', 'CUSTOM_COMMAND', 'PHONY') + d = CleanTrees(self.environment.get_build_dir(), trees) + d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat') + e.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'cleantrees', d_file]) + e.add_item('description', 'Cleaning custom target directories') + self.add_build(e) + # Alias that runs the target defined above + self.create_target_alias('meson-clean-ctlist') + # Write out the data file passed to the script + with open(d_file, 'wb') as ofile: + pickle.dump(d, ofile) + return 'clean-ctlist' + + def generate_gcov_clean(self): + gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY') + gcno_elem.add_item('COMMAND', mesonlib.get_meson_command() + ['--internal', 'delwithsuffix', '.', 'gcno']) + gcno_elem.add_item('description', 'Deleting gcno files') + self.add_build(gcno_elem) + # Alias that runs the target defined above + self.create_target_alias('meson-clean-gcno') + + gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY') + gcda_elem.add_item('COMMAND', mesonlib.get_meson_command() + ['--internal', 'delwithsuffix', '.', 'gcda']) + gcda_elem.add_item('description', 'Deleting gcda files') + self.add_build(gcda_elem) + # Alias that runs the target defined above + self.create_target_alias('meson-clean-gcda') + + def get_user_option_args(self): + cmds = [] + for (k, v) in self.environment.coredata.options.items(): + if k.is_project(): + cmds.append('-D' + str(k) + '=' + (v.value if isinstance(v.value, str) else str(v.value).lower())) + # The order of these arguments must be the same between runs of Meson + # to ensure reproducible output. The order we pass them shouldn't + # affect behavior in any other way. 
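+        # For example, hypothetical project options {'use_foo': True, 'level': '2'}
+        # come back as ['-Dlevel=2', '-Duse_foo=true']: booleans are lower-cased above
+        # and the list is sorted below.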
+ return sorted(cmds) + + def generate_dist(self): + elem = NinjaBuildElement(self.all_outputs, 'meson-dist', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('DESC', 'Creating source packages') + elem.add_item('COMMAND', self.environment.get_build_command() + ['dist']) + elem.add_item('pool', 'console') + self.add_build(elem) + # Alias that runs the target defined above + self.create_target_alias('meson-dist') + + def generate_scanbuild(self): + if not environment.detect_scanbuild(): + return + if ('', 'scan-build') in self.build.run_target_names: + return + cmd = self.environment.get_build_command() + \ + ['--internal', 'scanbuild', self.environment.source_dir, self.environment.build_dir] + \ + self.environment.get_build_command() + self.get_user_option_args() + elem = NinjaBuildElement(self.all_outputs, 'meson-scan-build', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + # Alias that runs the target defined above + self.create_target_alias('meson-scan-build') + + def generate_clangtool(self, name, extra_arg=None): + target_name = 'clang-' + name + extra_args = [] + if extra_arg: + target_name += f'-{extra_arg}' + extra_args.append(f'--{extra_arg}') + if not os.path.exists(os.path.join(self.environment.source_dir, '.clang-' + name)) and \ + not os.path.exists(os.path.join(self.environment.source_dir, '_clang-' + name)): + return + if target_name in self.all_outputs: + return + if ('', target_name) in self.build.run_target_names: + return + cmd = self.environment.get_build_command() + \ + ['--internal', 'clang' + name, self.environment.source_dir, self.environment.build_dir] + \ + extra_args + elem = NinjaBuildElement(self.all_outputs, 'meson-' + target_name, 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + self.create_target_alias('meson-' + target_name) + + def generate_clangformat(self): + if not environment.detect_clangformat(): + return + self.generate_clangtool('format') + self.generate_clangtool('format', 'check') + + def generate_clangtidy(self): + import shutil + if not shutil.which('clang-tidy'): + return + self.generate_clangtool('tidy') + + def generate_tags(self, tool, target_name): + import shutil + if not shutil.which(tool): + return + if ('', target_name) in self.build.run_target_names: + return + if target_name in self.all_outputs: + return + cmd = self.environment.get_build_command() + \ + ['--internal', 'tags', tool, self.environment.source_dir] + elem = NinjaBuildElement(self.all_outputs, 'meson-' + target_name, 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + # Alias that runs the target defined above + self.create_target_alias('meson-' + target_name) + + # For things like scan-build and other helper tools we might have. 
+ def generate_utils(self): + self.generate_scanbuild() + self.generate_clangformat() + self.generate_clangtidy() + self.generate_tags('etags', 'TAGS') + self.generate_tags('ctags', 'ctags') + self.generate_tags('cscope', 'cscope') + cmd = self.environment.get_build_command() + ['--internal', 'uninstall'] + elem = NinjaBuildElement(self.all_outputs, 'meson-uninstall', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + # Alias that runs the target defined above + self.create_target_alias('meson-uninstall') + + def generate_ending(self): + targetlist = [] + for t in self.get_build_by_default_targets().values(): + # Add the first output of each target to the 'all' target so that + # they are all built + targetlist.append(os.path.join(self.get_target_dir(t), t.get_outputs()[0])) + + elem = NinjaBuildElement(self.all_outputs, 'all', 'phony', targetlist) + self.add_build(elem) + + elem = NinjaBuildElement(self.all_outputs, 'meson-clean', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', self.ninja_command + ['-t', 'clean']) + elem.add_item('description', 'Cleaning') + # Alias that runs the above-defined meson-clean target + self.create_target_alias('meson-clean') + + # If we have custom targets in this project, add all their outputs to + # the list that is passed to the `cleantrees.py` script. The script + # will manually delete all custom_target outputs that are directories + # instead of files. This is needed because on platforms other than + # Windows, Ninja only deletes directories while cleaning if they are + # empty. https://github.com/mesonbuild/meson/issues/1220 + ctlist = [] + for t in self.build.get_targets().values(): + if isinstance(t, build.CustomTarget): + # Create a list of all custom target outputs + for o in t.get_outputs(): + ctlist.append(os.path.join(self.get_target_dir(t), o)) + if ctlist: + elem.add_dep(self.generate_custom_target_clean(ctlist)) + + if OptionKey('b_coverage') in self.environment.coredata.options and \ + self.environment.coredata.options[OptionKey('b_coverage')].value: + self.generate_gcov_clean() + elem.add_dep('clean-gcda') + elem.add_dep('clean-gcno') + self.add_build(elem) + + deps = self.get_regen_filelist() + elem = NinjaBuildElement(self.all_outputs, 'build.ninja', 'REGENERATE_BUILD', deps) + elem.add_item('pool', 'console') + self.add_build(elem) + + elem = NinjaBuildElement(self.all_outputs, 'reconfigure', 'REGENERATE_BUILD', 'PHONY') + elem.add_item('pool', 'console') + self.add_build(elem) + + elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '') + self.add_build(elem) + + def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]: + if target_id not in self.introspection_data or len(self.introspection_data[target_id]) == 0: + return super().get_introspection_data(target_id, target) + + result = [] + for i in self.introspection_data[target_id].values(): + result += [i] + return result + + +def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compiler) -> T.List[str]: + """ + scan a Fortran file for dependencies. 
Needs to be distinct from target + to allow for recursion induced by `include` statements.er + + It makes a number of assumptions, including + + * `use`, `module`, `submodule` name is not on a continuation line + + Regex + ----- + + * `incre` works for `#include "foo.f90"` and `include "foo.f90"` + * `usere` works for legacy and Fortran 2003 `use` statements + * `submodre` is for Fortran >= 2008 `submodule` + """ + + incre = re.compile(FORTRAN_INCLUDE_PAT, re.IGNORECASE) + usere = re.compile(FORTRAN_USE_PAT, re.IGNORECASE) + submodre = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE) + + mod_files = [] + src = Path(src) + with src.open(encoding='ascii', errors='ignore') as f: + for line in f: + # included files + incmatch = incre.match(line) + if incmatch is not None: + incfile = src.parent / incmatch.group(1) + # NOTE: src.parent is most general, in particular for CMake subproject with Fortran file + # having an `include 'foo.f'` statement. + if incfile.suffix.lower()[1:] in compiler.file_suffixes: + mod_files.extend(_scan_fortran_file_deps(incfile, srcdir, dirname, tdeps, compiler)) + # modules + usematch = usere.match(line) + if usematch is not None: + usename = usematch.group(1).lower() + if usename == 'intrinsic': # this keeps the regex simpler + continue + if usename not in tdeps: + # The module is not provided by any source file. This + # is due to: + # a) missing file/typo/etc + # b) using a module provided by the compiler, such as + # OpenMP + # There's no easy way to tell which is which (that I + # know of) so just ignore this and go on. Ideally we + # would print a warning message to the user but this is + # a common occurrence, which would lead to lots of + # distracting noise. + continue + srcfile = srcdir / tdeps[usename].fname # type: Path + if not srcfile.is_file(): + if srcfile.name != src.name: # generated source file + pass + else: # subproject + continue + elif srcfile.samefile(src): # self-reference + continue + + mod_name = compiler.module_name_to_filename(usename) + mod_files.append(str(dirname / mod_name)) + else: # submodules + submodmatch = submodre.match(line) + if submodmatch is not None: + parents = submodmatch.group(1).lower().split(':') + assert len(parents) in (1, 2), ( + 'submodule ancestry must be specified as' + f' ancestor:parent but Meson found {parents}') + + ancestor_child = '_'.join(parents) + if ancestor_child not in tdeps: + raise MesonException("submodule {} relies on ancestor module {} that was not found.".format(submodmatch.group(2).lower(), ancestor_child.split('_')[0])) + submodsrcfile = srcdir / tdeps[ancestor_child].fname # type: Path + if not submodsrcfile.is_file(): + if submodsrcfile.name != src.name: # generated source file + pass + else: # subproject + continue + elif submodsrcfile.samefile(src): # self-reference + continue + mod_name = compiler.module_name_to_filename(ancestor_child) + mod_files.append(str(dirname / mod_name)) + return mod_files diff --git a/meson/mesonbuild/backend/vs2010backend.py b/meson/mesonbuild/backend/vs2010backend.py new file mode 100644 index 000000000..6e6e47fdb --- /dev/null +++ b/meson/mesonbuild/backend/vs2010backend.py @@ -0,0 +1,1562 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +import os +import xml.dom.minidom +import xml.etree.ElementTree as ET +import uuid +import typing as T +from pathlib import Path, PurePath + +from . import backends +from .. import build +from .. import dependencies +from .. import mlog +from .. import compilers +from ..interpreter import Interpreter +from ..mesonlib import ( + File, MesonException, python_command, replace_if_different, OptionKey, version_compare, +) +from ..environment import Environment, build_filename + +def autodetect_vs_version(build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + vs_version = os.getenv('VisualStudioVersion', None) + vs_install_dir = os.getenv('VSINSTALLDIR', None) + if not vs_install_dir: + raise MesonException('Could not detect Visual Studio: Environment variable VSINSTALLDIR is not set!\n' + 'Are you running meson from the Visual Studio Developer Command Prompt?') + # VisualStudioVersion is set since Visual Studio 11.0, but sometimes + # vcvarsall.bat doesn't set it, so also use VSINSTALLDIR + if vs_version == '11.0' or 'Visual Studio 11' in vs_install_dir: + from mesonbuild.backend.vs2012backend import Vs2012Backend + return Vs2012Backend(build, interpreter) + if vs_version == '12.0' or 'Visual Studio 12' in vs_install_dir: + from mesonbuild.backend.vs2013backend import Vs2013Backend + return Vs2013Backend(build, interpreter) + if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir: + from mesonbuild.backend.vs2015backend import Vs2015Backend + return Vs2015Backend(build, interpreter) + if vs_version == '15.0' or 'Visual Studio 17' in vs_install_dir or \ + 'Visual Studio\\2017' in vs_install_dir: + from mesonbuild.backend.vs2017backend import Vs2017Backend + return Vs2017Backend(build, interpreter) + if vs_version == '16.0' or 'Visual Studio 19' in vs_install_dir or \ + 'Visual Studio\\2019' in vs_install_dir: + from mesonbuild.backend.vs2019backend import Vs2019Backend + return Vs2019Backend(build, interpreter) + if 'Visual Studio 10.0' in vs_install_dir: + return Vs2010Backend(build, interpreter) + raise MesonException('Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n' + 'Please specify the exact backend to use.'.format(vs_version, vs_install_dir)) + +def split_o_flags_args(args): + """ + Splits any /O args and returns them. Does not take care of flags overriding + previous ones. Skips non-O flag arguments. 
+ + ['/Ox', '/Ob1'] returns ['/Ox', '/Ob1'] + ['/Oxj', '/MP'] returns ['/Ox', '/Oj'] + """ + o_flags = [] + for arg in args: + if not arg.startswith('/O'): + continue + flags = list(arg[2:]) + # Assume that this one can't be clumped with the others since it takes + # an argument itself + if 'b' in flags: + o_flags.append(arg) + else: + o_flags += ['/O' + f for f in flags] + return o_flags + +def generate_guid_from_path(path, path_type): + return str(uuid.uuid5(uuid.NAMESPACE_URL, 'meson-vs-' + path_type + ':' + str(path))).upper() + +class Vs2010Backend(backends.Backend): + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'vs2010' + self.project_file_version = '10.0.30319.1' + self.platform_toolset = None + self.vs_version = '2010' + self.windows_target_platform_version = None + self.subdirs = {} + self.handled_target_deps = {} + + def get_target_private_dir(self, target): + return os.path.join(self.get_target_dir(target), target.get_id()) + + def generate_custom_generator_commands(self, target, parent_node): + generator_output_files = [] + custom_target_include_dirs = [] + custom_target_output_files = [] + target_private_dir = self.relpath(self.get_target_private_dir(target), self.get_target_dir(target)) + down = self.target_to_build_root(target) + for genlist in target.get_generated_sources(): + if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)): + for i in genlist.get_outputs(): + # Path to the generated source from the current vcxproj dir via the build root + ipath = os.path.join(down, self.get_target_dir(genlist), i) + custom_target_output_files.append(ipath) + idir = self.relpath(self.get_target_dir(genlist), self.get_target_dir(target)) + if idir not in custom_target_include_dirs: + custom_target_include_dirs.append(idir) + else: + generator = genlist.get_generator() + exe = generator.get_exe() + infilelist = genlist.get_inputs() + outfilelist = genlist.get_outputs() + source_dir = os.path.join(down, self.build_to_src, genlist.subdir) + exe_arr = self.build_target_to_cmd_array(exe) + idgroup = ET.SubElement(parent_node, 'ItemGroup') + for i in range(len(infilelist)): + if len(infilelist) == len(outfilelist): + sole_output = os.path.join(target_private_dir, outfilelist[i]) + else: + sole_output = '' + curfile = infilelist[i] + infilename = os.path.join(down, curfile.rel_to_builddir(self.build_to_src)) + deps = self.get_custom_target_depend_files(genlist, True) + base_args = generator.get_arglist(infilename) + outfiles_rel = genlist.get_outputs_for(curfile) + outfiles = [os.path.join(target_private_dir, of) for of in outfiles_rel] + generator_output_files += outfiles + args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output) + for x in base_args] + args = self.replace_outputs(args, target_private_dir, outfiles_rel) + args = [x.replace("@SOURCE_DIR@", self.environment.get_source_dir()) + .replace("@BUILD_DIR@", target_private_dir) + for x in args] + args = [x.replace("@CURRENT_SOURCE_DIR@", source_dir) for x in args] + args = [x.replace("@SOURCE_ROOT@", self.environment.get_source_dir()) + .replace("@BUILD_ROOT@", self.environment.get_build_dir()) + for x in args] + args = [x.replace('\\', '/') for x in args] + cmd = exe_arr + self.replace_extra_args(args, genlist) + # Always use a wrapper because MSBuild eats random characters when + # there are many arguments. 
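+                    # (The wrapper re-runs the serialized command through meson's
+                    # '--internal exe' helper built by as_meson_exe_cmdline() below,
+                    # so argument quoting is handled by meson rather than by MSBuild.)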
+ tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + cmd, _ = self.as_meson_exe_cmdline( + 'generator ' + cmd[0], + cmd[0], + cmd[1:], + workdir=tdir_abs, + capture=outfiles[0] if generator.capture else None, + force_serialize=True + ) + deps = cmd[-1:] + deps + abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + os.makedirs(abs_pdir, exist_ok=True) + cbs = ET.SubElement(idgroup, 'CustomBuild', Include=infilename) + ET.SubElement(cbs, 'Command').text = ' '.join(self.quote_arguments(cmd)) + ET.SubElement(cbs, 'Outputs').text = ';'.join(outfiles) + ET.SubElement(cbs, 'AdditionalInputs').text = ';'.join(deps) + return generator_output_files, custom_target_output_files, custom_target_include_dirs + + def generate(self): + target_machine = self.interpreter.builtin['target_machine'].cpu_family_method(None, None) + if target_machine == '64' or target_machine == 'x86_64': + # amd64 or x86_64 + self.platform = 'x64' + elif target_machine == 'x86': + # x86 + self.platform = 'Win32' + elif target_machine == 'aarch64' or target_machine == 'arm64': + target_cpu = self.interpreter.builtin['target_machine'].cpu_method(None, None) + if target_cpu == 'arm64ec': + self.platform = 'arm64ec' + else: + self.platform = 'arm64' + elif 'arm' in target_machine.lower(): + self.platform = 'ARM' + else: + raise MesonException('Unsupported Visual Studio platform: ' + target_machine) + self.buildtype = self.environment.coredata.get_option(OptionKey('buildtype')) + self.optimization = self.environment.coredata.get_option(OptionKey('optimization')) + self.debug = self.environment.coredata.get_option(OptionKey('debug')) + try: + self.sanitize = self.environment.coredata.get_option(OptionKey('b_sanitize')) + except MesonException: + self.sanitize = 'none' + sln_filename = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.sln') + projlist = self.generate_projects() + self.gen_testproj('RUN_TESTS', os.path.join(self.environment.get_build_dir(), 'RUN_TESTS.vcxproj')) + self.gen_installproj('RUN_INSTALL', os.path.join(self.environment.get_build_dir(), 'RUN_INSTALL.vcxproj')) + self.gen_regenproj('REGEN', os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')) + self.generate_solution(sln_filename, projlist) + self.generate_regen_info() + Vs2010Backend.touch_regen_timestamp(self.environment.get_build_dir()) + + @staticmethod + def get_regen_stampfile(build_dir: str) -> None: + return os.path.join(os.path.join(build_dir, Environment.private_dir), 'regen.stamp') + + @staticmethod + def touch_regen_timestamp(build_dir: str) -> None: + with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w', encoding='utf-8'): + pass + + def get_vcvars_command(self): + has_arch_values = 'VSCMD_ARG_TGT_ARCH' in os.environ and 'VSCMD_ARG_HOST_ARCH' in os.environ + + # Use vcvarsall.bat if we found it. 
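+        # The return value is a quoted command string such as (illustrative paths)
+        # '"C:\VS\VC\Auxiliary\Build\vcvarsall.bat" x86_x64' or, for the VsDevCmd
+        # fallback below, '"...\VsDevCmd.bat" -arch=x64 -host_arch=x86'; an empty
+        # string is returned if neither script can be found.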
+ if 'VCINSTALLDIR' in os.environ: + vs_version = os.environ['VisualStudioVersion'] \ + if 'VisualStudioVersion' in os.environ else None + relative_path = 'Auxiliary\\Build\\' if vs_version is not None and vs_version >= '15.0' else '' + script_path = os.environ['VCINSTALLDIR'] + relative_path + 'vcvarsall.bat' + if os.path.exists(script_path): + if has_arch_values: + target_arch = os.environ['VSCMD_ARG_TGT_ARCH'] + host_arch = os.environ['VSCMD_ARG_HOST_ARCH'] + else: + target_arch = os.environ.get('Platform', 'x86') + host_arch = target_arch + arch = host_arch + '_' + target_arch if host_arch != target_arch else target_arch + return f'"{script_path}" {arch}' + + # Otherwise try the VS2017 Developer Command Prompt. + if 'VS150COMNTOOLS' in os.environ and has_arch_values: + script_path = os.environ['VS150COMNTOOLS'] + 'VsDevCmd.bat' + if os.path.exists(script_path): + return '"%s" -arch=%s -host_arch=%s' % \ + (script_path, os.environ['VSCMD_ARG_TGT_ARCH'], os.environ['VSCMD_ARG_HOST_ARCH']) + return '' + + def get_obj_target_deps(self, obj_list): + result = {} + for o in obj_list: + if isinstance(o, build.ExtractedObjects): + result[o.target.get_id()] = o.target + return result.items() + + def get_target_deps(self, t, recursive=False): + all_deps = {} + for target in t.values(): + if isinstance(target, build.CustomTarget): + for d in target.get_target_dependencies(): + all_deps[d.get_id()] = d + elif isinstance(target, build.RunTarget): + for d in target.get_dependencies(): + all_deps[d.get_id()] = d + elif isinstance(target, build.BuildTarget): + for ldep in target.link_targets: + if isinstance(ldep, build.CustomTargetIndex): + all_deps[ldep.get_id()] = ldep.target + else: + all_deps[ldep.get_id()] = ldep + for ldep in target.link_whole_targets: + if isinstance(ldep, build.CustomTargetIndex): + all_deps[ldep.get_id()] = ldep.target + else: + all_deps[ldep.get_id()] = ldep + for obj_id, objdep in self.get_obj_target_deps(target.objects): + all_deps[obj_id] = objdep + else: + raise MesonException('Unknown target type for target %s' % target) + + for gendep in target.get_generated_sources(): + if isinstance(gendep, build.CustomTarget): + all_deps[gendep.get_id()] = gendep + elif isinstance(gendep, build.CustomTargetIndex): + all_deps[gendep.target.get_id()] = gendep.target + else: + generator = gendep.get_generator() + gen_exe = generator.get_exe() + if isinstance(gen_exe, build.Executable): + all_deps[gen_exe.get_id()] = gen_exe + for d in generator.depends: + if isinstance(d, build.CustomTargetIndex): + all_deps[d.get_id()] = d.target + else: + all_deps[d.get_id()] = d + + if not t or not recursive: + return all_deps + ret = self.get_target_deps(all_deps, recursive) + ret.update(all_deps) + return ret + + def generate_solution_dirs(self, ofile, parents): + prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n' + iterpaths = reversed(parents) + # Skip first path + next(iterpaths) + for path in iterpaths: + if path not in self.subdirs: + basename = path.name + identifier = generate_guid_from_path(path, 'subdir') + # top-level directories have None as their parent_dir + parent_dir = path.parent + parent_identifier = self.subdirs[parent_dir][0] \ + if parent_dir != PurePath('.') else None + self.subdirs[path] = (identifier, parent_identifier) + prj_line = prj_templ % ( + self.environment.coredata.lang_guids['directory'], + basename, basename, self.subdirs[path][0]) + ofile.write(prj_line) + ofile.write('EndProject\n') + + def generate_solution(self, sln_filename, projlist): + default_projlist = 
self.get_build_by_default_targets() + sln_filename_tmp = sln_filename + '~' + with open(sln_filename_tmp, 'w', encoding='utf-8') as ofile: + ofile.write('Microsoft Visual Studio Solution File, Format ' + 'Version 11.00\n') + ofile.write('# Visual Studio ' + self.vs_version + '\n') + prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n' + for prj in projlist: + coredata = self.environment.coredata + if coredata.get_option(OptionKey('layout')) == 'mirror': + self.generate_solution_dirs(ofile, prj[1].parents) + target = self.build.targets[prj[0]] + lang = 'default' + if hasattr(target, 'compilers') and target.compilers: + for lang_out in target.compilers.keys(): + lang = lang_out + break + prj_line = prj_templ % ( + self.environment.coredata.lang_guids[lang], + prj[0], prj[1], prj[2]) + ofile.write(prj_line) + target_dict = {target.get_id(): target} + # Get recursive deps + recursive_deps = self.get_target_deps( + target_dict, recursive=True) + ofile.write('EndProject\n') + for dep, target in recursive_deps.items(): + if prj[0] in default_projlist: + default_projlist[dep] = target + + test_line = prj_templ % (self.environment.coredata.lang_guids['default'], + 'RUN_TESTS', 'RUN_TESTS.vcxproj', + self.environment.coredata.test_guid) + ofile.write(test_line) + ofile.write('EndProject\n') + regen_line = prj_templ % (self.environment.coredata.lang_guids['default'], + 'REGEN', 'REGEN.vcxproj', + self.environment.coredata.regen_guid) + ofile.write(regen_line) + ofile.write('EndProject\n') + install_line = prj_templ % (self.environment.coredata.lang_guids['default'], + 'RUN_INSTALL', 'RUN_INSTALL.vcxproj', + self.environment.coredata.install_guid) + ofile.write(install_line) + ofile.write('EndProject\n') + ofile.write('Global\n') + ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = ' + 'preSolution\n') + ofile.write('\t\t%s|%s = %s|%s\n' % + (self.buildtype, self.platform, self.buildtype, + self.platform)) + ofile.write('\tEndGlobalSection\n') + ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = ' + 'postSolution\n') + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (self.environment.coredata.regen_guid, self.buildtype, + self.platform, self.buildtype, self.platform)) + ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' % + (self.environment.coredata.regen_guid, self.buildtype, + self.platform, self.buildtype, self.platform)) + # Create the solution configuration + for p in projlist: + # Add to the list of projects in this solution + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (p[2], self.buildtype, self.platform, + self.buildtype, self.platform)) + if p[0] in default_projlist and \ + not isinstance(self.build.targets[p[0]], build.RunTarget): + # Add to the list of projects to be built + ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' % + (p[2], self.buildtype, self.platform, + self.buildtype, self.platform)) + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (self.environment.coredata.test_guid, self.buildtype, + self.platform, self.buildtype, self.platform)) + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (self.environment.coredata.install_guid, self.buildtype, + self.platform, self.buildtype, self.platform)) + ofile.write('\tEndGlobalSection\n') + ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n') + ofile.write('\t\tHideSolutionNode = FALSE\n') + ofile.write('\tEndGlobalSection\n') + if self.subdirs: + ofile.write('\tGlobalSection(NestedProjects) = ' + 'preSolution\n') + for p in projlist: + if p[1].parent != PurePath('.'): + 
ofile.write("\t\t{{{}}} = {{{}}}\n".format(p[2], self.subdirs[p[1].parent][0])) + for subdir in self.subdirs.values(): + if subdir[1]: + ofile.write("\t\t{{{}}} = {{{}}}\n".format(subdir[0], subdir[1])) + ofile.write('\tEndGlobalSection\n') + ofile.write('EndGlobal\n') + replace_if_different(sln_filename, sln_filename_tmp) + + def generate_projects(self): + startup_project = self.environment.coredata.options[OptionKey('backend_startup_project')].value + projlist = [] + startup_idx = 0 + for (i, (name, target)) in enumerate(self.build.targets.items()): + if startup_project and startup_project == target.get_basename(): + startup_idx = i + outdir = Path( + self.environment.get_build_dir(), + self.get_target_dir(target) + ) + outdir.mkdir(exist_ok=True, parents=True) + fname = name + '.vcxproj' + target_dir = PurePath(self.get_target_dir(target)) + relname = target_dir / fname + projfile_path = outdir / fname + proj_uuid = self.environment.coredata.target_guids[name] + self.gen_vcxproj(target, str(projfile_path), proj_uuid) + projlist.append((name, relname, proj_uuid)) + + # Put the startup project first in the project list + if startup_idx: + projlist = [projlist[startup_idx]] + projlist[0:startup_idx] + projlist[startup_idx + 1:-1] + + return projlist + + def split_sources(self, srclist): + sources = [] + headers = [] + objects = [] + languages = [] + for i in srclist: + if self.environment.is_header(i): + headers.append(i) + elif self.environment.is_object(i): + objects.append(i) + elif self.environment.is_source(i): + sources.append(i) + lang = self.lang_from_source_file(i) + if lang not in languages: + languages.append(lang) + elif self.environment.is_library(i): + pass + else: + # Everything that is not an object or source file is considered a header. + headers.append(i) + return sources, headers, objects, languages + + def target_to_build_root(self, target): + if self.get_target_dir(target) == '': + return '' + + directories = os.path.normpath(self.get_target_dir(target)).split(os.sep) + return os.sep.join(['..'] * len(directories)) + + def quote_arguments(self, arr): + return ['"%s"' % i for i in arr] + + def add_project_reference(self, root, include, projid, link_outputs=False): + ig = ET.SubElement(root, 'ItemGroup') + pref = ET.SubElement(ig, 'ProjectReference', Include=include) + ET.SubElement(pref, 'Project').text = '{%s}' % projid + if not link_outputs: + # Do not link in generated .lib files from dependencies automatically. + # We only use the dependencies for ordering and link in the generated + # objects and .lib files manually. + ET.SubElement(pref, 'LinkLibraryDependencies').text = 'false' + + def add_target_deps(self, root, target): + target_dict = {target.get_id(): target} + for dep in self.get_target_deps(target_dict).values(): + if dep.get_id() in self.handled_target_deps[target.get_id()]: + # This dependency was already handled manually. 
+ continue + relpath = self.get_target_dir_relative_to(dep, target) + vcxproj = os.path.join(relpath, dep.get_id() + '.vcxproj') + tid = self.environment.coredata.target_guids[dep.get_id()] + self.add_project_reference(root, vcxproj, tid) + + def create_basic_crap(self, target, guid): + project_name = target.name + root = ET.Element('Project', {'DefaultTargets': "Build", + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) + prjconf = ET.SubElement(confitems, 'ProjectConfiguration', + {'Include': self.buildtype + '|' + self.platform}) + p = ET.SubElement(prjconf, 'Configuration') + p.text = self.buildtype + pl = ET.SubElement(prjconf, 'Platform') + pl.text = self.platform + globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') + guidelem = ET.SubElement(globalgroup, 'ProjectGuid') + guidelem.text = '{%s}' % guid + kw = ET.SubElement(globalgroup, 'Keyword') + kw.text = self.platform + 'Proj' + p = ET.SubElement(globalgroup, 'Platform') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') + pname.text = project_name + if self.windows_target_platform_version: + ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props') + type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') + ET.SubElement(type_config, 'ConfigurationType') + ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte' + ET.SubElement(type_config, 'UseOfMfc').text = 'false' + if self.platform_toolset: + ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') + direlem = ET.SubElement(root, 'PropertyGroup') + fver = ET.SubElement(direlem, '_ProjectFileVersion') + fver.text = self.project_file_version + outdir = ET.SubElement(direlem, 'OutDir') + outdir.text = '.\\' + intdir = ET.SubElement(direlem, 'IntDir') + intdir.text = target.get_id() + '\\' + tname = ET.SubElement(direlem, 'TargetName') + tname.text = target.name + return root + + def gen_run_target_vcxproj(self, target, ofname, guid): + root = self.create_basic_crap(target, guid) + if not target.command: + # FIXME: This is an alias target that doesn't run any command, there + # is probably a better way than running a this dummy command. + cmd_raw = python_command + ['-c', 'exit'] + else: + _, _, cmd_raw = self.eval_custom_target_command(target) + depend_files = self.get_custom_target_depend_files(target) + target_env = self.get_run_target_env(target) + wrapper_cmd, _ = self.as_meson_exe_cmdline(target.name, target.command[0], cmd_raw[1:], + force_serialize=True, env=target_env, + verbose=True) + self.add_custom_build(root, 'run_target', ' '.join(self.quote_arguments(wrapper_cmd)), + deps=depend_files) + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + self.add_regen_dependency(root) + self.add_target_deps(root, target) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + def gen_custom_target_vcxproj(self, target, ofname, guid): + root = self.create_basic_crap(target, guid) + # We need to always use absolute paths because our invocation is always + # from the target dir, not the build root. 
+ target.absolute_paths = True + (srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True) + depend_files = self.get_custom_target_depend_files(target, True) + # Always use a wrapper because MSBuild eats random characters when + # there are many arguments. + tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + extra_bdeps = target.get_transitive_build_target_deps() + wrapper_cmd, _ = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:], + # All targets run from the target dir + workdir=tdir_abs, + extra_bdeps=extra_bdeps, + capture=ofilenames[0] if target.capture else None, + feed=srcs[0] if target.feed else None, + force_serialize=True, + env=target.env) + if target.build_always_stale: + # Use a nonexistent file to always consider the target out-of-date. + ofilenames += [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(), + 'outofdate.file'))] + self.add_custom_build(root, 'custom_target', ' '.join(self.quote_arguments(wrapper_cmd)), + deps=wrapper_cmd[-1:] + srcs + depend_files, outputs=ofilenames, + verify_files=not target.build_always_stale) + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + self.generate_custom_generator_commands(target, root) + self.add_regen_dependency(root) + self.add_target_deps(root, target) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + @classmethod + def lang_from_source_file(cls, src): + ext = src.split('.')[-1] + if ext in compilers.c_suffixes: + return 'c' + if ext in compilers.cpp_suffixes: + return 'cpp' + raise MesonException('Could not guess language from source file %s.' % src) + + def add_pch(self, pch_sources, lang, inc_cl): + if lang in pch_sources: + self.use_pch(pch_sources, lang, inc_cl) + + def create_pch(self, pch_sources, lang, inc_cl): + pch = ET.SubElement(inc_cl, 'PrecompiledHeader') + pch.text = 'Create' + self.add_pch_files(pch_sources, lang, inc_cl) + + def use_pch(self, pch_sources, lang, inc_cl): + pch = ET.SubElement(inc_cl, 'PrecompiledHeader') + pch.text = 'Use' + header = self.add_pch_files(pch_sources, lang, inc_cl) + pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles') + pch_include.text = header + ';%(ForcedIncludeFiles)' + + def add_pch_files(self, pch_sources, lang, inc_cl): + header = os.path.basename(pch_sources[lang][0]) + pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile') + # When USING PCHs, MSVC will not do the regular include + # directory lookup, but simply use a string match to find the + # PCH to use. That means the #include directive must match the + # pch_file.text used during PCH CREATION verbatim. + # When CREATING a PCH, MSVC will do the include directory + # lookup to find the actual PCH header to use. Thus, the PCH + # header must either be in the include_directories of the target + # or be in the same directory as the PCH implementation. + pch_file.text = header + pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile') + pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % lang + return header + + def is_argument_with_msbuild_xml_entry(self, entry): + # Remove arguments that have a top level XML entry so + # they are not used twice. + # FIXME add args as needed. 
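+        # e.g. '/MDd' and '/fsanitize=address' are dropped from AdditionalOptions here
+        # because gen_vcxproj() emits dedicated <RuntimeLibrary> and <EnableASAN>
+        # elements for them instead.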
+ if entry[1:].startswith('fsanitize'): + return True + return entry[1:].startswith('M') + + def add_additional_options(self, lang, parent_node, file_args): + args = [] + for arg in file_args[lang].to_native(): + if self.is_argument_with_msbuild_xml_entry(arg): + continue + if arg == '%(AdditionalOptions)': + args.append(arg) + else: + args.append(self.escape_additional_option(arg)) + ET.SubElement(parent_node, "AdditionalOptions").text = ' '.join(args) + + def add_preprocessor_defines(self, lang, parent_node, file_defines): + defines = [] + for define in file_defines[lang]: + if define == '%(PreprocessorDefinitions)': + defines.append(define) + else: + defines.append(self.escape_preprocessor_define(define)) + ET.SubElement(parent_node, "PreprocessorDefinitions").text = ';'.join(defines) + + def add_include_dirs(self, lang, parent_node, file_inc_dirs): + dirs = file_inc_dirs[lang] + ET.SubElement(parent_node, "AdditionalIncludeDirectories").text = ';'.join(dirs) + + @staticmethod + def has_objects(objects, additional_objects, generated_objects): + # Ignore generated objects, those are automatically used by MSBuild because they are part of + # the CustomBuild Outputs. + return len(objects) + len(additional_objects) > 0 + + @staticmethod + def add_generated_objects(node, generated_objects): + # Do not add generated objects to project file. Those are automatically used by MSBuild, because + # they are part of the CustomBuild Outputs. + return + + @staticmethod + def escape_preprocessor_define(define): + # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx + table = str.maketrans({'%': '%25', '$': '%24', '@': '%40', + "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', + # We need to escape backslash because it'll be un-escaped by + # Windows during process creation when it parses the arguments + # Basically, this converts `\` to `\\`. + '\\': '\\\\'}) + return define.translate(table) + + @staticmethod + def escape_additional_option(option): + # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx + table = str.maketrans({'%': '%25', '$': '%24', '@': '%40', + "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', ' ': '%20'}) + option = option.translate(table) + # Since we're surrounding the option with ", if it ends in \ that will + # escape the " when the process arguments are parsed and the starting + # " will not terminate. So we escape it if that's the case. I'm not + # kidding, this is how escaping works for process args on Windows. + if option.endswith('\\'): + option += '\\' + return f'"{option}"' + + @staticmethod + def split_link_args(args): + """ + Split a list of link arguments into three lists: + * library search paths + * library filenames (or paths) + * other link arguments + """ + lpaths = [] + libs = [] + other = [] + for arg in args: + if arg.startswith('/LIBPATH:'): + lpath = arg[9:] + # De-dup library search paths by removing older entries when + # a new one is found. This is necessary because unlike other + # search paths such as the include path, the library is + # searched for in the newest (right-most) search path first. + if lpath in lpaths: + lpaths.remove(lpath) + lpaths.append(lpath) + elif arg.startswith(('/', '-')): + other.append(arg) + # It's ok if we miss libraries with non-standard extensions here. + # They will go into the general link arguments. 
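+            # For example, ['/LIBPATH:..\libs', 'foo.lib', '/DEBUG'] splits into
+            # (['..\libs'], ['foo.lib'], ['/DEBUG']).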
+ elif arg.endswith('.lib') or arg.endswith('.a'): + # De-dup + if arg not in libs: + libs.append(arg) + else: + other.append(arg) + return lpaths, libs, other + + def _get_cl_compiler(self, target): + for lang, c in target.compilers.items(): + if lang in ('c', 'cpp'): + return c + # No source files, only objects, but we still need a compiler, so + # return a found compiler + if len(target.objects) > 0: + for lang, c in self.environment.coredata.compilers[target.for_machine].items(): + if lang in ('c', 'cpp'): + return c + raise MesonException('Could not find a C or C++ compiler. MSVC can only build C/C++ projects.') + + def _prettyprint_vcxproj_xml(self, tree, ofname): + ofname_tmp = ofname + '~' + tree.write(ofname_tmp, encoding='utf-8', xml_declaration=True) + + # ElementTree can not do prettyprinting so do it manually + doc = xml.dom.minidom.parse(ofname_tmp) + with open(ofname_tmp, 'w', encoding='utf-8') as of: + of.write(doc.toprettyxml()) + replace_if_different(ofname, ofname_tmp) + + def gen_vcxproj(self, target, ofname, guid): + mlog.debug('Generating vcxproj %s.' % target.name) + subsystem = 'Windows' + self.handled_target_deps[target.get_id()] = [] + if isinstance(target, build.Executable): + conftype = 'Application' + if target.gui_app is not None: + if not target.gui_app: + subsystem = 'Console' + else: + # If someone knows how to set the version properly, + # please send a patch. + subsystem = target.win_subsystem.split(',')[0] + elif isinstance(target, build.StaticLibrary): + conftype = 'StaticLibrary' + elif isinstance(target, build.SharedLibrary): + conftype = 'DynamicLibrary' + elif isinstance(target, build.CustomTarget): + return self.gen_custom_target_vcxproj(target, ofname, guid) + elif isinstance(target, build.RunTarget): + return self.gen_run_target_vcxproj(target, ofname, guid) + else: + raise MesonException('Unknown target type for %s' % target.get_basename()) + # Prefix to use to access the build root from the vcxproj dir + down = self.target_to_build_root(target) + # Prefix to use to access the source tree's root from the vcxproj dir + proj_to_src_root = os.path.join(down, self.build_to_src) + # Prefix to use to access the source tree's subdir from the vcxproj dir + proj_to_src_dir = os.path.join(proj_to_src_root, self.get_target_dir(target)) + (sources, headers, objects, languages) = self.split_sources(target.sources) + if self.is_unity(target): + sources = self.generate_unity_files(target, sources) + compiler = self._get_cl_compiler(target) + build_args = compiler.get_buildtype_args(self.buildtype) + build_args += compiler.get_optimization_args(self.optimization) + build_args += compiler.get_debug_args(self.debug) + build_args += compiler.sanitizer_compile_args(self.sanitize) + buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype) + vscrt_type = self.environment.coredata.options[OptionKey('b_vscrt')] + project_name = target.name + target_name = target.name + root = ET.Element('Project', {'DefaultTargets': "Build", + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) + prjconf = ET.SubElement(confitems, 'ProjectConfiguration', + {'Include': self.buildtype + '|' + self.platform}) + p = ET.SubElement(prjconf, 'Configuration') + p.text = self.buildtype + pl = ET.SubElement(prjconf, 'Platform') + pl.text = self.platform + # Globals + globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') + guidelem = 
ET.SubElement(globalgroup, 'ProjectGuid') + guidelem.text = '{%s}' % guid + kw = ET.SubElement(globalgroup, 'Keyword') + kw.text = self.platform + 'Proj' + ns = ET.SubElement(globalgroup, 'RootNamespace') + ns.text = target_name + p = ET.SubElement(globalgroup, 'Platform') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') + pname.text = project_name + if self.windows_target_platform_version: + ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props') + # Start configuration + type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') + ET.SubElement(type_config, 'ConfigurationType').text = conftype + ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte' + if self.platform_toolset: + ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset + # FIXME: Meson's LTO support needs to be integrated here + ET.SubElement(type_config, 'WholeProgramOptimization').text = 'false' + # Let VS auto-set the RTC level + ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'Default' + # Incremental linking increases code size + if '/INCREMENTAL:NO' in buildtype_link_args: + ET.SubElement(type_config, 'LinkIncremental').text = 'false' + + # Build information + compiles = ET.SubElement(root, 'ItemDefinitionGroup') + clconf = ET.SubElement(compiles, 'ClCompile') + # CRT type; debug or release + if vscrt_type.value == 'from_buildtype': + if self.buildtype == 'debug': + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' + else: + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' + elif vscrt_type.value == 'static_from_buildtype': + if self.buildtype == 'debug': + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug' + else: + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' + elif vscrt_type.value == 'mdd': + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' + elif vscrt_type.value == 'mt': + # FIXME, wrong + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' + elif vscrt_type.value == 'mtd': + # FIXME, wrong + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug' + else: + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' + # Sanitizers + if '/fsanitize=address' in build_args: + ET.SubElement(type_config, 'EnableASAN').text = 'true' + # Debug format + if '/ZI' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'EditAndContinue' + elif '/Zi' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'ProgramDatabase' + elif '/Z7' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'OldStyle' + else: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'None' + # Runtime checks + if '/RTC1' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'EnableFastChecks' + elif '/RTCu' in build_args: + 
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck' + elif '/RTCs' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck' + # Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise + # cl will give warning D9025: overriding '/Ehs' with cpp_eh value + if 'cpp' in target.compilers: + eh = self.environment.coredata.options[OptionKey('eh', machine=target.for_machine, lang='cpp')] + if eh.value == 'a': + ET.SubElement(clconf, 'ExceptionHandling').text = 'Async' + elif eh.value == 's': + ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow' + elif eh.value == 'none': + ET.SubElement(clconf, 'ExceptionHandling').text = 'false' + else: # 'sc' or 'default' + ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync' + # End configuration + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') + generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root) + (gen_src, gen_hdrs, gen_objs, gen_langs) = self.split_sources(generated_files) + (custom_src, custom_hdrs, custom_objs, custom_langs) = self.split_sources(custom_target_output_files) + gen_src += custom_src + gen_hdrs += custom_hdrs + gen_langs += custom_langs + # Project information + direlem = ET.SubElement(root, 'PropertyGroup') + fver = ET.SubElement(direlem, '_ProjectFileVersion') + fver.text = self.project_file_version + outdir = ET.SubElement(direlem, 'OutDir') + outdir.text = '.\\' + intdir = ET.SubElement(direlem, 'IntDir') + intdir.text = target.get_id() + '\\' + tfilename = os.path.splitext(target.get_filename()) + ET.SubElement(direlem, 'TargetName').text = tfilename[0] + ET.SubElement(direlem, 'TargetExt').text = tfilename[1] + + # Arguments, include dirs, defines for all files in the current target + target_args = [] + target_defines = [] + target_inc_dirs = [] + # Arguments, include dirs, defines passed to individual files in + # a target; perhaps because the args are language-specific + # + # file_args is also later split out into defines and include_dirs in + # case someone passed those in there + file_args = {l: c.compiler_args() for l, c in target.compilers.items()} + file_defines = {l: [] for l in target.compilers} + file_inc_dirs = {l: [] for l in target.compilers} + # The order in which these compile args are added must match + # generate_single_compile() and generate_basic_compiler_args() + for l, comp in target.compilers.items(): + if l in file_args: + file_args[l] += compilers.get_base_compile_args( + self.get_base_options_for_target(target), comp) + file_args[l] += comp.get_option_compile_args( + self.environment.coredata.options) + + # Add compile args added using add_project_arguments() + for l, args in self.build.projects_args[target.for_machine].get(target.subproject, {}).items(): + if l in file_args: + file_args[l] += args + # Add compile args added using add_global_arguments() + # These override per-project arguments + for l, args in self.build.global_args[target.for_machine].items(): + if l in file_args: + file_args[l] += args + # Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these + # to override all the defaults, but not the per-target compile args. 
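+ # Net precedence, summarizing the comments above and the loops that follow
+ # (illustrative flags): per-language args accumulate roughly as
+ #     base/option args -> add_project_arguments() -> add_global_arguments()
+ #     -> CFLAGS/CXXFLAGS or cross-file args -> per-target c_args/cpp_args
+ # and because cl.exe generally lets the last occurrence of a switch win, a global
+ # '/W2' followed by a target-level '/W4' effectively compiles with /W4.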
+ for l in file_args.keys(): + opts = self.environment.coredata.options[OptionKey('args', machine=target.for_machine, lang=l)] + file_args[l] += opts.value + for args in file_args.values(): + # This is where Visual Studio will insert target_args, target_defines, + # etc, which are added later from external deps (see below). + args += ['%(AdditionalOptions)', '%(PreprocessorDefinitions)', '%(AdditionalIncludeDirectories)'] + # Add custom target dirs as includes automatically, but before + # target-specific include dirs. See _generate_single_compile() in + # the ninja backend for caveats. + args += ['-I' + arg for arg in generated_files_include_dirs] + # Add include dirs from the `include_directories:` kwarg on the target + # and from `include_directories:` of internal deps of the target. + # + # Target include dirs should override internal deps include dirs. + # This is handled in BuildTarget.process_kwargs() + # + # Include dirs from internal deps should override include dirs from + # external deps and must maintain the order in which they are + # specified. Hence, we must reverse so that the order is preserved. + # + # These are per-target, but we still add them as per-file because we + # need them to be looked in first. + for d in reversed(target.get_include_dirs()): + # reversed is used to keep order of includes + for i in reversed(d.get_incdirs()): + curdir = os.path.join(d.get_curdir(), i) + args.append('-I' + self.relpath(curdir, target.subdir)) # build dir + args.append('-I' + os.path.join(proj_to_src_root, curdir)) # src dir + for i in d.get_extra_build_dirs(): + curdir = os.path.join(d.get_curdir(), i) + args.append('-I' + self.relpath(curdir, target.subdir)) # build dir + # Add per-target compile args, f.ex, `c_args : ['/DFOO']`. We set these + # near the end since these are supposed to override everything else. + for l, args in target.extra_args.items(): + if l in file_args: + file_args[l] += args + # The highest priority includes. In order of directory search: + # target private dir, target build dir, target source dir + for args in file_args.values(): + t_inc_dirs = [self.relpath(self.get_target_private_dir(target), + self.get_target_dir(target))] + if target.implicit_include_directories: + t_inc_dirs += ['.', proj_to_src_dir] + args += ['-I' + arg for arg in t_inc_dirs] + + # Split preprocessor defines and include directories out of the list of + # all extra arguments. The rest go into %(AdditionalOptions). 
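+ # Illustrative example of this split (hypothetical flags, not upstream code):
+ #     file_args['c'] = ['-DFOO=1', '/IC:\\inc', '/O2', '%(PreprocessorDefinitions)']
+ # is separated into
+ #     file_defines['c']  -> ['FOO=1', '%(PreprocessorDefinitions)']
+ #     file_inc_dirs['c'] -> ['C:\\inc']
+ #     file_args['c']     -> ['/O2']   (what remains later lands in <AdditionalOptions>)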
+ for l, args in file_args.items(): + for arg in args[:]: + if arg.startswith(('-D', '/D')) or arg == '%(PreprocessorDefinitions)': + file_args[l].remove(arg) + # Don't escape the marker + if arg == '%(PreprocessorDefinitions)': + define = arg + else: + define = arg[2:] + # De-dup + if define not in file_defines[l]: + file_defines[l].append(define) + elif arg.startswith(('-I', '/I')) or arg == '%(AdditionalIncludeDirectories)': + file_args[l].remove(arg) + # Don't escape the marker + if arg == '%(AdditionalIncludeDirectories)': + inc_dir = arg + else: + inc_dir = arg[2:] + # De-dup + if inc_dir not in file_inc_dirs[l]: + file_inc_dirs[l].append(inc_dir) + # Add include dirs to target as well so that "Go to Document" works in headers + if inc_dir not in target_inc_dirs: + target_inc_dirs.append(inc_dir) + + # Split compile args needed to find external dependencies + # Link args are added while generating the link command + for d in reversed(target.get_external_deps()): + # Cflags required by external deps might have UNIX-specific flags, + # so filter them out if needed + if isinstance(d, dependencies.OpenMPDependency): + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' + else: + d_compile_args = compiler.unix_args_to_native(d.get_compile_args()) + for arg in d_compile_args: + if arg.startswith(('-D', '/D')): + define = arg[2:] + # De-dup + if define in target_defines: + target_defines.remove(define) + target_defines.append(define) + elif arg.startswith(('-I', '/I')): + inc_dir = arg[2:] + # De-dup + if inc_dir not in target_inc_dirs: + target_inc_dirs.append(inc_dir) + else: + target_args.append(arg) + + languages += gen_langs + if '/Gw' in build_args: + target_args.append('/Gw') + if len(target_args) > 0: + target_args.append('%(AdditionalOptions)') + ET.SubElement(clconf, "AdditionalOptions").text = ' '.join(target_args) + ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs) + target_defines.append('%(PreprocessorDefinitions)') + ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines) + ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true' + # Warning level + warning_level = self.get_option_for_target(OptionKey('warning_level'), target) + ET.SubElement(clconf, 'WarningLevel').text = 'Level' + str(1 + int(warning_level)) + if self.get_option_for_target(OptionKey('werror'), target): + ET.SubElement(clconf, 'TreatWarningAsError').text = 'true' + # Optimization flags + o_flags = split_o_flags_args(build_args) + if '/Ox' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'Full' + elif '/O2' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'MaxSpeed' + elif '/O1' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'MinSpace' + elif '/Od' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'Disabled' + if '/Oi' in o_flags: + ET.SubElement(clconf, 'IntrinsicFunctions').text = 'true' + if '/Ob1' in o_flags: + ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'OnlyExplicitInline' + elif '/Ob2' in o_flags: + ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'AnySuitable' + # Size-preserving flags + if '/Os' in o_flags: + ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Size' + else: + ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Speed' + # Note: SuppressStartupBanner is /NOLOGO and is 'true' by default + pch_sources = {} + if self.environment.coredata.options.get(OptionKey('b_pch')): + for lang in ['c', 'cpp']: + pch = target.get_pch(lang) + if not pch: + continue + if compiler.id 
== 'msvc': + if len(pch) == 1: + # Auto generate PCH. + src = os.path.join(down, self.create_msvc_pch_implementation(target, lang, pch[0])) + pch_header_dir = os.path.dirname(os.path.join(proj_to_src_dir, pch[0])) + else: + src = os.path.join(proj_to_src_dir, pch[1]) + pch_header_dir = None + pch_sources[lang] = [pch[0], src, lang, pch_header_dir] + else: + # I don't know whether it's relevant but let's handle other compilers + # used with a vs backend + pch_sources[lang] = [pch[0], None, lang, None] + + resourcecompile = ET.SubElement(compiles, 'ResourceCompile') + ET.SubElement(resourcecompile, 'PreprocessorDefinitions') + + # Linker options + link = ET.SubElement(compiles, 'Link') + extra_link_args = compiler.compiler_args() + # FIXME: Can these buildtype linker args be added as tags in the + # vcxproj file (similar to buildtype compiler args) instead of in + # AdditionalOptions? + extra_link_args += compiler.get_buildtype_linker_args(self.buildtype) + # Generate Debug info + if self.debug: + self.generate_debug_information(link) + else: + ET.SubElement(link, 'GenerateDebugInformation').text = 'false' + if not isinstance(target, build.StaticLibrary): + if isinstance(target, build.SharedModule): + options = self.environment.coredata.options + extra_link_args += compiler.get_std_shared_module_link_args(options) + # Add link args added using add_project_link_arguments() + extra_link_args += self.build.get_project_link_args(compiler, target.subproject, target.for_machine) + # Add link args added using add_global_link_arguments() + # These override per-project link arguments + extra_link_args += self.build.get_global_link_args(compiler, target.for_machine) + # Link args added from the env: LDFLAGS, or the cross file. We want + # these to override all the defaults but not the per-target link + # args. + extra_link_args += self.environment.coredata.get_external_link_args(target.for_machine, compiler.get_language()) + # Only non-static built targets need link args and link dependencies + extra_link_args += target.link_args + # External deps must be last because target link libraries may depend on them. + for dep in target.get_external_deps(): + # Extend without reordering or de-dup to preserve `-L -l` sets + # https://github.com/mesonbuild/meson/issues/1718 + if isinstance(dep, dependencies.OpenMPDependency): + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' + else: + extra_link_args.extend_direct(dep.get_link_args()) + for d in target.get_dependencies(): + if isinstance(d, build.StaticLibrary): + for dep in d.get_external_deps(): + if isinstance(dep, dependencies.OpenMPDependency): + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' + else: + extra_link_args.extend_direct(dep.get_link_args()) + # Add link args for c_* or cpp_* build options. Currently this only + # adds c_winlibs and cpp_winlibs when building for Windows. This needs + # to be after all internal and external libraries so that unresolved + # symbols from those can be found here. This is needed when the + # *_winlibs that we want to link to are static mingw64 libraries.
+ extra_link_args += compiler.get_option_link_args(self.environment.coredata.options) + (additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native()) + + # Add more libraries to be linked if needed + for t in target.get_dependencies(): + if isinstance(t, build.CustomTargetIndex): + # We don't need the actual project here, just the library name + lobj = t + else: + lobj = self.build.targets[t.get_id()] + linkname = os.path.join(down, self.get_target_filename_for_linking(lobj)) + if t in target.link_whole_targets: + if compiler.id == 'msvc' and version_compare(compiler.version, '<19.00.23918'): + # Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2 + l = t.extract_all_objects(False) + + # Unforunately, we can't use self.object_filename_from_source() + gensrclist: T.List[File] = [] + for gen in l.genlist: + for src in gen.get_outputs(): + if self.environment.is_source(src) and not self.environment.is_header(src): + path = self.get_target_generated_dir(t, gen, src) + gen_src_ext = '.' + os.path.splitext(path)[1][1:] + extra_link_args.append(path[:-len(gen_src_ext)] + '.obj') + + for src in l.srclist: + obj_basename = None + if self.environment.is_source(src) and not self.environment.is_header(src): + obj_basename = self.object_filename_from_source(t, src) + target_private_dir = self.relpath(self.get_target_private_dir(t), + self.get_target_dir(t)) + rel_obj = os.path.join(target_private_dir, obj_basename) + extra_link_args.append(rel_obj) + + extra_link_args.extend(self.flatten_object_list(t)) + else: + # /WHOLEARCHIVE:foo must go into AdditionalOptions + extra_link_args += compiler.get_link_whole_for(linkname) + # To force Visual Studio to build this project even though it + # has no sources, we include a reference to the vcxproj file + # that builds this target. Technically we should add this only + # if the current target has no sources, but it doesn't hurt to + # have 'extra' references. + trelpath = self.get_target_dir_relative_to(t, target) + tvcxproj = os.path.join(trelpath, t.get_id() + '.vcxproj') + tid = self.environment.coredata.target_guids[t.get_id()] + self.add_project_reference(root, tvcxproj, tid, link_outputs=True) + # Mark the dependency as already handled to not have + # multiple references to the same target. 
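+ # (Illustrative, hypothetical target ids.) The append just below records the handled
+ # dependency, e.g. self.handled_target_deps may end up as {'prog@exe': ['mylib@sta']};
+ # add_target_deps(root, target), called at the end of this method, is assumed to skip
+ # ids recorded here so the same project is not referenced a second time.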
+ self.handled_target_deps[target.get_id()].append(t.get_id()) + else: + # Other libraries go into AdditionalDependencies + if linkname not in additional_links: + additional_links.append(linkname) + for lib in self.get_custom_target_provided_libraries(target): + additional_links.append(self.relpath(lib, self.get_target_dir(target))) + additional_objects = [] + for o in self.flatten_object_list(target, down): + assert(isinstance(o, str)) + additional_objects.append(o) + for o in custom_objs: + additional_objects.append(o) + + if len(extra_link_args) > 0: + extra_link_args.append('%(AdditionalOptions)') + ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args) + if len(additional_libpaths) > 0: + additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)') + ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths) + if len(additional_links) > 0: + additional_links.append('%(AdditionalDependencies)') + ET.SubElement(link, 'AdditionalDependencies').text = ';'.join(additional_links) + ofile = ET.SubElement(link, 'OutputFile') + ofile.text = '$(OutDir)%s' % target.get_filename() + subsys = ET.SubElement(link, 'SubSystem') + subsys.text = subsystem + if (isinstance(target, build.SharedLibrary) or isinstance(target, build.Executable)) and target.get_import_filename(): + # DLLs built with MSVC always have an import library except when + # they're data-only DLLs, but we don't support those yet. + ET.SubElement(link, 'ImportLibrary').text = target.get_import_filename() + if isinstance(target, build.SharedLibrary): + # Add module definitions file, if provided + if target.vs_module_defs: + relpath = os.path.join(down, target.vs_module_defs.rel_to_builddir(self.build_to_src)) + ET.SubElement(link, 'ModuleDefinitionFile').text = relpath + if self.debug: + pdb = ET.SubElement(link, 'ProgramDataBaseFileName') + pdb.text = '$(OutDir)%s.pdb' % target_name + targetmachine = ET.SubElement(link, 'TargetMachine') + targetplatform = self.platform.lower() + if targetplatform == 'win32': + targetmachine.text = 'MachineX86' + elif targetplatform == 'x64': + targetmachine.text = 'MachineX64' + elif targetplatform == 'arm': + targetmachine.text = 'MachineARM' + elif targetplatform == 'arm64': + targetmachine.text = 'MachineARM64' + elif targetplatform == 'arm64ec': + targetmachine.text = 'MachineARM64EC' + else: + raise MesonException('Unsupported Visual Studio target machine: ' + targetplatform) + # /nologo + ET.SubElement(link, 'SuppressStartupBanner').text = 'true' + # /release + if not self.environment.coredata.get_option(OptionKey('debug')): + ET.SubElement(link, 'SetChecksum').text = 'true' + + meson_file_group = ET.SubElement(root, 'ItemGroup') + ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename)) + + # Visual Studio can't load projects that present duplicated items. Filter them out + # by keeping track of already added paths.
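+ # The helper defined below de-duplicates via os.path normalization, e.g. on a
+ # Windows host (illustrative paths):
+ #     os.path.normcase(os.path.normpath('Foo/./bar.c'))  -> 'foo\\bar.c'
+ #     os.path.normcase(os.path.normpath('foo\\bar.c'))   -> 'foo\\bar.c'
+ # so the same file reached through differently spelled paths is only added once.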
+ def path_normalize_add(path, lis): + normalized = os.path.normcase(os.path.normpath(path)) + if normalized not in lis: + lis.append(normalized) + return True + else: + return False + + previous_includes = [] + if len(headers) + len(gen_hdrs) + len(target.extra_files) + len(pch_sources) > 0: + inc_hdrs = ET.SubElement(root, 'ItemGroup') + for h in headers: + relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src)) + if path_normalize_add(relpath, previous_includes): + ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath) + for h in gen_hdrs: + if path_normalize_add(h, previous_includes): + ET.SubElement(inc_hdrs, 'CLInclude', Include=h) + for h in target.extra_files: + relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src)) + if path_normalize_add(relpath, previous_includes): + ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath) + for lang in pch_sources: + h = pch_sources[lang][0] + path = os.path.join(proj_to_src_dir, h) + if path_normalize_add(path, previous_includes): + ET.SubElement(inc_hdrs, 'CLInclude', Include=path) + + previous_sources = [] + if len(sources) + len(gen_src) + len(pch_sources) > 0: + inc_src = ET.SubElement(root, 'ItemGroup') + for s in sources: + relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src)) + if path_normalize_add(relpath, previous_sources): + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath) + lang = Vs2010Backend.lang_from_source_file(s) + self.add_pch(pch_sources, lang, inc_cl) + self.add_additional_options(lang, inc_cl, file_args) + self.add_preprocessor_defines(lang, inc_cl, file_defines) + self.add_include_dirs(lang, inc_cl, file_inc_dirs) + ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + self.object_filename_from_source(target, s) + for s in gen_src: + if path_normalize_add(s, previous_sources): + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s) + lang = Vs2010Backend.lang_from_source_file(s) + self.add_pch(pch_sources, lang, inc_cl) + self.add_additional_options(lang, inc_cl, file_args) + self.add_preprocessor_defines(lang, inc_cl, file_defines) + self.add_include_dirs(lang, inc_cl, file_inc_dirs) + for lang in pch_sources: + impl = pch_sources[lang][1] + if impl and path_normalize_add(impl, previous_sources): + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=impl) + self.create_pch(pch_sources, lang, inc_cl) + self.add_additional_options(lang, inc_cl, file_args) + self.add_preprocessor_defines(lang, inc_cl, file_defines) + pch_header_dir = pch_sources[lang][3] + if pch_header_dir: + inc_dirs = copy.deepcopy(file_inc_dirs) + inc_dirs[lang] = [pch_header_dir] + inc_dirs[lang] + else: + inc_dirs = file_inc_dirs + self.add_include_dirs(lang, inc_cl, inc_dirs) + + previous_objects = [] + if self.has_objects(objects, additional_objects, gen_objs): + inc_objs = ET.SubElement(root, 'ItemGroup') + for s in objects: + relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src)) + if path_normalize_add(relpath, previous_objects): + ET.SubElement(inc_objs, 'Object', Include=relpath) + for s in additional_objects: + if path_normalize_add(s, previous_objects): + ET.SubElement(inc_objs, 'Object', Include=s) + self.add_generated_objects(inc_objs, gen_objs) + + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + self.add_regen_dependency(root) + self.add_target_deps(root, target) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + def gen_regenproj(self, project_name, ofname): + root = ET.Element('Project', {'DefaultTargets': 'Build', + 
'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) + prjconf = ET.SubElement(confitems, 'ProjectConfiguration', + {'Include': self.buildtype + '|' + self.platform}) + p = ET.SubElement(prjconf, 'Configuration') + p.text = self.buildtype + pl = ET.SubElement(prjconf, 'Platform') + pl.text = self.platform + globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') + guidelem = ET.SubElement(globalgroup, 'ProjectGuid') + guidelem.text = '{%s}' % self.environment.coredata.regen_guid + kw = ET.SubElement(globalgroup, 'Keyword') + kw.text = self.platform + 'Proj' + p = ET.SubElement(globalgroup, 'Platform') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') + pname.text = project_name + if self.windows_target_platform_version: + ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props') + type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') + ET.SubElement(type_config, 'ConfigurationType').text = "Utility" + ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte' + ET.SubElement(type_config, 'UseOfMfc').text = 'false' + if self.platform_toolset: + ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') + direlem = ET.SubElement(root, 'PropertyGroup') + fver = ET.SubElement(direlem, '_ProjectFileVersion') + fver.text = self.project_file_version + outdir = ET.SubElement(direlem, 'OutDir') + outdir.text = '.\\' + intdir = ET.SubElement(direlem, 'IntDir') + intdir.text = 'regen-temp\\' + tname = ET.SubElement(direlem, 'TargetName') + tname.text = project_name + + action = ET.SubElement(root, 'ItemDefinitionGroup') + midl = ET.SubElement(action, 'Midl') + ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)' + ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)' + ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h' + ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb' + ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c' + ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c' + regen_command = self.environment.get_build_command() + ['--internal', 'regencheck'] + cmd_templ = '''call %s > NUL +"%s" "%s"''' + regen_command = cmd_templ % \ + (self.get_vcvars_command(), '" "'.join(regen_command), self.environment.get_scratch_dir()) + self.add_custom_build(root, 'regen', regen_command, deps=self.get_regen_filelist(), + outputs=[Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())], + msg='Checking whether solution needs to be regenerated.') + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets') + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + def gen_testproj(self, target_name, ofname): + project_name = target_name + root = ET.Element('Project', {'DefaultTargets': "Build", + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) + prjconf = ET.SubElement(confitems, 'ProjectConfiguration', + {'Include': self.buildtype + '|' + self.platform}) + p = 
ET.SubElement(prjconf, 'Configuration') + p.text = self.buildtype + pl = ET.SubElement(prjconf, 'Platform') + pl.text = self.platform + globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') + guidelem = ET.SubElement(globalgroup, 'ProjectGuid') + guidelem.text = '{%s}' % self.environment.coredata.test_guid + kw = ET.SubElement(globalgroup, 'Keyword') + kw.text = self.platform + 'Proj' + p = ET.SubElement(globalgroup, 'Platform') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') + pname.text = project_name + if self.windows_target_platform_version: + ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props') + type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') + ET.SubElement(type_config, 'ConfigurationType') + ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte' + ET.SubElement(type_config, 'UseOfMfc').text = 'false' + if self.platform_toolset: + ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') + direlem = ET.SubElement(root, 'PropertyGroup') + fver = ET.SubElement(direlem, '_ProjectFileVersion') + fver.text = self.project_file_version + outdir = ET.SubElement(direlem, 'OutDir') + outdir.text = '.\\' + intdir = ET.SubElement(direlem, 'IntDir') + intdir.text = 'test-temp\\' + tname = ET.SubElement(direlem, 'TargetName') + tname.text = target_name + + action = ET.SubElement(root, 'ItemDefinitionGroup') + midl = ET.SubElement(action, 'Midl') + ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)' + ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)' + ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h' + ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb' + ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c' + ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c' + # FIXME: No benchmarks? 
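+ # The command assembled below is quoted element by element before being handed to
+ # add_custom_build. Illustrative example (hypothetical argv, not upstream code):
+ #     '"%s"' % ('" "'.join(['meson', 'test', '--no-rebuild']))
+ #     -> '"meson" "test" "--no-rebuild"'
+ # which keeps elements containing spaces intact when cmd.exe splits the line.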
+ test_command = self.environment.get_build_command() + ['test', '--no-rebuild'] + if not self.environment.coredata.get_option(OptionKey('stdsplit')): + test_command += ['--no-stdsplit'] + if self.environment.coredata.get_option(OptionKey('errorlogs')): + test_command += ['--print-errorlogs'] + self.serialize_tests() + self.add_custom_build(root, 'run_tests', '"%s"' % ('" "'.join(test_command))) + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + self.add_regen_dependency(root) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + def gen_installproj(self, target_name, ofname): + self.create_install_data_files() + project_name = target_name + root = ET.Element('Project', {'DefaultTargets': "Build", + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) + prjconf = ET.SubElement(confitems, 'ProjectConfiguration', + {'Include': self.buildtype + '|' + self.platform}) + p = ET.SubElement(prjconf, 'Configuration') + p.text = self.buildtype + pl = ET.SubElement(prjconf, 'Platform') + pl.text = self.platform + globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') + guidelem = ET.SubElement(globalgroup, 'ProjectGuid') + guidelem.text = '{%s}' % self.environment.coredata.install_guid + kw = ET.SubElement(globalgroup, 'Keyword') + kw.text = self.platform + 'Proj' + p = ET.SubElement(globalgroup, 'Platform') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') + pname.text = project_name + if self.windows_target_platform_version: + ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props') + type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') + ET.SubElement(type_config, 'ConfigurationType') + ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte' + ET.SubElement(type_config, 'UseOfMfc').text = 'false' + if self.platform_toolset: + ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') + direlem = ET.SubElement(root, 'PropertyGroup') + fver = ET.SubElement(direlem, '_ProjectFileVersion') + fver.text = self.project_file_version + outdir = ET.SubElement(direlem, 'OutDir') + outdir.text = '.\\' + intdir = ET.SubElement(direlem, 'IntDir') + intdir.text = 'install-temp\\' + tname = ET.SubElement(direlem, 'TargetName') + tname.text = target_name + + action = ET.SubElement(root, 'ItemDefinitionGroup') + midl = ET.SubElement(action, 'Midl') + ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)' + ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)' + ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h' + ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb' + ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c' + ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c' + install_command = self.environment.get_build_command() + ['install', '--no-rebuild'] + self.add_custom_build(root, 'run_install', '"%s"' % ('" "'.join(install_command))) + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + self.add_regen_dependency(root) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + def add_custom_build(self, node, rulename, 
command, deps=None, outputs=None, msg=None, verify_files=True): + igroup = ET.SubElement(node, 'ItemGroup') + rulefile = os.path.join(self.environment.get_scratch_dir(), rulename + '.rule') + if not os.path.exists(rulefile): + with open(rulefile, 'w', encoding='utf-8') as f: + f.write("# Meson regen file.") + custombuild = ET.SubElement(igroup, 'CustomBuild', Include=rulefile) + if msg: + message = ET.SubElement(custombuild, 'Message') + message.text = msg + if not verify_files: + ET.SubElement(custombuild, 'VerifyInputsAndOutputsExist').text = 'false' + cmd_templ = '''setlocal +%s +if %%errorlevel%% neq 0 goto :cmEnd +:cmEnd +endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone +:cmErrorLevel +exit /b %%1 +:cmDone +if %%errorlevel%% neq 0 goto :VCEnd''' + ET.SubElement(custombuild, 'Command').text = cmd_templ % command + if not outputs: + # Use a nonexistent file to always consider the target out-of-date. + outputs = [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(), + 'outofdate.file'))] + ET.SubElement(custombuild, 'Outputs').text = ';'.join(outputs) + if deps: + ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps) + + @staticmethod + def nonexistent_file(prefix): + i = 0 + file = prefix + while os.path.exists(file): + file = '%s%d' % (prefix, i) + return file + + def generate_debug_information(self, link): + # valid values for vs2015 is 'false', 'true', 'DebugFastLink' + ET.SubElement(link, 'GenerateDebugInformation').text = 'true' + + def add_regen_dependency(self, root): + regen_vcxproj = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj') + self.add_project_reference(root, regen_vcxproj, self.environment.coredata.regen_guid) diff --git a/meson/mesonbuild/backend/vs2012backend.py b/meson/mesonbuild/backend/vs2012backend.py new file mode 100644 index 000000000..a9ba5f476 --- /dev/null +++ b/meson/mesonbuild/backend/vs2012backend.py @@ -0,0 +1,38 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .vs2010backend import Vs2010Backend +from ..mesonlib import MesonException +from ..interpreter import Interpreter +from ..build import Build +import typing as T + + +class Vs2012Backend(Vs2010Backend): + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'vs2012' + self.vs_version = '2012' + if self.environment is not None: + # TODO: we assume host == build + comps = self.environment.coredata.compilers.host + if comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + else: + # We don't have support for versions older than 2019 right now. 
+ raise MesonException('There is currently no support for ICL before 19, patches welcome.') + if self.platform_toolset is None: + self.platform_toolset = 'v110' diff --git a/meson/mesonbuild/backend/vs2013backend.py b/meson/mesonbuild/backend/vs2013backend.py new file mode 100644 index 000000000..0f2c8bdc6 --- /dev/null +++ b/meson/mesonbuild/backend/vs2013backend.py @@ -0,0 +1,38 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .vs2010backend import Vs2010Backend +from ..mesonlib import MesonException +from ..interpreter import Interpreter +from ..build import Build +import typing as T + + +class Vs2013Backend(Vs2010Backend): + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'vs2013' + self.vs_version = '2013' + if self.environment is not None: + # TODO: we assume host == build + comps = self.environment.coredata.compilers.host + if comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + else: + # We don't have support for versions older than 2019 right now. + raise MesonException('There is currently no support for ICL before 19, patches welcome.') + if self.platform_toolset is None: + self.platform_toolset = 'v120' diff --git a/meson/mesonbuild/backend/vs2015backend.py b/meson/mesonbuild/backend/vs2015backend.py new file mode 100644 index 000000000..bdc1675a2 --- /dev/null +++ b/meson/mesonbuild/backend/vs2015backend.py @@ -0,0 +1,38 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .vs2010backend import Vs2010Backend +from ..mesonlib import MesonException +from ..interpreter import Interpreter +from ..build import Build +import typing as T + + +class Vs2015Backend(Vs2010Backend): + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'vs2015' + self.vs_version = '2015' + if self.environment is not None: + # TODO: we assume host == build + comps = self.environment.coredata.compilers.host + if comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + else: + # We don't have support for versions older than 2019 right now. 
+ raise MesonException('There is currently no support for ICL before 19, patches welcome.') + if self.platform_toolset is None: + self.platform_toolset = 'v140' diff --git a/meson/mesonbuild/backend/vs2017backend.py b/meson/mesonbuild/backend/vs2017backend.py new file mode 100644 index 000000000..fa216065c --- /dev/null +++ b/meson/mesonbuild/backend/vs2017backend.py @@ -0,0 +1,52 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import typing as T +import xml.etree.ElementTree as ET + +from .vs2010backend import Vs2010Backend +from ..mesonlib import MesonException +from ..interpreter import Interpreter +from ..build import Build + + +class Vs2017Backend(Vs2010Backend): + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'vs2017' + self.vs_version = '2017' + # We assume that host == build + if self.environment is not None: + comps = self.environment.coredata.compilers.host + if comps: + if comps and all(c.id == 'clang-cl' for c in comps.values()): + self.platform_toolset = 'llvm' + elif comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + else: + # We don't have support for versions older than 2019 right now. + raise MesonException('There is currently no support for ICL before 19, patches welcome.') + if self.platform_toolset is None: + self.platform_toolset = 'v141' + # WindowsSDKVersion should be set by command prompt. + sdk_version = os.environ.get('WindowsSDKVersion', None) + if sdk_version: + self.windows_target_platform_version = sdk_version.rstrip('\\') + + def generate_debug_information(self, link): + # valid values for vs2017 is 'false', 'true', 'DebugFastLink', 'DebugFull' + ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull' diff --git a/meson/mesonbuild/backend/vs2019backend.py b/meson/mesonbuild/backend/vs2019backend.py new file mode 100644 index 000000000..8f304e48e --- /dev/null +++ b/meson/mesonbuild/backend/vs2019backend.py @@ -0,0 +1,47 @@ +# Copyright 2014-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import typing as T +import xml.etree.ElementTree as ET + +from .vs2010backend import Vs2010Backend +from ..interpreter import Interpreter +from ..build import Build + + +class Vs2019Backend(Vs2010Backend): + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'vs2019' + if self.environment is not None: + comps = self.environment.coredata.compilers.host + if comps and all(c.id == 'clang-cl' for c in comps.values()): + self.platform_toolset = 'ClangCL' + elif comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + # We don't have support for versions older than 2019 right now. + if not self.platform_toolset: + self.platform_toolset = 'v142' + self.vs_version = '2019' + # WindowsSDKVersion should be set by command prompt. + sdk_version = os.environ.get('WindowsSDKVersion', None) + if sdk_version: + self.windows_target_platform_version = sdk_version.rstrip('\\') + + def generate_debug_information(self, link): + # valid values for vs2019 is 'false', 'true', 'DebugFastLink', 'DebugFull' + ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull' diff --git a/meson/mesonbuild/backend/xcodebackend.py b/meson/mesonbuild/backend/xcodebackend.py new file mode 100644 index 000000000..ff48ecf1c --- /dev/null +++ b/meson/mesonbuild/backend/xcodebackend.py @@ -0,0 +1,1708 @@ +# Copyright 2014-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import backends +from .. import build +from .. import dependencies +from .. import mesonlib +from .. 
import mlog +import uuid, os, operator +import typing as T + +from ..mesonlib import MesonException, OptionKey +from ..interpreter import Interpreter + +INDENT = '\t' +XCODETYPEMAP = {'c': 'sourcecode.c.c', + 'a': 'archive.ar', + 'cc': 'sourcecode.cpp.cpp', + 'cxx': 'sourcecode.cpp.cpp', + 'cpp': 'sourcecode.cpp.cpp', + 'c++': 'sourcecode.cpp.cpp', + 'm': 'sourcecode.c.objc', + 'mm': 'sourcecode.cpp.objcpp', + 'h': 'sourcecode.c.h', + 'hpp': 'sourcecode.cpp.h', + 'hxx': 'sourcecode.cpp.h', + 'hh': 'sourcecode.cpp.hh', + 'inc': 'sourcecode.c.h', + 'swift': 'sourcecode.swift', + 'dylib': 'compiled.mach-o.dylib', + 'o': 'compiled.mach-o.objfile', + 's': 'sourcecode.asm', + 'asm': 'sourcecode.asm', + } +LANGNAMEMAP = {'c': 'C', + 'cpp': 'CPLUSPLUS', + 'objc': 'OBJC', + 'objcpp': 'OBJCPLUSPLUS', + 'swift': 'SWIFT_' + } +OPT2XCODEOPT = {'0': '0', + 'g': '0', + '1': '1', + '2': '2', + '3': '3', + 's': 's', + } +BOOL2XCODEBOOL = {True: 'YES', False: 'NO'} +LINKABLE_EXTENSIONS = {'.o', '.a', '.obj', '.so', '.dylib'} + +class FileTreeEntry: + + def __init__(self): + self.subdirs = {} + self.targets = [] + +class PbxItem: + def __init__(self, value, comment = ''): + self.value = value + self.comment = comment + +class PbxArray: + def __init__(self): + self.items = [] + + def add_item(self, item, comment=''): + if isinstance(item, PbxArrayItem): + self.items.append(item) + else: + self.items.append(PbxArrayItem(item, comment)) + + def write(self, ofile, indent_level): + ofile.write('(\n') + indent_level += 1 + for i in self.items: + if i.comment: + ofile.write(indent_level*INDENT + f'{i.value} {i.comment},\n') + else: + ofile.write(indent_level*INDENT + f'{i.value},\n') + indent_level -= 1 + ofile.write(indent_level*INDENT + ');\n') + +class PbxArrayItem: + def __init__(self, value, comment = ''): + self.value = value + if comment: + if '/*' in comment: + self.comment = comment + else: + self.comment = f'/* {comment} */' + else: + self.comment = comment + +class PbxComment: + def __init__(self, text): + assert(isinstance(text, str)) + assert('/*' not in text) + self.text = f'/* {text} */' + + def write(self, ofile, indent_level): + ofile.write(f'\n{self.text}\n') + +class PbxDictItem: + def __init__(self, key, value, comment = ''): + self.key = key + self.value = value + if comment: + if '/*' in comment: + self.comment = comment + else: + self.comment = f'/* {comment} */' + else: + self.comment = comment + +class PbxDict: + def __init__(self): + # This class is a bit weird, because we want to write PBX dicts in + # defined order _and_ we want to write intermediate comments also in order. 
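+ # Illustrative example of the serialization these Pbx* classes produce
+ # (hypothetical key/value, not upstream code):
+ #     d = PbxDict()
+ #     d.add_item('isa', 'PBXBuildFile')
+ #     d.write(ofile, 0)
+ # writes, using tab indentation,
+ #     {
+ #         isa = PBXBuildFile;
+ #     }
+ # which is the plist-like syntax Xcode expects in project.pbxproj.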
+ self.keys = set() + self.items = [] + + def add_item(self, key, value, comment=''): + item = PbxDictItem(key, value, comment) + assert(key not in self.keys) + self.keys.add(key) + self.items.append(item) + + def add_comment(self, comment): + if isinstance(comment, str): + self.items.append(PbxComment(str)) + else: + assert(isinstance(comment, PbxComment)) + self.items.append(comment) + + def write(self, ofile, indent_level): + ofile.write('{\n') + indent_level += 1 + for i in self.items: + if isinstance(i, PbxComment): + i.write(ofile, indent_level) + elif isinstance(i, PbxDictItem): + if isinstance(i.value, (str, int)): + if i.comment: + ofile.write(indent_level*INDENT + f'{i.key} = {i.value} {i.comment};\n') + else: + ofile.write(indent_level*INDENT + f'{i.key} = {i.value};\n') + elif isinstance(i.value, PbxDict): + if i.comment: + ofile.write(indent_level*INDENT + f'{i.key} {i.comment} = ') + else: + ofile.write(indent_level*INDENT + f'{i.key} = ') + i.value.write(ofile, indent_level) + elif isinstance(i.value, PbxArray): + if i.comment: + ofile.write(indent_level*INDENT + f'{i.key} {i.comment} = ') + else: + ofile.write(indent_level*INDENT + f'{i.key} = ') + i.value.write(ofile, indent_level) + else: + print(i) + print(i.key) + print(i.value) + raise RuntimeError('missing code') + else: + print(i) + raise RuntimeError('missing code2') + + indent_level -= 1 + ofile.write(indent_level*INDENT + '}') + if indent_level == 0: + ofile.write('\n') + else: + ofile.write(';\n') + +class XCodeBackend(backends.Backend): + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.name = 'xcode' + self.project_uid = self.environment.coredata.lang_guids['default'].replace('-', '')[:24] + self.buildtype = self.environment.coredata.get_option(OptionKey('buildtype')) + self.project_conflist = self.gen_id() + self.maingroup_id = self.gen_id() + self.all_id = self.gen_id() + self.all_buildconf_id = self.gen_id() + self.buildtypes = [self.buildtype] + self.test_id = self.gen_id() + self.test_command_id = self.gen_id() + self.test_buildconf_id = self.gen_id() + self.regen_id = self.gen_id() + self.regen_command_id = self.gen_id() + self.regen_buildconf_id = self.gen_id() + self.regen_dependency_id = self.gen_id() + self.top_level_dict = PbxDict() + self.generator_outputs = {} + # In Xcode files are not accessed via their file names, but rather every one of them + # gets an unique id. More precisely they get one unique id per target they are used + # in. If you generate only one id per file and use them, compilation will work but the + # UI will only show the file in one target but not the others. Thus they key is + # a tuple containing the target and filename. + self.buildfile_ids = {} + # That is not enough, though. Each target/file combination also gets a unique id + # in the file reference section. Because why not. This means that a source file + # that is used in two targets gets a total of four unique ID numbers. 
+ self.fileref_ids = {} + + def write_pbxfile(self, top_level_dict, ofilename): + tmpname = ofilename + '.tmp' + with open(tmpname, 'w', encoding='utf-8') as ofile: + ofile.write('// !$*UTF8*$!\n') + top_level_dict.write(ofile, 0) + os.replace(tmpname, ofilename) + + def gen_id(self): + return str(uuid.uuid4()).upper().replace('-', '')[:24] + + def get_target_dir(self, target): + dirname = os.path.join(target.get_subdir(), self.environment.coredata.get_option(OptionKey('buildtype'))) + #os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True) + return dirname + + def get_custom_target_output_dir(self, target): + dirname = target.get_subdir() + os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True) + return dirname + + def target_to_build_root(self, target): + if self.get_target_dir(target) == '': + return '' + directories = os.path.normpath(self.get_target_dir(target)).split(os.sep) + return os.sep.join(['..'] * len(directories)) + + def object_filename_from_source(self, target, source): + # Xcode has the following naming scheme: + # projectname.build/debug/prog@exe.build/Objects-normal/x86_64/func.o + project = self.build.project_name + buildtype = self.buildtype + tname = target.get_id() + arch = 'x86_64' + if isinstance(source, mesonlib.File): + source = source.fname + stem = os.path.splitext(os.path.basename(source))[0] + return f'{project}.build/{buildtype}/{tname}.build/Objects-normal/{arch}/{stem}.o' + + def generate(self): + self.serialize_tests() + # Cache the result as the method rebuilds the array every time it is called. + self.build_targets = self.build.get_build_targets() + self.custom_targets = self.build.get_custom_targets() + self.generate_filemap() + self.generate_buildstylemap() + self.generate_build_phase_map() + self.generate_build_configuration_map() + self.generate_build_configurationlist_map() + self.generate_project_configurations_map() + self.generate_buildall_configurations_map() + self.generate_test_configurations_map() + self.generate_native_target_map() + self.generate_native_frameworks_map() + self.generate_custom_target_map() + self.generate_generator_target_map() + self.generate_source_phase_map() + self.generate_target_dependency_map() + self.generate_pbxdep_map() + self.generate_containerproxy_map() + self.generate_target_file_maps() + self.generate_build_file_maps() + self.proj_dir = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.xcodeproj') + os.makedirs(self.proj_dir, exist_ok=True) + self.proj_file = os.path.join(self.proj_dir, 'project.pbxproj') + objects_dict = self.generate_prefix(self.top_level_dict) + objects_dict.add_comment(PbxComment('Begin PBXAggregateTarget section')) + self.generate_pbx_aggregate_target(objects_dict) + objects_dict.add_comment(PbxComment('End PBXAggregateTarget section')) + objects_dict.add_comment(PbxComment('Begin PBXBuildFile section')) + self.generate_pbx_build_file(objects_dict) + objects_dict.add_comment(PbxComment('End PBXBuildFile section')) + objects_dict.add_comment(PbxComment('Begin PBXBuildStyle section')) + self.generate_pbx_build_style(objects_dict) + objects_dict.add_comment(PbxComment('End PBXBuildStyle section')) + objects_dict.add_comment(PbxComment('Begin PBXContainerItemProxy section')) + self.generate_pbx_container_item_proxy(objects_dict) + objects_dict.add_comment(PbxComment('End PBXContainerItemProxy section')) + objects_dict.add_comment(PbxComment('Begin PBXFileReference section')) + 
self.generate_pbx_file_reference(objects_dict) + objects_dict.add_comment(PbxComment('End PBXFileReference section')) + objects_dict.add_comment(PbxComment('Begin PBXFrameworksBuildPhase section')) + self.generate_pbx_frameworks_buildphase(objects_dict) + objects_dict.add_comment(PbxComment('End PBXFrameworksBuildPhase section')) + objects_dict.add_comment(PbxComment('Begin PBXGroup section')) + self.generate_pbx_group(objects_dict) + objects_dict.add_comment(PbxComment('End PBXGroup section')) + objects_dict.add_comment(PbxComment('Begin PBXNativeTarget section')) + self.generate_pbx_native_target(objects_dict) + objects_dict.add_comment(PbxComment('End PBXNativeTarget section')) + objects_dict.add_comment(PbxComment('Begin PBXProject section')) + self.generate_pbx_project(objects_dict) + objects_dict.add_comment(PbxComment('End PBXProject section')) + objects_dict.add_comment(PbxComment('Begin PBXShellScriptBuildPhase section')) + self.generate_pbx_shell_build_phase(objects_dict) + objects_dict.add_comment(PbxComment('End PBXShellScriptBuildPhase section')) + objects_dict.add_comment(PbxComment('Begin PBXSourcesBuildPhase section')) + self.generate_pbx_sources_build_phase(objects_dict) + objects_dict.add_comment(PbxComment('End PBXSourcesBuildPhase section')) + objects_dict.add_comment(PbxComment('Begin PBXTargetDependency section')) + self.generate_pbx_target_dependency(objects_dict) + objects_dict.add_comment(PbxComment('End PBXTargetDependency section')) + objects_dict.add_comment(PbxComment('Begin XCBuildConfiguration section')) + self.generate_xc_build_configuration(objects_dict) + objects_dict.add_comment(PbxComment('End XCBuildConfiguration section')) + objects_dict.add_comment(PbxComment('Begin XCConfigurationList section')) + self.generate_xc_configurationList(objects_dict) + objects_dict.add_comment(PbxComment('End XCConfigurationList section')) + self.generate_suffix(self.top_level_dict) + self.write_pbxfile(self.top_level_dict, self.proj_file) + self.generate_regen_info() + + def get_xcodetype(self, fname): + xcodetype = XCODETYPEMAP.get(fname.split('.')[-1].lower()) + if not xcodetype: + xcodetype = 'sourcecode.unknown' + return xcodetype + + def generate_filemap(self): + self.filemap = {} # Key is source file relative to src root. 
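+        # Maps a target name to the id later used as that target's product
+        # file reference (see generate_pbx_file_reference and
+        # generate_pbx_native_target).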
+ self.target_filemap = {} + for name, t in self.build_targets.items(): + for s in t.sources: + if isinstance(s, mesonlib.File): + s = os.path.join(s.subdir, s.fname) + self.filemap[s] = self.gen_id() + for o in t.objects: + if isinstance(o, str): + o = os.path.join(t.subdir, o) + self.filemap[o] = self.gen_id() + self.target_filemap[name] = self.gen_id() + + def generate_buildstylemap(self): + self.buildstylemap = {self.buildtype: self.gen_id()} + + def generate_build_phase_map(self): + for tname, t in self.build_targets.items(): + # generate id for our own target-name + t.buildphasemap = {} + t.buildphasemap[tname] = self.gen_id() + # each target can have it's own Frameworks/Sources/..., generate id's for those + t.buildphasemap['Frameworks'] = self.gen_id() + t.buildphasemap['Resources'] = self.gen_id() + t.buildphasemap['Sources'] = self.gen_id() + + def generate_build_configuration_map(self): + self.buildconfmap = {} + for t in self.build_targets: + bconfs = {self.buildtype: self.gen_id()} + self.buildconfmap[t] = bconfs + for t in self.custom_targets: + bconfs = {self.buildtype: self.gen_id()} + self.buildconfmap[t] = bconfs + + def generate_project_configurations_map(self): + self.project_configurations = {self.buildtype: self.gen_id()} + + def generate_buildall_configurations_map(self): + self.buildall_configurations = {self.buildtype: self.gen_id()} + + def generate_test_configurations_map(self): + self.test_configurations = {self.buildtype: self.gen_id()} + + def generate_build_configurationlist_map(self): + self.buildconflistmap = {} + for t in self.build_targets: + self.buildconflistmap[t] = self.gen_id() + for t in self.custom_targets: + self.buildconflistmap[t] = self.gen_id() + + def generate_native_target_map(self): + self.native_targets = {} + for t in self.build_targets: + self.native_targets[t] = self.gen_id() + + def generate_custom_target_map(self): + self.shell_targets = {} + self.custom_target_output_buildfile = {} + self.custom_target_output_fileref = {} + for tname, t in self.custom_targets.items(): + self.shell_targets[tname] = self.gen_id() + if not isinstance(t, build.CustomTarget): + continue + (srcs, ofilenames, cmd) = self.eval_custom_target_command(t) + for o in ofilenames: + self.custom_target_output_buildfile[o] = self.gen_id() + self.custom_target_output_fileref[o] = self.gen_id() + + def generate_generator_target_map(self): + # Generator objects do not have natural unique ids + # so use a counter. + self.generator_fileref_ids = {} + self.generator_buildfile_ids = {} + for tname, t in self.build_targets.items(): + generator_id = 0 + for genlist in t.generated: + if not isinstance(genlist, build.GeneratedList): + continue + self.gen_single_target_map(genlist, tname, t, generator_id) + generator_id += 1 + # FIXME add outputs. 
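+        # Custom targets may also carry GeneratedList sources; give those
+        # generator ids as well.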
+ for tname, t in self.custom_targets.items(): + generator_id = 0 + for genlist in t.sources: + if not isinstance(genlist, build.GeneratedList): + continue + self.gen_single_target_map(genlist, tname, t, generator_id) + generator_id += 1 + + def gen_single_target_map(self, genlist, tname, t, generator_id): + k = (tname, generator_id) + assert(k not in self.shell_targets) + self.shell_targets[k] = self.gen_id() + ofile_abs = [] + for i in genlist.get_inputs(): + for o_base in genlist.get_outputs_for(i): + o = os.path.join(self.get_target_private_dir(t), o_base) + ofile_abs.append(os.path.join(self.environment.get_build_dir(), o)) + assert(k not in self.generator_outputs) + self.generator_outputs[k] = ofile_abs + buildfile_ids = [] + fileref_ids = [] + for i in range(len(ofile_abs)): + buildfile_ids.append(self.gen_id()) + fileref_ids.append(self.gen_id()) + self.generator_buildfile_ids[k] = buildfile_ids + self.generator_fileref_ids[k] = fileref_ids + + + def generate_native_frameworks_map(self): + self.native_frameworks = {} + self.native_frameworks_fileref = {} + for t in self.build_targets.values(): + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + self.native_frameworks[f] = self.gen_id() + self.native_frameworks_fileref[f] = self.gen_id() + + def generate_target_dependency_map(self): + self.target_dependency_map = {} + for tname, t in self.build_targets.items(): + for target in t.link_targets: + if isinstance(target, build.CustomTargetIndex): + k = (tname, target.target.get_basename()) + if k in self.target_dependency_map: + continue + else: + k = (tname, target.get_basename()) + assert(k not in self.target_dependency_map) + self.target_dependency_map[k] = self.gen_id() + for tname, t in self.custom_targets.items(): + k = tname + assert(k not in self.target_dependency_map) + self.target_dependency_map[k] = self.gen_id() + + def generate_pbxdep_map(self): + self.pbx_dep_map = {} + self.pbx_custom_dep_map = {} + for t in self.build_targets: + self.pbx_dep_map[t] = self.gen_id() + for t in self.custom_targets: + self.pbx_custom_dep_map[t] = self.gen_id() + + def generate_containerproxy_map(self): + self.containerproxy_map = {} + for t in self.build_targets: + self.containerproxy_map[t] = self.gen_id() + + def generate_target_file_maps(self): + self.generate_target_file_maps_impl(self.build_targets) + self.generate_target_file_maps_impl(self.custom_targets) + + def generate_target_file_maps_impl(self, targets): + for tname, t in targets.items(): + for s in t.sources: + if isinstance(s, mesonlib.File): + s = os.path.join(s.subdir, s.fname) + if not isinstance(s, str): + continue + k = (tname, s) + assert(k not in self.buildfile_ids) + self.buildfile_ids[k] = self.gen_id() + assert(k not in self.fileref_ids) + self.fileref_ids[k] = self.gen_id() + if not hasattr(t, 'objects'): + continue + for o in t.objects: + if isinstance(o, build.ExtractedObjects): + # Extracted objects do not live in "the Xcode world". 
+ continue + if isinstance(o, mesonlib.File): + o = os.path.join(o.subdir, o.fname) + if isinstance(o, str): + o = os.path.join(t.subdir, o) + k = (tname, o) + assert(k not in self.buildfile_ids) + self.buildfile_ids[k] = self.gen_id() + assert(k not in self.fileref_ids) + self.fileref_ids[k] = self.gen_id() + else: + raise RuntimeError('Unknown input type ' + str(o)) + + def generate_build_file_maps(self): + for buildfile in self.interpreter.get_build_def_files(): + assert(isinstance(buildfile, str)) + self.buildfile_ids[buildfile] = self.gen_id() + self.fileref_ids[buildfile] = self.gen_id() + + def generate_source_phase_map(self): + self.source_phase = {} + for t in self.build_targets: + self.source_phase[t] = self.gen_id() + + def generate_pbx_aggregate_target(self, objects_dict): + self.custom_aggregate_targets = {} + self.build_all_tdep_id = self.gen_id() + # FIXME: filter out targets that are not built by default. + target_dependencies = list(map(lambda t: self.pbx_dep_map[t], self.build_targets)) + custom_target_dependencies = [self.pbx_custom_dep_map[t] for t in self.custom_targets] + aggregated_targets = [] + aggregated_targets.append((self.all_id, 'ALL_BUILD', + self.all_buildconf_id, + [], + [self.regen_dependency_id] + target_dependencies + custom_target_dependencies)) + aggregated_targets.append((self.test_id, + 'RUN_TESTS', + self.test_buildconf_id, + [self.test_command_id], + [self.regen_dependency_id, self.build_all_tdep_id])) + aggregated_targets.append((self.regen_id, + 'REGENERATE', + self.regen_buildconf_id, + [self.regen_command_id], + [])) + for tname, t in self.build.get_custom_targets().items(): + ct_id = self.gen_id() + self.custom_aggregate_targets[tname] = ct_id + build_phases = [] + dependencies = [self.regen_dependency_id] + generator_id = 0 + for s in t.sources: + if not isinstance(s, build.GeneratedList): + continue + build_phases.append(self.shell_targets[(tname, generator_id)]) + for d in s.depends: + dependencies.append(self.pbx_custom_dep_map[d.get_id()]) + generator_id += 1 + build_phases.append(self.shell_targets[tname]) + aggregated_targets.append((ct_id, tname, self.buildconflistmap[tname], build_phases, dependencies)) + + # Sort objects by ID before writing + sorted_aggregated_targets = sorted(aggregated_targets, key=operator.itemgetter(0)) + for t in sorted_aggregated_targets: + agt_dict = PbxDict() + name = t[1] + buildconf_id = t[2] + build_phases = t[3] + dependencies = t[4] + agt_dict.add_item('isa', 'PBXAggregateTarget') + agt_dict.add_item('buildConfigurationList', buildconf_id, f'Build configuration list for PBXAggregateTarget "{name}"') + bp_arr = PbxArray() + agt_dict.add_item('buildPhases', bp_arr) + for bp in build_phases: + bp_arr.add_item(bp, 'ShellScript') + dep_arr = PbxArray() + agt_dict.add_item('dependencies', dep_arr) + for td in dependencies: + dep_arr.add_item(td, 'PBXTargetDependency') + agt_dict.add_item('name', f'"{name}"') + agt_dict.add_item('productName', f'"{name}"') + objects_dict.add_item(t[0], agt_dict, name) + + def generate_pbx_build_file(self, objects_dict): + for tname, t in self.build_targets.items(): + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + fw_dict = PbxDict() + objects_dict.add_item(self.native_frameworks[f], fw_dict, f'{f}.framework in Frameworks') + fw_dict.add_item('isa', 'PBXBuildFile') + fw_dict.add_item('fileRef', self.native_frameworks_fileref[f], f) + + for s in t.sources: + in_build_dir = False + if isinstance(s, 
mesonlib.File): + if s.is_built: + in_build_dir = True + s = os.path.join(s.subdir, s.fname) + + if not isinstance(s, str): + continue + sdict = PbxDict() + k = (tname, s) + idval = self.buildfile_ids[k] + fileref = self.fileref_ids[k] + if in_build_dir: + fullpath = os.path.join(self.environment.get_build_dir(), s) + else: + fullpath = os.path.join(self.environment.get_source_dir(), s) + compiler_args = '' + sdict.add_item('isa', 'PBXBuildFile') + sdict.add_item('fileRef', fileref, fullpath) + objects_dict.add_item(idval, sdict) + + for o in t.objects: + if isinstance(o, build.ExtractedObjects): + # Object files are not source files as such. We add them + # by hand in linker flags. It is also not particularly + # clear how to define build files in Xcode's file format. + continue + if isinstance(o, mesonlib.File): + o = os.path.join(o.subdir, o.fname) + elif isinstance(o, str): + o = os.path.join(t.subdir, o) + idval = self.buildfile_ids[(tname, o)] + k = (tname, o) + fileref = self.fileref_ids[k] + assert(o not in self.filemap) + self.filemap[o] = idval + fullpath = os.path.join(self.environment.get_source_dir(), o) + fullpath2 = fullpath + o_dict = PbxDict() + objects_dict.add_item(idval, o_dict, fullpath) + o_dict.add_item('isa', 'PBXBuildFile') + o_dict.add_item('fileRef', fileref, fullpath2) + + generator_id = 0 + for g in t.generated: + if not isinstance(g, build.GeneratedList): + continue + self.create_generator_shellphase(objects_dict, tname, generator_id) + generator_id += 1 + + # Custom targets are shell build phases in Xcode terminology. + for tname, t in self.custom_targets.items(): + if not isinstance(t, build.CustomTarget): + continue + (srcs, ofilenames, cmd) = self.eval_custom_target_command(t) + for o in ofilenames: + custom_dict = PbxDict() + objects_dict.add_item(self.custom_target_output_buildfile[o], custom_dict, f'/* {o} */') + custom_dict.add_item('isa', 'PBXBuildFile') + custom_dict.add_item('fileRef', self.custom_target_output_fileref[o]) + generator_id = 0 + for g in t.sources: + if not isinstance(g, build.GeneratedList): + continue + self.create_generator_shellphase(objects_dict, tname, generator_id) + generator_id += 1 + + def create_generator_shellphase(self, objects_dict, tname, generator_id): + file_ids = self.generator_buildfile_ids[(tname, generator_id)] + ref_ids = self.generator_fileref_ids[(tname, generator_id)] + assert(len(ref_ids) == len(file_ids)) + for i in range(len(file_ids)): + file_o = file_ids[i] + ref_id = ref_ids[i] + odict = PbxDict() + objects_dict.add_item(file_o, odict) + odict.add_item('isa', 'PBXBuildFile') + odict.add_item('fileRef', ref_id) + + def generate_pbx_build_style(self, objects_dict): + # FIXME: Xcode 9 and later does not uses PBXBuildStyle and it gets removed. Maybe we can remove this part. 
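+        # For now one PBXBuildStyle entry is still emitted per buildtype,
+        # with COPY_PHASE_STRIP disabled.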
+ for name, idval in self.buildstylemap.items(): + styledict = PbxDict() + objects_dict.add_item(idval, styledict, name) + styledict.add_item('isa', 'PBXBuildStyle') + settings_dict = PbxDict() + styledict.add_item('buildSettings', settings_dict) + settings_dict.add_item('COPY_PHASE_STRIP', 'NO') + styledict.add_item('name', f'"{name}"') + + def generate_pbx_container_item_proxy(self, objects_dict): + for t in self.build_targets: + proxy_dict = PbxDict() + objects_dict.add_item(self.containerproxy_map[t], proxy_dict, 'PBXContainerItemProxy') + proxy_dict.add_item('isa', 'PBXContainerItemProxy') + proxy_dict.add_item('containerPortal', self.project_uid, 'Project object') + proxy_dict.add_item('proxyType', '1') + proxy_dict.add_item('remoteGlobalIDString', self.native_targets[t]) + proxy_dict.add_item('remoteInfo', '"' + t + '"') + + def generate_pbx_file_reference(self, objects_dict): + for tname, t in self.build_targets.items(): + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + fw_dict = PbxDict() + objects_dict.add_item(self.native_frameworks_fileref[f], fw_dict, f) + fw_dict.add_item('isa', 'PBXFileReference') + fw_dict.add_item('lastKnownFileType', 'wrapper.framework') + fw_dict.add_item('name', f'{f}.framework') + fw_dict.add_item('path', f'System/Library/Frameworks/{f}.framework') + fw_dict.add_item('sourceTree', 'SDKROOT') + for s in t.sources: + in_build_dir = False + if isinstance(s, mesonlib.File): + if s.is_built: + in_build_dir = True + s = os.path.join(s.subdir, s.fname) + if not isinstance(s, str): + continue + idval = self.fileref_ids[(tname, s)] + fullpath = os.path.join(self.environment.get_source_dir(), s) + src_dict = PbxDict() + xcodetype = self.get_xcodetype(s) + name = os.path.basename(s) + path = s + objects_dict.add_item(idval, src_dict, fullpath) + src_dict.add_item('isa', 'PBXFileReference') + src_dict.add_item('explicitFileType', '"' + xcodetype + '"') + src_dict.add_item('fileEncoding', '4') + if in_build_dir: + src_dict.add_item('name', '"' + name + '"') + # This makes no sense. This should say path instead of name + # but then the path gets added twice. + src_dict.add_item('path', '"' + name + '"') + src_dict.add_item('sourceTree', 'BUILD_ROOT') + else: + src_dict.add_item('name', '"' + name + '"') + src_dict.add_item('path', '"' + path + '"') + src_dict.add_item('sourceTree', 'SOURCE_ROOT') + + generator_id = 0 + for g in t.generated: + if not isinstance(g, build.GeneratedList): + continue + outputs = self.generator_outputs[(tname, generator_id)] + ref_ids = self.generator_fileref_ids[tname, generator_id] + assert(len(ref_ids) == len(outputs)) + for i in range(len(outputs)): + o = outputs[i] + ref_id = ref_ids[i] + odict = PbxDict() + name = os.path.basename(o) + objects_dict.add_item(ref_id, odict, o) + xcodetype = self.get_xcodetype(o) + rel_name = mesonlib.relpath(o, self.environment.get_source_dir()) + odict.add_item('isa', 'PBXFileReference') + odict.add_item('explicitFileType', '"' + xcodetype + '"') + odict.add_item('fileEncoding', '4') + odict.add_item('name', f'"{name}"') + odict.add_item('path', f'"{rel_name}"') + odict.add_item('sourceTree', 'SOURCE_ROOT') + + generator_id += 1 + + for o in t.objects: + if isinstance(o, build.ExtractedObjects): + # Same as with pbxbuildfile. 
+                    continue
+                if isinstance(o, mesonlib.File):
+                    fullpath = o.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())
+                    o = os.path.join(o.subdir, o.fname)
+                else:
+                    o = os.path.join(t.subdir, o)
+                    fullpath = os.path.join(self.environment.get_source_dir(), o)
+                idval = self.fileref_ids[(tname, o)]
+                rel_name = mesonlib.relpath(fullpath, self.environment.get_source_dir())
+                o_dict = PbxDict()
+                name = os.path.basename(o)
+                objects_dict.add_item(idval, o_dict, fullpath)
+                o_dict.add_item('isa', 'PBXFileReference')
+                o_dict.add_item('explicitFileType', '"' + self.get_xcodetype(o) + '"')
+                o_dict.add_item('fileEncoding', '4')
+                o_dict.add_item('name', f'"{name}"')
+                o_dict.add_item('path', f'"{rel_name}"')
+                o_dict.add_item('sourceTree', 'SOURCE_ROOT')
+        for tname, idval in self.target_filemap.items():
+            target_dict = PbxDict()
+            objects_dict.add_item(idval, target_dict, tname)
+            t = self.build_targets[tname]
+            fname = t.get_filename()
+            reftype = 0
+            if isinstance(t, build.Executable):
+                typestr = 'compiled.mach-o.executable'
+                path = fname
+            elif isinstance(t, build.SharedLibrary):
+                typestr = self.get_xcodetype('dummy.dylib')
+                path = fname
+            else:
+                typestr = self.get_xcodetype(fname)
+                path = '"%s"' % t.get_filename()
+            target_dict.add_item('isa', 'PBXFileReference')
+            target_dict.add_item('explicitFileType', '"' + typestr + '"')
+            if ' ' in path and path[0] != '"':
+                target_dict.add_item('path', f'"{path}"')
+            else:
+                target_dict.add_item('path', path)
+            target_dict.add_item('refType', reftype)
+            target_dict.add_item('sourceTree', 'BUILT_PRODUCTS_DIR')
+
+        for tname, t in self.custom_targets.items():
+            if not isinstance(t, build.CustomTarget):
+                continue
+            (srcs, ofilenames, cmd) = self.eval_custom_target_command(t)
+            for s in t.sources:
+                if isinstance(s, mesonlib.File):
+                    s = os.path.join(s.subdir, s.fname)
+                elif isinstance(s, str):
+                    s = os.path.join(t.subdir, s)
+                else:
+                    continue
+                custom_dict = PbxDict()
+                typestr = self.get_xcodetype(s)
+                custom_dict.add_item('isa', 'PBXFileReference')
+                custom_dict.add_item('explicitFileType', '"' + typestr + '"')
+                custom_dict.add_item('name', f'"{s}"')
+                custom_dict.add_item('path', f'"{s}"')
+                custom_dict.add_item('refType', 0)
+                custom_dict.add_item('sourceTree', 'SOURCE_ROOT')
+                objects_dict.add_item(self.fileref_ids[(tname, s)], custom_dict)
+            for o in ofilenames:
+                custom_dict = PbxDict()
+                typestr = self.get_xcodetype(o)
+                custom_dict.add_item('isa', 'PBXFileReference')
+                custom_dict.add_item('explicitFileType', '"' + typestr + '"')
+                custom_dict.add_item('name', o)
+                custom_dict.add_item('path', os.path.join(self.src_to_build, o))
+                custom_dict.add_item('refType', 0)
+                custom_dict.add_item('sourceTree', 'SOURCE_ROOT')
+                objects_dict.add_item(self.custom_target_output_fileref[o], custom_dict)
+
+        for buildfile in self.interpreter.get_build_def_files():
+            basename = os.path.split(buildfile)[1]
+            buildfile_dict = PbxDict()
+            typestr = self.get_xcodetype(buildfile)
+            buildfile_dict.add_item('isa', 'PBXFileReference')
+            buildfile_dict.add_item('explicitFileType', '"' + typestr + '"')
+            buildfile_dict.add_item('name', f'"{basename}"')
+            buildfile_dict.add_item('path', f'"{buildfile}"')
+            buildfile_dict.add_item('refType', 0)
+            buildfile_dict.add_item('sourceTree', 'SOURCE_ROOT')
+            objects_dict.add_item(self.fileref_ids[buildfile], buildfile_dict)
+
+    def generate_pbx_frameworks_buildphase(self, objects_dict):
+        for t in self.build_targets.values():
+            bt_dict = PbxDict()
+            objects_dict.add_item(t.buildphasemap['Frameworks'], bt_dict, 'Frameworks')
+            bt_dict.add_item('isa', 'PBXFrameworksBuildPhase')
+            bt_dict.add_item('buildActionMask', 2147483647)
+            file_list = PbxArray()
+            bt_dict.add_item('files', file_list)
+            for dep in t.get_external_deps():
+                if isinstance(dep, dependencies.AppleFrameworks):
+                    for f in dep.frameworks:
+                        file_list.add_item(self.native_frameworks[f], f'{f}.framework in Frameworks')
+            bt_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+
+    def generate_pbx_group(self, objects_dict):
+        groupmap = {}
+        target_src_map = {}
+        for t in self.build_targets:
+            groupmap[t] = self.gen_id()
+            target_src_map[t] = self.gen_id()
+        for t in self.custom_targets:
+            groupmap[t] = self.gen_id()
+            target_src_map[t] = self.gen_id()
+        projecttree_id = self.gen_id()
+        resources_id = self.gen_id()
+        products_id = self.gen_id()
+        frameworks_id = self.gen_id()
+        main_dict = PbxDict()
+        objects_dict.add_item(self.maingroup_id, main_dict)
+        main_dict.add_item('isa', 'PBXGroup')
+        main_children = PbxArray()
+        main_dict.add_item('children', main_children)
+        main_children.add_item(projecttree_id, 'Project tree')
+        main_children.add_item(resources_id, 'Resources')
+        main_children.add_item(products_id, 'Products')
+        main_children.add_item(frameworks_id, 'Frameworks')
+        main_dict.add_item('sourceTree', '""')
+
+        self.add_projecttree(objects_dict, projecttree_id)
+
+        resource_dict = PbxDict()
+        objects_dict.add_item(resources_id, resource_dict, 'Resources')
+        resource_dict.add_item('isa', 'PBXGroup')
+        resource_children = PbxArray()
+        resource_dict.add_item('children', resource_children)
+        resource_dict.add_item('name', 'Resources')
+        resource_dict.add_item('sourceTree', '""')
+
+        frameworks_dict = PbxDict()
+        objects_dict.add_item(frameworks_id, frameworks_dict, 'Frameworks')
+        frameworks_dict.add_item('isa', 'PBXGroup')
+        frameworks_children = PbxArray()
+        frameworks_dict.add_item('children', frameworks_children)
+        # write frameworks
+
+        for t in self.build_targets.values():
+            for dep in t.get_external_deps():
+                if isinstance(dep, dependencies.AppleFrameworks):
+                    for f in dep.frameworks:
+                        frameworks_children.add_item(self.native_frameworks_fileref[f], f)
+
+        frameworks_dict.add_item('name', 'Frameworks')
+        frameworks_dict.add_item('sourceTree', '""')
+
+        for tname, t in self.custom_targets.items():
+            target_dict = PbxDict()
+            objects_dict.add_item(groupmap[tname], target_dict, tname)
+            target_dict.add_item('isa', 'PBXGroup')
+            target_children = PbxArray()
+            target_dict.add_item('children', target_children)
+            target_children.add_item(target_src_map[tname], 'Source files')
+            if t.subproject:
+                target_dict.add_item('name', f'"{t.subproject} • {t.name}"')
+            else:
+                target_dict.add_item('name', f'"{t.name}"')
+            target_dict.add_item('sourceTree', '""')
+            source_files_dict = PbxDict()
+            objects_dict.add_item(target_src_map[tname], source_files_dict, 'Source files')
+            source_files_dict.add_item('isa', 'PBXGroup')
+            source_file_children = PbxArray()
+            source_files_dict.add_item('children', source_file_children)
+            for s in t.sources:
+                if isinstance(s, mesonlib.File):
+                    s = os.path.join(s.subdir, s.fname)
+                elif isinstance(s, str):
+                    s = os.path.join(t.subdir, s)
+                else:
+                    continue
+                source_file_children.add_item(self.fileref_ids[(tname, s)], s)
+            source_files_dict.add_item('name', '"Source files"')
+            source_files_dict.add_item('sourceTree', '""')
+
+        # And finally products
+        product_dict = PbxDict()
+        objects_dict.add_item(products_id, product_dict, 'Products')
+        product_dict.add_item('isa', 'PBXGroup')
+        product_children = PbxArray()
+        product_dict.add_item('children', product_children)
+        for t in self.build_targets:
+            product_children.add_item(self.target_filemap[t], t)
+        product_dict.add_item('name', 'Products')
+        product_dict.add_item('sourceTree', '""')
+
+    def write_group_target_entry(self, objects_dict, t):
+        tid = t.get_id()
+        group_id = self.gen_id()
+        target_dict = PbxDict()
+        objects_dict.add_item(group_id, target_dict, tid)
+        target_dict.add_item('isa', 'PBXGroup')
+        target_children = PbxArray()
+        target_dict.add_item('children', target_children)
+        target_dict.add_item('name', f'"{t} · target"')
+        target_dict.add_item('sourceTree', '""')
+        source_files_dict = PbxDict()
+        for s in t.sources:
+            if isinstance(s, mesonlib.File):
+                s = os.path.join(s.subdir, s.fname)
+            elif isinstance(s, str):
+                s = os.path.join(t.subdir, s)
+            else:
+                continue
+            target_children.add_item(self.fileref_ids[(tid, s)], s)
+        for o in t.objects:
+            if isinstance(o, build.ExtractedObjects):
+                # Do not show built object files in the project tree.
+                continue
+            if isinstance(o, mesonlib.File):
+                o = os.path.join(o.subdir, o.fname)
+            else:
+                o = os.path.join(t.subdir, o)
+            target_children.add_item(self.fileref_ids[(tid, o)], o)
+        source_files_dict.add_item('name', '"Source files"')
+        source_files_dict.add_item('sourceTree', '""')
+        return group_id
+
+    def add_projecttree(self, objects_dict, projecttree_id):
+        root_dict = PbxDict()
+        objects_dict.add_item(projecttree_id, root_dict, "Root of project tree")
+        root_dict.add_item('isa', 'PBXGroup')
+        target_children = PbxArray()
+        root_dict.add_item('children', target_children)
+        root_dict.add_item('name', '"Project root"')
+        root_dict.add_item('sourceTree', '""')
+
+        project_tree = self.generate_project_tree()
+        self.write_tree(objects_dict, project_tree, target_children, '')
+
+    def write_tree(self, objects_dict, tree_node, children_array, current_subdir):
+        subdir_dict = PbxDict()
+        subdir_children = PbxArray()
+        for subdir_name, subdir_node in tree_node.subdirs.items():
+            subdir_id = self.gen_id()
+            objects_dict.add_item(subdir_id, subdir_dict)
+            children_array.add_item(subdir_id)
+            subdir_dict.add_item('isa', 'PBXGroup')
+            subdir_dict.add_item('children', subdir_children)
+            subdir_dict.add_item('name', f'"{subdir_name}"')
+            subdir_dict.add_item('sourceTree', '""')
+            self.write_tree(objects_dict, subdir_node, subdir_children, os.path.join(current_subdir, subdir_name))
+        for target in tree_node.targets:
+            group_id = self.write_group_target_entry(objects_dict, target)
+            children_array.add_item(group_id)
+        potentials = [os.path.join(current_subdir, 'meson.build'),
+                      os.path.join(current_subdir, 'meson_options.txt')]
+        for bf in potentials:
+            i = self.fileref_ids.get(bf, None)
+            if i:
+                children_array.add_item(i)
+
+    def generate_project_tree(self):
+        tree_info = FileTreeEntry()
+        for tname, t in self.build_targets.items():
+            self.add_target_to_tree(tree_info, t)
+        return tree_info
+
+    def add_target_to_tree(self, tree_root, t):
+        current_node = tree_root
+        path_segments = t.subdir.split('/')
+        for s in path_segments:
+            if not s:
+                continue
+            if s not in current_node.subdirs:
+                current_node.subdirs[s] = FileTreeEntry()
+            current_node = current_node.subdirs[s]
+        current_node.targets.append(t)
+
+    def generate_pbx_native_target(self, objects_dict):
+        for tname, idval in self.native_targets.items():
+            ntarget_dict = PbxDict()
+            t = self.build_targets[tname]
+            objects_dict.add_item(idval, ntarget_dict, tname)
+
ntarget_dict.add_item('isa', 'PBXNativeTarget') + ntarget_dict.add_item('buildConfigurationList', self.buildconflistmap[tname], f'Build configuration list for PBXNativeTarget "{tname}"') + buildphases_array = PbxArray() + ntarget_dict.add_item('buildPhases', buildphases_array) + generator_id = 0 + for g in t.generated: + # Custom target are handled via inter-target dependencies. + # Generators are built as a shellscriptbuildphase. + if isinstance(g, build.GeneratedList): + buildphases_array.add_item(self.shell_targets[(tname, generator_id)], f'Generator {generator_id}/{tname}') + generator_id += 1 + for bpname, bpval in t.buildphasemap.items(): + buildphases_array.add_item(bpval, f'{bpname} yyy') + ntarget_dict.add_item('buildRules', PbxArray()) + dep_array = PbxArray() + ntarget_dict.add_item('dependencies', dep_array) + dep_array.add_item(self.regen_dependency_id) + # These dependencies only tell Xcode that the deps must be built + # before this one. They don't set up linkage or anything + # like that. Those are set up in the XCBuildConfiguration. + for lt in self.build_targets[tname].link_targets: + # NOT DOCUMENTED, may need to make different links + # to same target have different targetdependency item. + if isinstance(lt, build.CustomTarget): + dep_array.add_item(self.pbx_custom_dep_map[lt.get_id()], lt.name) + elif isinstance(lt, build.CustomTargetIndex): + dep_array.add_item(self.pbx_custom_dep_map[lt.target.get_id()], lt.target.name) + else: + idval = self.pbx_dep_map[lt.get_id()] + dep_array.add_item(idval, 'PBXTargetDependency') + for o in t.objects: + if isinstance(o, build.ExtractedObjects): + source_target_id = o.target.get_id() + idval = self.pbx_dep_map[source_target_id] + dep_array.add_item(idval, 'PBXTargetDependency') + generator_id = 0 + for o in t.generated: + if isinstance(o, build.CustomTarget): + dep_array.add_item(self.pbx_custom_dep_map[o.get_id()], o.name) + elif isinstance(o, build.CustomTargetIndex): + dep_array.add_item(self.pbx_custom_dep_map[o.target.get_id()], o.target.name) + + generator_id += 1 + + ntarget_dict.add_item('name', f'"{tname}"') + ntarget_dict.add_item('productName', f'"{tname}"') + ntarget_dict.add_item('productReference', self.target_filemap[tname], tname) + if isinstance(t, build.Executable): + typestr = 'com.apple.product-type.tool' + elif isinstance(t, build.StaticLibrary): + typestr = 'com.apple.product-type.library.static' + elif isinstance(t, build.SharedLibrary): + typestr = 'com.apple.product-type.library.dynamic' + else: + raise MesonException('Unknown target type for %s' % tname) + ntarget_dict.add_item('productType', f'"{typestr}"') + + def generate_pbx_project(self, objects_dict): + project_dict = PbxDict() + objects_dict.add_item(self.project_uid, project_dict, 'Project object') + project_dict.add_item('isa', 'PBXProject') + attr_dict = PbxDict() + project_dict.add_item('attributes', attr_dict) + attr_dict.add_item('BuildIndependentTargetsInParallel', 'YES') + project_dict.add_item('buildConfigurationList', self.project_conflist, f'Build configuration list for PBXProject "{self.build.project_name}"') + project_dict.add_item('buildSettings', PbxDict()) + style_arr = PbxArray() + project_dict.add_item('buildStyles', style_arr) + for name, idval in self.buildstylemap.items(): + style_arr.add_item(idval, name) + project_dict.add_item('compatibilityVersion', '"Xcode 3.2"') + project_dict.add_item('hasScannedForEncodings', 0) + project_dict.add_item('mainGroup', self.maingroup_id) + project_dict.add_item('projectDirPath', '"' + 
self.environment.get_source_dir() + '"') + project_dict.add_item('projectRoot', '""') + targets_arr = PbxArray() + project_dict.add_item('targets', targets_arr) + targets_arr.add_item(self.all_id, 'ALL_BUILD') + targets_arr.add_item(self.test_id, 'RUN_TESTS') + targets_arr.add_item(self.regen_id, 'REGENERATE') + for t in self.build_targets: + targets_arr.add_item(self.native_targets[t], t) + for t in self.custom_targets: + targets_arr.add_item(self.custom_aggregate_targets[t], t) + + def generate_pbx_shell_build_phase(self, objects_dict): + self.generate_test_shell_build_phase(objects_dict) + self.generate_regen_shell_build_phase(objects_dict) + self.generate_custom_target_shell_build_phases(objects_dict) + self.generate_generator_target_shell_build_phases(objects_dict) + + def generate_test_shell_build_phase(self, objects_dict): + shell_dict = PbxDict() + objects_dict.add_item(self.test_command_id, shell_dict, 'ShellScript') + shell_dict.add_item('isa', 'PBXShellScriptBuildPhase') + shell_dict.add_item('buildActionMask', 2147483647) + shell_dict.add_item('files', PbxArray()) + shell_dict.add_item('inputPaths', PbxArray()) + shell_dict.add_item('outputPaths', PbxArray()) + shell_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + shell_dict.add_item('shellPath', '/bin/sh') + cmd = mesonlib.get_meson_command() + ['test', '--no-rebuild', '-C', self.environment.get_build_dir()] + cmdstr = ' '.join(["'%s'" % i for i in cmd]) + shell_dict.add_item('shellScript', f'"{cmdstr}"') + shell_dict.add_item('showEnvVarsInLog', 0) + + def generate_regen_shell_build_phase(self, objects_dict): + shell_dict = PbxDict() + objects_dict.add_item(self.regen_command_id, shell_dict, 'ShellScript') + shell_dict.add_item('isa', 'PBXShellScriptBuildPhase') + shell_dict.add_item('buildActionMask', 2147483647) + shell_dict.add_item('files', PbxArray()) + shell_dict.add_item('inputPaths', PbxArray()) + shell_dict.add_item('outputPaths', PbxArray()) + shell_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + shell_dict.add_item('shellPath', '/bin/sh') + cmd = mesonlib.get_meson_command() + ['--internal', 'regencheck', os.path.join(self.environment.get_build_dir(), 'meson-private')] + cmdstr = ' '.join(["'%s'" % i for i in cmd]) + shell_dict.add_item('shellScript', f'"{cmdstr}"') + shell_dict.add_item('showEnvVarsInLog', 0) + + def generate_custom_target_shell_build_phases(self, objects_dict): + # Custom targets are shell build phases in Xcode terminology. 
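+        # Each custom target is emitted as a PBXShellScriptBuildPhase whose
+        # shellScript cd's into the build dir and runs the (possibly
+        # meson-wrapped) command, with the target's outputs as outputPaths.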
+ for tname, t in self.custom_targets.items(): + if not isinstance(t, build.CustomTarget): + continue + (srcs, ofilenames, cmd) = self.eval_custom_target_command(t, absolute_outputs=True) + fixed_cmd, _ = self.as_meson_exe_cmdline(t.name, + cmd[0], + cmd[1:], + #workdir=None, + env=t.env) + custom_dict = PbxDict() + objects_dict.add_item(self.shell_targets[tname], custom_dict, f'/* Custom target {tname} */') + custom_dict.add_item('isa', 'PBXShellScriptBuildPhase') + custom_dict.add_item('buildActionMask', 2147483647) + custom_dict.add_item('files', PbxArray()) + custom_dict.add_item('inputPaths', PbxArray()) + outarray = PbxArray() + custom_dict.add_item('name', '"Generate {}."'.format(ofilenames[0])) + custom_dict.add_item('outputPaths', outarray) + for o in ofilenames: + outarray.add_item(os.path.join(self.environment.get_build_dir(), o)) + custom_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + custom_dict.add_item('shellPath', '/bin/sh') + workdir = self.environment.get_build_dir() + quoted_cmd = [] + for c in fixed_cmd: + quoted_cmd.append(c.replace('"', chr(92) + '"')) + cmdstr = ' '.join([f"\\'{x}\\'" for x in quoted_cmd]) + custom_dict.add_item('shellScript', f'"cd {workdir}; {cmdstr}"') + custom_dict.add_item('showEnvVarsInLog', 0) + + def generate_generator_target_shell_build_phases(self, objects_dict): + for tname, t in self.build_targets.items(): + generator_id = 0 + for genlist in t.generated: + if isinstance(genlist, build.GeneratedList): + self.generate_single_generator_phase(tname, t, genlist, generator_id, objects_dict) + generator_id += 1 + for tname, t in self.custom_targets.items(): + generator_id = 0 + for genlist in t.sources: + if isinstance(genlist, build.GeneratedList): + self.generate_single_generator_phase(tname, t, genlist, generator_id, objects_dict) + generator_id += 1 + + def generate_single_generator_phase(self, tname, t, genlist, generator_id, objects_dict): + generator = genlist.get_generator() + exe = generator.get_exe() + exe_arr = self.build_target_to_cmd_array(exe) + workdir = self.environment.get_build_dir() + gen_dict = PbxDict() + objects_dict.add_item(self.shell_targets[(tname, generator_id)], gen_dict, f'"Generator {generator_id}/{tname}"') + infilelist = genlist.get_inputs() + outfilelist = genlist.get_outputs() + gen_dict.add_item('isa', 'PBXShellScriptBuildPhase') + gen_dict.add_item('buildActionMask', 2147483647) + gen_dict.add_item('files', PbxArray()) + gen_dict.add_item('inputPaths', PbxArray()) + gen_dict.add_item('name', f'"Generator {generator_id}/{tname}"') + commands = [["cd", workdir]] # Array of arrays, each one a single command, will get concatenated below. + k = (tname, generator_id) + ofile_abs = self.generator_outputs[k] + outarray = PbxArray() + gen_dict.add_item('outputPaths', outarray) + for of in ofile_abs: + outarray.add_item(of) + for i in infilelist: + # This might be needed to be added to inputPaths. It's not done yet as it is + # unclear whether it is necessary, what actually happens when it is defined + # and currently the build works without it. 
+ #infile_abs = i.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) + infilename = i.rel_to_builddir(self.build_to_src) + base_args = generator.get_arglist(infilename) + for o_base in genlist.get_outputs_for(i): + o = os.path.join(self.get_target_private_dir(t), o_base) + args = [] + for arg in base_args: + arg = arg.replace("@INPUT@", infilename) + arg = arg.replace('@OUTPUT@', o).replace('@BUILD_DIR@', self.get_target_private_dir(t)) + arg = arg.replace("@CURRENT_SOURCE_DIR@", os.path.join(self.build_to_src, t.subdir)) + args.append(arg) + args = self.replace_outputs(args, self.get_target_private_dir(t), outfilelist) + args = self.replace_extra_args(args, genlist) + if generator.capture: + # When capturing, stdout is the output. Forward it with the shell. + full_command = ['('] + exe_arr + args + ['>', o, ')'] + else: + full_command = exe_arr + args + commands.append(full_command) + gen_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + gen_dict.add_item('shellPath', '/bin/sh') + quoted_cmds = [] + for cmnd in commands: + q = [] + for c in cmnd: + if ' ' in c: + q.append(f'\\"{c}\\"') + else: + q.append(c) + quoted_cmds.append(' '.join(q)) + cmdstr = '"' + ' && '.join(quoted_cmds) + '"' + gen_dict.add_item('shellScript', cmdstr) + gen_dict.add_item('showEnvVarsInLog', 0) + + + def generate_pbx_sources_build_phase(self, objects_dict): + for name in self.source_phase.keys(): + phase_dict = PbxDict() + t = self.build_targets[name] + objects_dict.add_item(t.buildphasemap[name], phase_dict, 'Sources') + phase_dict.add_item('isa', 'PBXSourcesBuildPhase') + phase_dict.add_item('buildActionMask', 2147483647) + file_arr = PbxArray() + phase_dict.add_item('files', file_arr) + for s in self.build_targets[name].sources: + s = os.path.join(s.subdir, s.fname) + if not self.environment.is_header(s): + file_arr.add_item(self.buildfile_ids[(name, s)], os.path.join(self.environment.get_source_dir(), s)) + generator_id = 0 + for gt in t.generated: + if isinstance(gt, build.CustomTarget): + (srcs, ofilenames, cmd) = self.eval_custom_target_command(gt) + for o in ofilenames: + file_arr.add_item(self.custom_target_output_buildfile[o], + os.path.join(self.environment.get_build_dir(), o)) + elif isinstance(gt, build.CustomTargetIndex): + for o in gt.get_outputs(): + file_arr.add_item(self.custom_target_output_buildfile[o], + os.path.join(self.environment.get_build_dir(), o)) + elif isinstance(gt, build.GeneratedList): + genfiles = self.generator_buildfile_ids[(name, generator_id)] + generator_id += 1 + for o in genfiles: + file_arr.add_item(o) + else: + raise RuntimeError('Unknown input type: ' + str(gt)) + phase_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + + def generate_pbx_target_dependency(self, objects_dict): + all_dict = PbxDict() + objects_dict.add_item(self.build_all_tdep_id, all_dict, 'ALL_BUILD') + all_dict.add_item('isa', 'PBXTargetDependency') + all_dict.add_item('target', self.all_id) + targets = [] + targets.append((self.regen_dependency_id, self.regen_id, 'REGEN', None)) + for t in self.build_targets: + idval = self.pbx_dep_map[t] # VERIFY: is this correct? 
+ targets.append((idval, self.native_targets[t], t, self.containerproxy_map[t])) + + for t in self.custom_targets: + idval = self.pbx_custom_dep_map[t] + targets.append((idval, self.custom_aggregate_targets[t], t, None))#self.containerproxy_map[t])) + + # Sort object by ID + sorted_targets = sorted(targets, key=operator.itemgetter(0)) + for t in sorted_targets: + t_dict = PbxDict() + objects_dict.add_item(t[0], t_dict, 'PBXTargetDependency') + t_dict.add_item('isa', 'PBXTargetDependency') + t_dict.add_item('target', t[1], t[2]) + if t[3] is not None: + t_dict.add_item('targetProxy', t[3], 'PBXContainerItemProxy') + + def generate_xc_build_configuration(self, objects_dict): + # First the setup for the toplevel project. + for buildtype in self.buildtypes: + bt_dict = PbxDict() + objects_dict.add_item(self.project_configurations[buildtype], bt_dict, buildtype) + bt_dict.add_item('isa', 'XCBuildConfiguration') + settings_dict = PbxDict() + bt_dict.add_item('buildSettings', settings_dict) + settings_dict.add_item('ARCHS', '"$(NATIVE_ARCH_ACTUAL)"') + settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES') + settings_dict.add_item('SWIFT_VERSION', '5.0') + settings_dict.add_item('SDKROOT', '"macosx"') + settings_dict.add_item('SYMROOT', '"%s/build"' % self.environment.get_build_dir()) + bt_dict.add_item('name', f'"{buildtype}"') + + # Then the all target. + for buildtype in self.buildtypes: + bt_dict = PbxDict() + objects_dict.add_item(self.buildall_configurations[buildtype], bt_dict, buildtype) + bt_dict.add_item('isa', 'XCBuildConfiguration') + settings_dict = PbxDict() + bt_dict.add_item('buildSettings', settings_dict) + settings_dict.add_item('SYMROOT', '"%s"' % self.environment.get_build_dir()) + warn_array = PbxArray() + warn_array.add_item('"$(inherited)"') + settings_dict.add_item('WARNING_CFLAGS', warn_array) + + bt_dict.add_item('name', f'"{buildtype}"') + + # Then the test target. + for buildtype in self.buildtypes: + bt_dict = PbxDict() + objects_dict.add_item(self.test_configurations[buildtype], bt_dict, buildtype) + bt_dict.add_item('isa', 'XCBuildConfiguration') + settings_dict = PbxDict() + bt_dict.add_item('buildSettings', settings_dict) + settings_dict.add_item('SYMROOT', '"%s"' % self.environment.get_build_dir()) + warn_array = PbxArray() + settings_dict.add_item('WARNING_CFLAGS', warn_array) + warn_array.add_item('"$(inherited)"') + bt_dict.add_item('name', f'"{buildtype}"') + + # Now finally targets. 
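+        # Build targets get their full settings via generate_single_build_target;
+        # custom targets only need a minimal configuration here.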
+ for target_name, target in self.build_targets.items(): + self.generate_single_build_target(objects_dict, target_name, target) + + for target_name, target in self.custom_targets.items(): + bt_dict = PbxDict() + objects_dict.add_item(self.buildconfmap[target_name][buildtype], bt_dict, buildtype) + bt_dict.add_item('isa', 'XCBuildConfiguration') + settings_dict = PbxDict() + bt_dict.add_item('buildSettings', settings_dict) + settings_dict.add_item('ARCHS', '"$(NATIVE_ARCH_ACTUAL)"') + settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES') + settings_dict.add_item('SDKROOT', '"macosx"') + settings_dict.add_item('SYMROOT', '"%s/build"' % self.environment.get_build_dir()) + bt_dict.add_item('name', f'"{buildtype}"') + + + def determine_internal_dep_link_args(self, target, buildtype): + links_dylib = False + dep_libs = [] + for l in target.link_targets: + if isinstance(target, build.SharedModule) and isinstance(l, build.Executable): + continue + if isinstance(l, build.CustomTargetIndex): + rel_dir = self.get_custom_target_output_dir(l.target) + libname = l.get_filename() + elif isinstance(l, build.CustomTarget): + rel_dir = self.get_custom_target_output_dir(l) + libname = l.get_filename() + else: + rel_dir = self.get_target_dir(l) + libname = l.get_filename() + abs_path = os.path.join(self.environment.get_build_dir(), rel_dir, libname) + dep_libs.append("'%s'" % abs_path) + if isinstance(l, build.SharedLibrary): + links_dylib = True + if isinstance(l, build.StaticLibrary): + (sub_libs, sub_links_dylib) = self.determine_internal_dep_link_args(l, buildtype) + dep_libs += sub_libs + links_dylib = links_dylib or sub_links_dylib + return (dep_libs, links_dylib) + + def generate_single_build_target(self, objects_dict, target_name, target): + for buildtype in self.buildtypes: + dep_libs = [] + links_dylib = False + headerdirs = [] + for d in target.include_dirs: + for sd in d.incdirs: + cd = os.path.join(d.curdir, sd) + headerdirs.append(os.path.join(self.environment.get_source_dir(), cd)) + headerdirs.append(os.path.join(self.environment.get_build_dir(), cd)) + for extra in d.extra_build_dirs: + headerdirs.append(os.path.join(self.environment.get_build_dir(), extra)) + (dep_libs, links_dylib) = self.determine_internal_dep_link_args(target, buildtype) + if links_dylib: + dep_libs = ['-Wl,-search_paths_first', '-Wl,-headerpad_max_install_names'] + dep_libs + dylib_version = None + if isinstance(target, build.SharedLibrary): + if isinstance(target, build.SharedModule): + ldargs = [] + else: + ldargs = ['-dynamiclib'] + ldargs += ['-Wl,-headerpad_max_install_names'] + dep_libs + install_path = os.path.join(self.environment.get_build_dir(), target.subdir, buildtype) + dylib_version = target.soversion + else: + ldargs = dep_libs + install_path = '' + if dylib_version is not None: + product_name = target.get_basename() + '.' + dylib_version + else: + product_name = target.get_basename() + ldargs += target.link_args + # Swift is special. Again. You can't mix Swift with other languages + # in the same target. 
Thus for Swift we only use + if self.is_swift_target(target): + linker, stdlib_args = target.compilers['swift'], [] + else: + linker, stdlib_args = self.determine_linker_and_stdlib_args(target) + if not isinstance(target, build.StaticLibrary): + ldargs += self.build.get_project_link_args(linker, target.subproject, target.for_machine) + ldargs += self.build.get_global_link_args(linker, target.for_machine) + cargs = [] + for dep in target.get_external_deps(): + cargs += dep.get_compile_args() + ldargs += dep.get_link_args() + for o in target.objects: + # Add extracted objects to the link line by hand. + if isinstance(o, build.ExtractedObjects): + added_objs = set() + for objname_rel in o.get_outputs(self): + objname_abs = os.path.join(self.environment.get_build_dir(), o.target.subdir, objname_rel) + if objname_abs not in added_objs: + added_objs.add(objname_abs) + ldargs += [r'\"' + objname_abs + r'\"'] + generator_id = 0 + for o in target.generated: + if isinstance(o, build.GeneratedList): + outputs = self.generator_outputs[target_name, generator_id] + generator_id += 1 + for o_abs in outputs: + if o_abs.endswith('.o') or o_abs.endswith('.obj'): + ldargs += [r'\"' + o_abs + r'\"'] + else: + if isinstance(o, build.CustomTarget): + (srcs, ofilenames, cmd) = self.eval_custom_target_command(o) + for ofname in ofilenames: + if os.path.splitext(ofname)[-1] in LINKABLE_EXTENSIONS: + ldargs += [r'\"' + os.path.join(self.environment.get_build_dir(), ofname) + r'\"'] + elif isinstance(o, build.CustomTargetIndex): + for ofname in o.get_outputs(): + if os.path.splitext(ofname)[-1] in LINKABLE_EXTENSIONS: + ldargs += [r'\"' + os.path.join(self.environment.get_build_dir(), ofname) + r'\"'] + else: + raise RuntimeError(o) + if isinstance(target, build.SharedModule): + options = self.environment.coredata.options + ldargs += linker.get_std_shared_module_link_args(options) + elif isinstance(target, build.SharedLibrary): + ldargs += linker.get_std_shared_lib_link_args() + ldstr = ' '.join(ldargs) + valid = self.buildconfmap[target_name][buildtype] + langargs = {} + for lang in self.environment.coredata.compilers[target.for_machine]: + if lang not in LANGNAMEMAP: + continue + compiler = target.compilers.get(lang) + if compiler is None: + continue + # Start with warning args + warn_args = compiler.get_warn_args(self.get_option_for_target(OptionKey('warning_level'), target)) + copt_proxy = self.get_compiler_options_for_target(target) + std_args = compiler.get_option_compile_args(copt_proxy) + # Add compile args added using add_project_arguments() + pargs = self.build.projects_args[target.for_machine].get(target.subproject, {}).get(lang, []) + # Add compile args added using add_global_arguments() + # These override per-project arguments + gargs = self.build.global_args[target.for_machine].get(lang, []) + targs = target.get_extra_args(lang) + args = warn_args + std_args + pargs + gargs + targs + if lang == 'swift': + # For some reason putting Swift module dirs in HEADER_SEARCH_PATHS does not work, + # but adding -I/path to manual args does work. + swift_dep_dirs = self.determine_swift_dep_dirs(target) + for d in swift_dep_dirs: + args += compiler.get_include_args(d, False) + if args: + lang_cargs = cargs + if compiler and target.implicit_include_directories: + # It is unclear what is the cwd when xcode runs. -I. does not seem to + # add the root build dir to the search path. So add an absolute path instead. + # This may break reproducible builds, in which case patches are welcome. 
+ lang_cargs += self.get_custom_target_dir_include_args(target, compiler, absolute_path=True) + # Xcode can not handle separate compilation flags for C and ObjectiveC. They are both + # put in OTHER_CFLAGS. Same with C++ and ObjectiveC++. + if lang == 'objc': + lang = 'c' + elif lang == 'objcpp': + lang = 'cpp' + langname = LANGNAMEMAP[lang] + if langname in langargs: + langargs[langname] += args + else: + langargs[langname] = args + langargs[langname] += lang_cargs + symroot = os.path.join(self.environment.get_build_dir(), target.subdir) + bt_dict = PbxDict() + objects_dict.add_item(valid, bt_dict, buildtype) + bt_dict.add_item('isa', 'XCBuildConfiguration') + settings_dict = PbxDict() + bt_dict.add_item('buildSettings', settings_dict) + settings_dict.add_item('COMBINE_HIDPI_IMAGES', 'YES') + if isinstance(target, build.SharedModule): + settings_dict.add_item('DYLIB_CURRENT_VERSION', '""') + settings_dict.add_item('DYLIB_COMPATIBILITY_VERSION', '""') + else: + if dylib_version is not None: + settings_dict.add_item('DYLIB_CURRENT_VERSION', f'"{dylib_version}"') + if target.prefix: + settings_dict.add_item('EXECUTABLE_PREFIX', target.prefix) + if target.suffix: + suffix = '.' + target.suffix + settings_dict.add_item('EXECUTABLE_SUFFIX', suffix) + settings_dict.add_item('GCC_GENERATE_DEBUGGING_SYMBOLS', BOOL2XCODEBOOL[self.get_option_for_target(OptionKey('debug'), target)]) + settings_dict.add_item('GCC_INLINES_ARE_PRIVATE_EXTERN', 'NO') + settings_dict.add_item('GCC_OPTIMIZATION_LEVEL', OPT2XCODEOPT[self.get_option_for_target(OptionKey('optimization'), target)]) + if target.has_pch: + # Xcode uses GCC_PREFIX_HEADER which only allows one file per target/executable. Precompiling various header files and + # applying a particular pch to each source file will require custom scripts (as a build phase) and build flags per each + # file. Since Xcode itself already discourages precompiled headers in favor of modules we don't try much harder here. + pchs = target.get_pch('c') + target.get_pch('cpp') + target.get_pch('objc') + target.get_pch('objcpp') + # Make sure to use headers (other backends require implementation files like *.c *.cpp, etc; these should not be used here) + pchs = [pch for pch in pchs if pch.endswith('.h') or pch.endswith('.hh') or pch.endswith('hpp')] + if pchs: + if len(pchs) > 1: + mlog.warning(f'Unsupported Xcode configuration: More than 1 precompiled header found "{pchs!s}". 
Target "{target.name}" might not compile correctly.') + relative_pch_path = os.path.join(target.get_subdir(), pchs[0]) # Path relative to target so it can be used with "$(PROJECT_DIR)" + settings_dict.add_item('GCC_PRECOMPILE_PREFIX_HEADER', 'YES') + settings_dict.add_item('GCC_PREFIX_HEADER', f'"$(PROJECT_DIR)/{relative_pch_path}"') + settings_dict.add_item('GCC_PREPROCESSOR_DEFINITIONS', '""') + settings_dict.add_item('GCC_SYMBOLS_PRIVATE_EXTERN', 'NO') + header_arr = PbxArray() + unquoted_headers = [] + unquoted_headers.append(self.get_target_private_dir_abs(target)) + if target.implicit_include_directories: + unquoted_headers.append(os.path.join(self.environment.get_build_dir(), target.get_subdir())) + unquoted_headers.append(os.path.join(self.environment.get_source_dir(), target.get_subdir())) + if headerdirs: + for i in headerdirs: + i = os.path.normpath(i) + unquoted_headers.append(i) + for i in unquoted_headers: + header_arr.add_item(f'"\\"{i}\\""') + settings_dict.add_item('HEADER_SEARCH_PATHS', header_arr) + settings_dict.add_item('INSTALL_PATH', f'"{install_path}"') + settings_dict.add_item('LIBRARY_SEARCH_PATHS', '""') + if isinstance(target, build.SharedModule): + settings_dict.add_item('LIBRARY_STYLE', 'BUNDLE') + settings_dict.add_item('MACH_O_TYPE', 'mh_bundle') + elif isinstance(target, build.SharedLibrary): + settings_dict.add_item('LIBRARY_STYLE', 'DYNAMIC') + self.add_otherargs(settings_dict, langargs) + settings_dict.add_item('OTHER_LDFLAGS', f'"{ldstr}"') + settings_dict.add_item('OTHER_REZFLAGS', '""') + if ' ' in product_name: + settings_dict.add_item('PRODUCT_NAME', f'"{product_name}"') + else: + settings_dict.add_item('PRODUCT_NAME', product_name) + settings_dict.add_item('SECTORDER_FLAGS', '""') + settings_dict.add_item('SYMROOT', f'"{symroot}"') + sysheader_arr = PbxArray() + # XCode will change every -I flag that points inside these directories + # to an -isystem. Thus set nothing in it since we control our own + # include flags. + settings_dict.add_item('SYSTEM_HEADER_SEARCH_PATHS', sysheader_arr) + settings_dict.add_item('USE_HEADERMAP', 'NO') + warn_array = PbxArray() + settings_dict.add_item('WARNING_CFLAGS', warn_array) + warn_array.add_item('"$(inherited)"') + bt_dict.add_item('name', buildtype) + + def add_otherargs(self, settings_dict, langargs): + for langname, args in langargs.items(): + if args: + quoted_args = [] + for a in args: + # This works but + # a) it's ugly as sin + # b) I don't know why it works or why every backslash must be escaped into eight backslashes + a = a.replace(chr(92), 8*chr(92)) # chr(92) is backslash, this how we smuggle it in without Python's quoting grabbing it. 
+                    a = a.replace(r'"', r'\\\"')
+                    if ' ' in a or "'" in a:
+                        a = r'\"' + a + r'\"'
+                    quoted_args.append(a)
+                settings_dict.add_item(f'OTHER_{langname}FLAGS', '"' + ' '.join(quoted_args) + '"')
+
+    def generate_xc_configurationList(self, objects_dict):
+        # FIXME: sort items
+        conf_dict = PbxDict()
+        objects_dict.add_item(self.project_conflist, conf_dict, f'Build configuration list for PBXProject "{self.build.project_name}"')
+        conf_dict.add_item('isa', 'XCConfigurationList')
+        confs_arr = PbxArray()
+        conf_dict.add_item('buildConfigurations', confs_arr)
+        for buildtype in self.buildtypes:
+            confs_arr.add_item(self.project_configurations[buildtype], buildtype)
+        conf_dict.add_item('defaultConfigurationIsVisible', 0)
+        conf_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        # Now the all target
+        all_dict = PbxDict()
+        objects_dict.add_item(self.all_buildconf_id, all_dict, 'Build configuration list for PBXAggregateTarget "ALL_BUILD"')
+        all_dict.add_item('isa', 'XCConfigurationList')
+        conf_arr = PbxArray()
+        all_dict.add_item('buildConfigurations', conf_arr)
+        for buildtype in self.buildtypes:
+            conf_arr.add_item(self.buildall_configurations[buildtype], buildtype)
+        all_dict.add_item('defaultConfigurationIsVisible', 0)
+        all_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        # Test target
+        test_dict = PbxDict()
+        objects_dict.add_item(self.test_buildconf_id, test_dict, 'Build configuration list for PBXAggregateTarget "RUN_TESTS"')
+        test_dict.add_item('isa', 'XCConfigurationList')
+        conf_arr = PbxArray()
+        test_dict.add_item('buildConfigurations', conf_arr)
+        for buildtype in self.buildtypes:
+            conf_arr.add_item(self.test_configurations[buildtype], buildtype)
+        test_dict.add_item('defaultConfigurationIsVisible', 0)
+        test_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        # Regen target
+        regen_dict = PbxDict()
+        objects_dict.add_item(self.regen_buildconf_id, regen_dict, 'Build configuration list for PBXAggregateTarget "REGENERATE"')
+        regen_dict.add_item('isa', 'XCConfigurationList')
+        conf_arr = PbxArray()
+        regen_dict.add_item('buildConfigurations', conf_arr)
+        for buildtype in self.buildtypes:
+            conf_arr.add_item(self.test_configurations[buildtype], buildtype)
+        regen_dict.add_item('defaultConfigurationIsVisible', 0)
+        regen_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        for target_name in self.build_targets:
+            t_dict = PbxDict()
+            listid = self.buildconflistmap[target_name]
+            objects_dict.add_item(listid, t_dict, f'Build configuration list for PBXNativeTarget "{target_name}"')
+            t_dict.add_item('isa', 'XCConfigurationList')
+            conf_arr = PbxArray()
+            t_dict.add_item('buildConfigurations', conf_arr)
+            idval = self.buildconfmap[target_name][self.buildtype]
+            conf_arr.add_item(idval, self.buildtype)
+            t_dict.add_item('defaultConfigurationIsVisible', 0)
+            t_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        for target_name in self.custom_targets:
+            t_dict = PbxDict()
+            listid = self.buildconflistmap[target_name]
+            objects_dict.add_item(listid, t_dict, f'Build configuration list for PBXAggregateTarget "{target_name}"')
+            t_dict.add_item('isa', 'XCConfigurationList')
+            conf_arr = PbxArray()
+            t_dict.add_item('buildConfigurations', conf_arr)
+            idval = self.buildconfmap[target_name][self.buildtype]
+            conf_arr.add_item(idval, self.buildtype)
+            t_dict.add_item('defaultConfigurationIsVisible', 0)
+            t_dict.add_item('defaultConfigurationName', self.buildtype)
+
+    def generate_prefix(self, pbxdict):
+        pbxdict.add_item('archiveVersion', '1')
+
pbxdict.add_item('classes', PbxDict()) + pbxdict.add_item('objectVersion', '46') + objects_dict = PbxDict() + pbxdict.add_item('objects', objects_dict) + + return objects_dict + + def generate_suffix(self, pbxdict): + pbxdict.add_item('rootObject', self.project_uid, 'Project object') diff --git a/meson/mesonbuild/build.py b/meson/mesonbuild/build.py new file mode 100644 index 000000000..846de8d3b --- /dev/null +++ b/meson/mesonbuild/build.py @@ -0,0 +1,2686 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import OrderedDict +from functools import lru_cache +import copy +import hashlib +import itertools, pathlib +import os +import pickle +import re +import textwrap +import typing as T + +from . import environment +from . import dependencies +from . import mlog +from . import programs +from .mesonlib import ( + HoldableObject, SecondLevelHolder, + File, MesonException, MachineChoice, PerMachine, OrderedSet, listify, + extract_as_list, typeslistify, stringlistify, classify_unity_sources, + get_filenames_templates_dict, substitute_values, has_path_sep, + OptionKey, PerMachineDefaultable, + MesonBugException, FileOrString, +) +from .compilers import ( + Compiler, is_object, clink_langs, sort_clink, lang_suffixes, + is_known_suffix, detect_static_linker +) +from .linkers import StaticLinker +from .interpreterbase import FeatureNew + +if T.TYPE_CHECKING: + from ._typing import ImmutableListProtocol, ImmutableSetProtocol + from .interpreter.interpreter import Test, SourceOutputs, Interpreter + from .mesonlib import FileMode, FileOrString + from .modules import ModuleState + from .backend.backends import Backend + +pch_kwargs = {'c_pch', 'cpp_pch'} + +lang_arg_kwargs = { + 'c_args', + 'cpp_args', + 'cuda_args', + 'd_args', + 'd_import_dirs', + 'd_unittest', + 'd_module_versions', + 'd_debug', + 'fortran_args', + 'java_args', + 'objc_args', + 'objcpp_args', + 'rust_args', + 'vala_args', + 'cs_args', + 'cython_args', +} + +vala_kwargs = {'vala_header', 'vala_gir', 'vala_vapi'} +rust_kwargs = {'rust_crate_type'} +cs_kwargs = {'resources', 'cs_args'} + +buildtarget_kwargs = { + 'build_by_default', + 'build_rpath', + 'dependencies', + 'extra_files', + 'gui_app', + 'link_with', + 'link_whole', + 'link_args', + 'link_depends', + 'implicit_include_directories', + 'include_directories', + 'install', + 'install_rpath', + 'install_dir', + 'install_mode', + 'name_prefix', + 'name_suffix', + 'native', + 'objects', + 'override_options', + 'sources', + 'gnu_symbol_visibility', + 'link_language', + 'win_subsystem', +} + +known_build_target_kwargs = ( + buildtarget_kwargs | + lang_arg_kwargs | + pch_kwargs | + vala_kwargs | + rust_kwargs | + cs_kwargs) + +known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'} +known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'} +known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'} +known_stlib_kwargs = 
known_build_target_kwargs | {'pic', 'prelink'} +known_jar_kwargs = known_exe_kwargs | {'main_class'} + +@lru_cache(maxsize=None) +def get_target_macos_dylib_install_name(ld) -> str: + name = ['@rpath/', ld.prefix, ld.name] + if ld.soversion is not None: + name.append('.' + ld.soversion) + name.append('.dylib') + return ''.join(name) + +class InvalidArguments(MesonException): + pass + +class DependencyOverride(HoldableObject): + def __init__(self, dep, node, explicit=True): + self.dep = dep + self.node = node + self.explicit = explicit + +class Headers(HoldableObject): + + def __init__(self, sources: T.List[File], install_subdir: T.Optional[str], + install_dir: T.Optional[str], install_mode: 'FileMode', + subproject: str): + self.sources = sources + self.install_subdir = install_subdir + self.custom_install_dir = install_dir + self.custom_install_mode = install_mode + self.subproject = subproject + + # TODO: we really don't need any of these methods, but they're preserved to + # keep APIs relying on them working. + + def set_install_subdir(self, subdir: str) -> None: + self.install_subdir = subdir + + def get_install_subdir(self) -> T.Optional[str]: + return self.install_subdir + + def get_sources(self) -> T.List[File]: + return self.sources + + def get_custom_install_dir(self) -> T.Optional[str]: + return self.custom_install_dir + + def get_custom_install_mode(self) -> 'FileMode': + return self.custom_install_mode + + +class Man(HoldableObject): + + def __init__(self, sources: T.List[File], install_dir: T.Optional[str], + install_mode: 'FileMode', subproject: str, + locale: T.Optional[str]): + self.sources = sources + self.custom_install_dir = install_dir + self.custom_install_mode = install_mode + self.subproject = subproject + self.locale = locale + + def get_custom_install_dir(self) -> T.Optional[str]: + return self.custom_install_dir + + def get_custom_install_mode(self) -> 'FileMode': + return self.custom_install_mode + + def get_sources(self) -> T.List['File']: + return self.sources + + +class InstallDir(HoldableObject): + + def __init__(self, src_subdir: str, inst_subdir: str, install_dir: str, + install_mode: 'FileMode', + exclude: T.Tuple[T.Set[str], T.Set[str]], + strip_directory: bool, subproject: str, + from_source_dir: bool = True): + self.source_subdir = src_subdir + self.installable_subdir = inst_subdir + self.install_dir = install_dir + self.install_mode = install_mode + self.exclude = exclude + self.strip_directory = strip_directory + self.from_source_dir = from_source_dir + self.subproject = subproject + + +class Build: + """A class that holds the status of one build including + all dependencies and so on. 
+ """ + + def __init__(self, environment: environment.Environment): + self.project_name = 'name of master project' + self.project_version = None + self.environment = environment + self.projects = {} + self.targets: T.MutableMapping[str, 'Target'] = OrderedDict() + self.run_target_names: T.Set[T.Tuple[str, str]] = set() + self.global_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {}) + self.global_link_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {}) + self.projects_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {}) + self.projects_link_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {}) + self.tests: T.List['Test'] = [] + self.benchmarks: T.List['Test'] = [] + self.headers: T.List[Headers] = [] + self.man: T.List[Man] = [] + self.data: T.List[Data] = [] + self.static_linker: PerMachine[StaticLinker] = PerMachine(None, None) + self.subprojects = {} + self.subproject_dir = '' + self.install_scripts = [] + self.postconf_scripts = [] + self.dist_scripts = [] + self.install_dirs: T.List[InstallDir] = [] + self.dep_manifest_name = None + self.dep_manifest = {} + self.stdlibs = PerMachine({}, {}) + self.test_setups: T.Dict[str, TestSetup] = {} + self.test_setup_default_name = None + self.find_overrides = {} + self.searched_programs = set() # The list of all programs that have been searched for. + + # If we are doing a cross build we need two caches, if we're doing a + # build == host compilation the both caches should point to the same place. + self.dependency_overrides: PerMachine[T.Dict[T.Tuple, DependencyOverride]] = PerMachineDefaultable.default( + environment.is_cross_build(), {}, {}) + self.devenv: T.List[EnvironmentVariables] = [] + + def get_build_targets(self): + build_targets = OrderedDict() + for name, t in self.targets.items(): + if isinstance(t, BuildTarget): + build_targets[name] = t + return build_targets + + def get_custom_targets(self): + custom_targets = OrderedDict() + for name, t in self.targets.items(): + if isinstance(t, CustomTarget): + custom_targets[name] = t + return custom_targets + + def copy(self): + other = Build(self.environment) + for k, v in self.__dict__.items(): + if isinstance(v, (list, dict, set, OrderedDict)): + other.__dict__[k] = v.copy() + else: + other.__dict__[k] = v + return other + + def merge(self, other): + for k, v in other.__dict__.items(): + self.__dict__[k] = v + + def ensure_static_linker(self, compiler): + if self.static_linker[compiler.for_machine] is None and compiler.needs_static_linker(): + self.static_linker[compiler.for_machine] = detect_static_linker(self.environment, compiler) + + def get_project(self): + return self.projects[''] + + def get_subproject_dir(self): + return self.subproject_dir + + def get_targets(self) -> T.Dict[str, 'Target']: + return self.targets + + def get_tests(self) -> T.List['Test']: + return self.tests + + def get_benchmarks(self) -> T.List['Test']: + return self.benchmarks + + def get_headers(self): + return self.headers + + def get_man(self): + return self.man + + def get_data(self): + return self.data + + def get_install_subdirs(self): + return self.install_dirs + + def get_global_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]: + d = self.global_args[for_machine] + return d.get(compiler.get_language(), []) + + def get_project_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]: + d = self.projects_args[for_machine] + args = d.get(project) + if not args: + return 
[] + return args.get(compiler.get_language(), []) + + def get_global_link_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]: + d = self.global_link_args[for_machine] + return d.get(compiler.get_language(), []) + + def get_project_link_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]: + d = self.projects_link_args[for_machine] + + link_args = d.get(project) + if not link_args: + return [] + + return link_args.get(compiler.get_language(), []) + +class IncludeDirs(HoldableObject): + + """Internal representation of an include_directories call.""" + + def __init__(self, curdir: str, dirs: T.List[str], is_system: bool, extra_build_dirs: T.Optional[T.List[str]] = None): + self.curdir = curdir + self.incdirs = dirs + self.is_system = is_system + + # Interpreter has validated that all given directories + # actually exist. + self.extra_build_dirs: T.List[str] = extra_build_dirs or [] + + def __repr__(self) -> str: + r = '<{} {}/{}>' + return r.format(self.__class__.__name__, self.curdir, self.incdirs) + + def get_curdir(self) -> str: + return self.curdir + + def get_incdirs(self) -> T.List[str]: + return self.incdirs + + def get_extra_build_dirs(self) -> T.List[str]: + return self.extra_build_dirs + + def to_string_list(self, sourcedir: str) -> T.List[str]: + """Convert IncludeDirs object to a list of strings.""" + strlist: T.List[str] = [] + for idir in self.incdirs: + strlist.append(os.path.join(sourcedir, self.curdir, idir)) + return strlist + +class ExtractedObjects(HoldableObject): + ''' + Holds a list of sources for which the objects must be extracted + ''' + def __init__(self, target, srclist=None, genlist=None, objlist=None, recursive=True): + self.target = target + self.recursive = recursive + self.srclist = srclist if srclist is not None else [] + self.genlist = genlist if genlist is not None else [] + self.objlist = objlist if objlist is not None else [] + if self.target.is_unity: + self.check_unity_compatible() + + def __repr__(self): + r = '<{0} {1!r}: {2}>' + return r.format(self.__class__.__name__, self.target.name, self.srclist) + + @staticmethod + def get_sources(sources, generated_sources): + # Merge sources and generated sources + sources = list(sources) + for gensrc in generated_sources: + for s in gensrc.get_outputs(): + # We cannot know the path where this source will be generated, + # but all we need here is the file extension to determine the + # compiler. + sources.append(s) + + # Filter out headers and all non-source files + return [s for s in sources if environment.is_source(s) and not environment.is_header(s)] + + def classify_all_sources(self, sources, generated_sources): + sources = self.get_sources(sources, generated_sources) + return classify_unity_sources(self.target.compilers.values(), sources) + + def check_unity_compatible(self): + # Figure out if the extracted object list is compatible with a Unity + # build. When we're doing a Unified build, we go through the sources, + # and create a single source file from each subset of the sources that + # can be compiled with a specific compiler. Then we create one object + # from each unified source file. So for each compiler we can either + # extra all its sources or none. 
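
The all-or-nothing rule described in the comment above can be seen in a small standalone sketch (the file names and the grouping helper are invented for illustration; the real check uses classify_unity_sources over the target's compilers):

def group_by_language(sources):
    # Crude stand-in for classify_unity_sources: bucket sources per compiler.
    return {'c':   [s for s in sources if s.endswith('.c')],
            'cpp': [s for s in sources if s.endswith('.cpp')]}

target_sources = ['a.c', 'b.c', 'main.cpp']
requested      = ['a.c']                 # ask to extract a single object

groups = group_by_language(target_sources)
wanted = group_by_language(requested)
for lang, srcs in wanted.items():
    if srcs and set(srcs) != set(groups[lang]):
        # Under a unity build all 'c' sources are fused into one object, so a
        # partial extraction like this cannot be satisfied.
        print(f'cannot extract: unity build produces one object for all {lang} sources')
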
+ cmpsrcs = self.classify_all_sources(self.target.sources, self.target.generated) + extracted_cmpsrcs = self.classify_all_sources(self.srclist, self.genlist) + + for comp, srcs in extracted_cmpsrcs.items(): + if set(srcs) != set(cmpsrcs[comp]): + raise MesonException('Single object files can not be extracted ' + 'in Unity builds. You can only extract all ' + 'the object files for each compiler at once.') + + def get_outputs(self, backend): + return [ + backend.object_filename_from_source(self.target, source) + for source in self.get_sources(self.srclist, self.genlist) + ] + +class EnvironmentVariables(HoldableObject): + def __init__(self) -> None: + self.envvars = [] + # The set of all env vars we have operations for. Only used for self.has_name() + self.varnames = set() + + def __repr__(self): + repr_str = "<{0}: {1}>" + return repr_str.format(self.__class__.__name__, self.envvars) + + def has_name(self, name: str) -> bool: + return name in self.varnames + + def set(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None: + self.varnames.add(name) + self.envvars.append((self._set, name, values, separator)) + + def append(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None: + self.varnames.add(name) + self.envvars.append((self._append, name, values, separator)) + + def prepend(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None: + self.varnames.add(name) + self.envvars.append((self._prepend, name, values, separator)) + + def _set(self, env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str: + return separator.join(values) + + def _append(self, env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str: + curr = env.get(name) + return separator.join(values if curr is None else [curr] + values) + + def _prepend(self, env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str: + curr = env.get(name) + return separator.join(values if curr is None else values + [curr]) + + def get_env(self, full_env: T.Dict[str, str]) -> T.Dict[str, str]: + env = full_env.copy() + for method, name, values, separator in self.envvars: + env[name] = method(env, name, values, separator) + return env + +class Target(HoldableObject): + + # TODO: should Target be an abc.ABCMeta? + + def __init__(self, name: str, subdir: str, subproject: str, build_by_default: bool, for_machine: MachineChoice): + if has_path_sep(name): + # Fix failing test 53 when this becomes an error. + mlog.warning(textwrap.dedent(f'''\ + Target "{name}" has a path separator in its name. + This is not supported, it can cause unexpected failures and will become + a hard error in the future.\ + ''')) + self.name = name + self.subdir = subdir + self.subproject = subproject + self.build_by_default = build_by_default + self.for_machine = for_machine + self.install = False + self.build_always_stale = False + self.option_overrides_base: T.Dict[OptionKey, str] = {} + self.option_overrides_compiler: T.Dict[OptionKey, str] = {} + self.extra_files = [] # type: T.List[File] + if not hasattr(self, 'typename'): + raise RuntimeError(f'Target type is not set for target class "{type(self).__name__}". 
This is a bug') + + def __lt__(self, other: object) -> bool: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() < other.get_id() + + def __le__(self, other: object) -> bool: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() <= other.get_id() + + def __gt__(self, other: object) -> bool: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() > other.get_id() + + def __ge__(self, other: object) -> bool: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() >= other.get_id() + + def get_default_install_dir(self, env: environment.Environment) -> str: + raise NotImplementedError + + def get_install_dir(self, environment: environment.Environment) -> T.Tuple[T.Any, bool]: + # Find the installation directory. + default_install_dir = self.get_default_install_dir(environment) + outdirs = self.get_custom_install_dir() + if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True: + # Either the value is set to a non-default value, or is set to + # False (which means we want this specific output out of many + # outputs to not be installed). + custom_install_dir = True + else: + custom_install_dir = False + outdirs[0] = default_install_dir + return outdirs, custom_install_dir + + def get_basename(self) -> str: + return self.name + + def get_subdir(self) -> str: + return self.subdir + + def get_typename(self) -> str: + return self.typename + + @staticmethod + def _get_id_hash(target_id): + # We don't really need cryptographic security here. + # Small-digest hash function with unlikely collision is good enough. + h = hashlib.sha256() + h.update(target_id.encode(encoding='utf-8', errors='replace')) + # This ID should be case-insensitive and should work in Visual Studio, + # e.g. it should not start with leading '-'. + return h.hexdigest()[:7] + + @staticmethod + def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str: + """Construct target ID from subdir, name and type suffix. + + This helper function is made public mostly for tests.""" + # This ID must also be a valid file name on all OSs. + # It should also avoid shell metacharacters for obvious + # reasons. '@' is not used as often as '_' in source code names. + # In case of collisions consider using checksums. + # FIXME replace with assert when slash in names is prohibited + name_part = name.replace('/', '@').replace('\\', '@') + assert not has_path_sep(type_suffix) + my_id = name_part + type_suffix + if subdir: + subdir_part = Target._get_id_hash(subdir) + # preserve myid for better debuggability + return subdir_part + '@@' + my_id + return my_id + + def get_id(self) -> str: + return self.construct_id_from_path( + self.subdir, self.name, self.type_suffix()) + + def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None: + if 'build_by_default' in kwargs: + self.build_by_default = kwargs['build_by_default'] + if not isinstance(self.build_by_default, bool): + raise InvalidArguments('build_by_default must be a boolean value.') + elif kwargs.get('install', False): + # For backward compatibility, if build_by_default is not explicitly + # set, use the value of 'install' if it's enabled. 
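
The defaulting rule in the comment above, written out as a tiny standalone function (a hypothetical helper shown only to make the precedence explicit; the class default is True for build targets):

def effective_build_by_default(kwargs: dict, class_default: bool = True) -> bool:
    if 'build_by_default' in kwargs:
        return bool(kwargs['build_by_default'])   # an explicit setting always wins
    if kwargs.get('install', False):
        return True                               # installed targets build by default
    return class_default                          # otherwise keep the constructor value

print(effective_build_by_default({'install': True}))                              # True
print(effective_build_by_default({'build_by_default': False, 'install': True}))  # False
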
+ self.build_by_default = True + + option_overrides = self.parse_overrides(kwargs) + + for k, v in option_overrides.items(): + if k.lang: + self.option_overrides_compiler[k.evolve(machine=self.for_machine)] = v + continue + self.option_overrides_base[k] = v + + @staticmethod + def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[OptionKey, str]: + result: T.Dict[OptionKey, str] = {} + overrides = stringlistify(kwargs.get('override_options', [])) + for o in overrides: + if '=' not in o: + raise InvalidArguments('Overrides must be of form "key=value"') + k, v = o.split('=', 1) + key = OptionKey.from_string(k.strip()) + v = v.strip() + result[key] = v + return result + + def is_linkable_target(self) -> bool: + return False + + def get_outputs(self) -> T.List[str]: + return [] + + def should_install(self) -> bool: + return False + +class BuildTarget(Target): + known_kwargs = known_build_target_kwargs + + def __init__(self, name: str, subdir: str, subproject: str, for_machine: MachineChoice, + sources: T.List['SourceOutputs'], objects, environment: environment.Environment, kwargs): + super().__init__(name, subdir, subproject, True, for_machine) + unity_opt = environment.coredata.get_option(OptionKey('unity')) + self.is_unity = unity_opt == 'on' or (unity_opt == 'subprojects' and subproject != '') + self.environment = environment + self.compilers = OrderedDict() # type: OrderedDict[str, Compiler] + self.objects = [] + self.external_deps = [] + self.include_dirs = [] + self.link_language = kwargs.get('link_language') + self.link_targets: T.List[BuildTarget] = [] + self.link_whole_targets = [] + self.link_depends = [] + self.added_deps = set() + self.name_prefix_set = False + self.name_suffix_set = False + self.filename = 'no_name' + # The list of all files outputted by this target. Useful in cases such + # as Vala which generates .vapi and .h besides the compiled output. + self.outputs = [self.filename] + self.need_install = False + self.pch = {} + self.extra_args: T.Dict[str, T.List['FileOrString']] = {} + self.sources: T.List[File] = [] + self.generated: T.List[T.Union[GeneratedList, CustomTarget, CustomTargetIndex]] = [] + self.d_features = {} + self.pic = False + self.pie = False + # Track build_rpath entries so we can remove them at install time + self.rpath_dirs_to_remove: T.Set[bytes] = set() + self.process_sourcelist(sources) + # Objects can be: + # 1. Pre-existing objects provided by the user with the `objects:` kwarg + # 2. Compiled objects created by and extracted from another target + self.process_objectlist(objects) + self.process_kwargs(kwargs, environment) + self.check_unknown_kwargs(kwargs) + self.process_compilers() + if not any([self.sources, self.generated, self.objects, self.link_whole]): + raise InvalidArguments(f'Build target {name} has no sources.') + self.process_compilers_late() + self.validate_sources() + self.validate_install(environment) + self.check_module_linking() + + def __repr__(self): + repr_str = "<{0} {1}: {2}>" + return repr_str.format(self.__class__.__name__, self.get_id(), self.filename) + + def __str__(self): + return f"{self.name}" + + def validate_install(self, environment): + if self.for_machine is MachineChoice.BUILD and self.need_install: + if environment.is_cross_build(): + raise InvalidArguments('Tried to install a target for the build machine in a cross build.') + else: + mlog.warning('Installing target build for the build machine. 
This will fail in a cross build.') + + def check_unknown_kwargs(self, kwargs): + # Override this method in derived classes that have more + # keywords. + self.check_unknown_kwargs_int(kwargs, self.known_kwargs) + + def check_unknown_kwargs_int(self, kwargs, known_kwargs): + unknowns = [] + for k in kwargs: + if k not in known_kwargs: + unknowns.append(k) + if len(unknowns) > 0: + mlog.warning('Unknown keyword argument(s) in target {}: {}.'.format(self.name, ', '.join(unknowns))) + + def process_objectlist(self, objects): + assert(isinstance(objects, list)) + for s in objects: + if isinstance(s, (str, File, ExtractedObjects)): + self.objects.append(s) + elif isinstance(s, (GeneratedList, CustomTarget)): + msg = 'Generated files are not allowed in the \'objects\' kwarg ' + \ + f'for target {self.name!r}.\nIt is meant only for ' + \ + 'pre-built object files that are shipped with the\nsource ' + \ + 'tree. Try adding it in the list of sources.' + raise InvalidArguments(msg) + else: + raise InvalidArguments(f'Bad object of type {type(s).__name__!r} in target {self.name!r}.') + + def process_sourcelist(self, sources: T.List['SourceOutputs']) -> None: + """Split sources into generated and static sources. + + Sources can be: + 1. Pre-existing source files in the source tree (static) + 2. Pre-existing sources generated by configure_file in the build tree. + (static as they are only regenerated if meson itself is regenerated) + 3. Sources files generated by another target or a Generator (generated) + """ + added_sources: T.Set[File] = set() # If the same source is defined multiple times, use it only once. + for s in sources: + if isinstance(s, File): + if s not in added_sources: + self.sources.append(s) + added_sources.add(s) + elif isinstance(s, (CustomTarget, CustomTargetIndex, GeneratedList)): + self.generated.append(s) + + @staticmethod + def can_compile_remove_sources(compiler: 'Compiler', sources: T.List['FileOrString']) -> bool: + removed = False + for s in sources[:]: + if compiler.can_compile(s): + sources.remove(s) + removed = True + return removed + + def process_compilers_late(self): + """Processes additional compilers after kwargs have been evaluated. + + This can add extra compilers that might be required by keyword + arguments, such as link_with or dependencies. It will also try to guess + which compiler to use if one hasn't been selected already. + """ + # Populate list of compilers + compilers = self.environment.coredata.compilers[self.for_machine] + + # did user override clink_langs for this target? + link_langs = [self.link_language] if self.link_language else clink_langs + + # If this library is linked against another library we need to consider + # the languages of those libraries as well. + if self.link_targets or self.link_whole_targets: + extra = set() + for t in itertools.chain(self.link_targets, self.link_whole_targets): + if isinstance(t, CustomTarget) or isinstance(t, CustomTargetIndex): + continue # We can't know anything about these. + for name, compiler in t.compilers.items(): + if name in link_langs: + extra.add((name, compiler)) + for name, compiler in sorted(extra, key=lambda p: sort_clink(p[0])): + self.compilers[name] = compiler + + if not self.compilers: + # No source files or parent targets, target consists of only object + # files of unknown origin. 
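
A compressed view of the fallback this branch performs, under hypothetical inputs: walk a priority-ordered language list and take the first compiler the project has. The list and the string values below are placeholders; the real order comes from clink_langs (or link_language when set) and the values are Compiler objects.

def pick_fallback_compiler(link_langs, available):
    # First language in priority order for which a compiler exists wins.
    for lang in link_langs:
        if lang in available:
            return lang, available[lang]
    return None

available = {'c': '<c compiler>', 'fortran': '<fortran compiler>'}
print(pick_fallback_compiler(['cpp', 'c', 'fortran'], available))
# -> ('c', '<c compiler>')
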
Just add the first clink compiler + # that we have and hope that it can link these objects + for lang in link_langs: + if lang in compilers: + self.compilers[lang] = compilers[lang] + break + + def process_compilers(self): + ''' + Populate self.compilers, which is the list of compilers that this + target will use for compiling all its sources. + We also add compilers that were used by extracted objects to simplify + dynamic linker determination. + ''' + if not self.sources and not self.generated and not self.objects: + return + # Populate list of compilers + compilers = self.environment.coredata.compilers[self.for_machine] + # Pre-existing sources + sources = list(self.sources) + # All generated sources + for gensrc in self.generated: + for s in gensrc.get_outputs(): + # Generated objects can't be compiled, so don't use them for + # compiler detection. If our target only has generated objects, + # we will fall back to using the first c-like compiler we find, + # which is what we need. + if not is_object(s): + sources.append(s) + for d in self.external_deps: + for s in d.sources: + if isinstance(s, (str, File)): + sources.append(s) + + # Sources that were used to create our extracted objects + for o in self.objects: + if not isinstance(o, ExtractedObjects): + continue + for s in o.srclist: + # Don't add Vala sources since that will pull in the Vala + # compiler even though we will never use it since we are + # dealing with compiled C code. + if not s.endswith(lang_suffixes['vala']): + sources.append(s) + if sources: + # For each source, try to add one compiler that can compile it. + # + # If it has a suffix that belongs to a known language, we must have + # a compiler for that language. + # + # Otherwise, it's ok if no compilers can compile it, because users + # are expected to be able to add arbitrary non-source files to the + # sources list + for s in sources: + for lang, compiler in compilers.items(): + if compiler.can_compile(s): + if lang not in self.compilers: + self.compilers[lang] = compiler + break + else: + if is_known_suffix(s): + raise MesonException('No {} machine compiler for "{}"'. + format(self.for_machine.get_lower_case_name(), s)) + + # Re-sort according to clink_langs + self.compilers = OrderedDict(sorted(self.compilers.items(), + key=lambda t: sort_clink(t[0]))) + + # If all our sources are Vala, our target also needs the C compiler but + # it won't get added above. + if ('vala' in self.compilers or 'cython' in self.compilers) and 'c' not in self.compilers: + self.compilers['c'] = compilers['c'] + + def validate_sources(self): + if not self.sources: + return + for lang in ('cs', 'java'): + if lang in self.compilers: + check_sources = list(self.sources) + compiler = self.compilers[lang] + if not self.can_compile_remove_sources(compiler, check_sources): + raise InvalidArguments(f'No {lang} sources found in target {self.name!r}') + if check_sources: + m = '{0} targets can only contain {0} files:\n'.format(lang.capitalize()) + m += '\n'.join([repr(c) for c in check_sources]) + raise InvalidArguments(m) + # CSharp and Java targets can't contain any other file types + assert(len(self.compilers) == 1) + return + + def process_link_depends(self, sources, environment): + """Process the link_depends keyword argument. + + This is designed to handle strings, Files, and the output of Custom + Targets. 
Notably it doesn't handle generator() returned objects, since + adding them as a link depends would inherently cause them to be + generated twice, since the output needs to be passed to the ld_args and + link_depends. + """ + sources = listify(sources) + for s in sources: + if isinstance(s, File): + self.link_depends.append(s) + elif isinstance(s, str): + self.link_depends.append( + File.from_source_file(environment.source_dir, self.subdir, s)) + elif hasattr(s, 'get_outputs'): + self.link_depends.extend( + [File.from_built_file(s.get_subdir(), p) for p in s.get_outputs()]) + else: + raise InvalidArguments( + 'Link_depends arguments must be strings, Files, ' + 'or a Custom Target, or lists thereof.') + + def get_original_kwargs(self): + return self.kwargs + + def copy_kwargs(self, kwargs): + self.kwargs = copy.copy(kwargs) + for k, v in self.kwargs.items(): + if isinstance(v, list): + self.kwargs[k] = listify(v, flatten=True) + for t in ['dependencies', 'link_with', 'include_directories', 'sources']: + if t in self.kwargs: + self.kwargs[t] = listify(self.kwargs[t], flatten=True) + + def extract_objects(self, srclist: T.List[FileOrString]) -> ExtractedObjects: + obj_src = [] + sources_set = set(self.sources) + for src in srclist: + if isinstance(src, str): + src = File(False, self.subdir, src) + elif isinstance(src, File): + FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject) + else: + raise MesonException(f'Object extraction arguments must be strings or Files (got {type(src).__name__}).') + # FIXME: It could be a generated source + if src not in sources_set: + raise MesonException(f'Tried to extract unknown source {src}.') + obj_src.append(src) + return ExtractedObjects(self, obj_src) + + def extract_all_objects(self, recursive: bool = True) -> ExtractedObjects: + return ExtractedObjects(self, self.sources, self.generated, self.objects, + recursive) + + def get_all_link_deps(self): + return self.get_transitive_link_deps() + + @lru_cache(maxsize=None) + def get_transitive_link_deps(self) -> 'ImmutableListProtocol[Target]': + result: T.List[Target] = [] + for i in self.link_targets: + result += i.get_all_link_deps() + return result + + def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]: + return self.get_transitive_link_deps_mapping(prefix, environment) + + @lru_cache(maxsize=None) + def get_transitive_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]: + result: T.Dict[str, str] = {} + for i in self.link_targets: + mapping = i.get_link_deps_mapping(prefix, environment) + #we are merging two dictionaries, while keeping the earlier one dominant + result_tmp = mapping.copy() + result_tmp.update(result) + result = result_tmp + return result + + @lru_cache(maxsize=None) + def get_link_dep_subdirs(self) -> 'ImmutableSetProtocol[str]': + result: OrderedSet[str] = OrderedSet() + for i in self.link_targets: + if not isinstance(i, StaticLibrary): + result.add(i.get_subdir()) + result.update(i.get_link_dep_subdirs()) + return result + + def get_default_install_dir(self, environment: environment.Environment) -> str: + return environment.get_libdir() + + def get_custom_install_dir(self): + return self.install_dir + + def get_custom_install_mode(self): + return self.install_mode + + def process_kwargs(self, kwargs, environment): + self.process_kwargs_base(kwargs) + self.copy_kwargs(kwargs) + kwargs.get('modules', []) + self.need_install = kwargs.get('install', 
self.need_install) + llist = extract_as_list(kwargs, 'link_with') + for linktarget in llist: + if isinstance(linktarget, dependencies.ExternalLibrary): + raise MesonException(textwrap.dedent('''\ + An external library was used in link_with keyword argument, which + is reserved for libraries built as part of this project. External + libraries must be passed using the dependencies keyword argument + instead, because they are conceptually "external dependencies", + just like those detected with the dependency() function.\ + ''')) + self.link(linktarget) + lwhole = extract_as_list(kwargs, 'link_whole') + for linktarget in lwhole: + self.link_whole(linktarget) + + c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \ + = [extract_as_list(kwargs, c) for c in ['c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', 'objcpp_args', 'fortran_args', 'rust_args']] + + self.add_pch('c', c_pchlist) + self.add_pch('cpp', cpp_pchlist) + compiler_args = {'c': clist, 'cpp': cpplist, 'cuda': cudalist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist, + 'fortran': fortranlist, 'rust': rustlist + } + for key, value in compiler_args.items(): + self.add_compiler_args(key, value) + + if not isinstance(self, Executable) or 'export_dynamic' in kwargs: + self.vala_header = kwargs.get('vala_header', self.name + '.h') + self.vala_vapi = kwargs.get('vala_vapi', self.name + '.vapi') + self.vala_gir = kwargs.get('vala_gir', None) + + dlist = stringlistify(kwargs.get('d_args', [])) + self.add_compiler_args('d', dlist) + dfeatures = dict() + dfeature_unittest = kwargs.get('d_unittest', False) + if dfeature_unittest: + dfeatures['unittest'] = dfeature_unittest + dfeature_versions = kwargs.get('d_module_versions', []) + if dfeature_versions: + dfeatures['versions'] = dfeature_versions + dfeature_debug = kwargs.get('d_debug', []) + if dfeature_debug: + dfeatures['debug'] = dfeature_debug + if 'd_import_dirs' in kwargs: + dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs') + for d in dfeature_import_dirs: + if not isinstance(d, IncludeDirs): + raise InvalidArguments('Arguments to d_import_dirs must be include_directories.') + dfeatures['import_dirs'] = dfeature_import_dirs + if dfeatures: + self.d_features = dfeatures + + self.link_args = extract_as_list(kwargs, 'link_args') + for i in self.link_args: + if not isinstance(i, str): + raise InvalidArguments('Link_args arguments must be strings.') + for l in self.link_args: + if '-Wl,-rpath' in l or l.startswith('-rpath'): + mlog.warning(textwrap.dedent('''\ + Please do not define rpath with a linker argument, use install_rpath + or build_rpath properties instead. + This will become a hard error in a future Meson release.\ + ''')) + self.process_link_depends(kwargs.get('link_depends', []), environment) + # Target-specific include dirs must be added BEFORE include dirs from + # internal deps (added inside self.add_deps()) to override them. 
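
To make the ordering constraint concrete: with most compilers the first matching -I directory wins, so putting the target's own include directories ahead of those contributed by internal dependencies is what lets the target shadow a dependency's header of the same name. The paths below are invented for illustration.

target_incdirs = ['include']                         # from include_directories: on the target
dep_incdirs    = ['subprojects/foolib/include']      # inherited from an internal dependency

# Target-specific dirs first, exactly as the comment above requires.
search_order = target_incdirs + dep_incdirs
print(['-I' + d for d in search_order])
# -> ['-Iinclude', '-Isubprojects/foolib/include']
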
+ inclist = extract_as_list(kwargs, 'include_directories') + self.add_include_dirs(inclist) + # Add dependencies (which also have include_directories) + deplist = extract_as_list(kwargs, 'dependencies') + self.add_deps(deplist) + # If an item in this list is False, the output corresponding to + # the list index of that item will not be installed + self.install_dir = typeslistify(kwargs.get('install_dir', [None]), + (str, bool)) + self.install_mode = kwargs.get('install_mode', None) + main_class = kwargs.get('main_class', '') + if not isinstance(main_class, str): + raise InvalidArguments('Main class must be a string') + self.main_class = main_class + if isinstance(self, Executable): + # This kwarg is deprecated. The value of "none" means that the kwarg + # was not specified and win_subsystem should be used instead. + self.gui_app = None + if 'gui_app' in kwargs: + if 'win_subsystem' in kwargs: + raise InvalidArguments('Can specify only gui_app or win_subsystem for a target, not both.') + self.gui_app = kwargs['gui_app'] + if not isinstance(self.gui_app, bool): + raise InvalidArguments('Argument gui_app must be boolean.') + self.win_subsystem = self.validate_win_subsystem(kwargs.get('win_subsystem', 'console')) + elif 'gui_app' in kwargs: + raise InvalidArguments('Argument gui_app can only be used on executables.') + elif 'win_subsystem' in kwargs: + raise InvalidArguments('Argument win_subsystem can only be used on executables.') + extra_files = extract_as_list(kwargs, 'extra_files') + for i in extra_files: + assert(isinstance(i, File)) + trial = os.path.join(environment.get_source_dir(), i.subdir, i.fname) + if not(os.path.isfile(trial)): + raise InvalidArguments(f'Tried to add non-existing extra file {i}.') + self.extra_files = extra_files + self.install_rpath: str = kwargs.get('install_rpath', '') + if not isinstance(self.install_rpath, str): + raise InvalidArguments('Install_rpath is not a string.') + self.build_rpath = kwargs.get('build_rpath', '') + if not isinstance(self.build_rpath, str): + raise InvalidArguments('Build_rpath is not a string.') + resources = extract_as_list(kwargs, 'resources') + for r in resources: + if not isinstance(r, str): + raise InvalidArguments('Resource argument is not a string.') + trial = os.path.join(environment.get_source_dir(), self.subdir, r) + if not os.path.isfile(trial): + raise InvalidArguments(f'Tried to add non-existing resource {r}.') + self.resources = resources + if 'name_prefix' in kwargs: + name_prefix = kwargs['name_prefix'] + if isinstance(name_prefix, list): + if name_prefix: + raise InvalidArguments('name_prefix array must be empty to signify default.') + else: + if not isinstance(name_prefix, str): + raise InvalidArguments('name_prefix must be a string.') + self.prefix = name_prefix + self.name_prefix_set = True + if 'name_suffix' in kwargs: + name_suffix = kwargs['name_suffix'] + if isinstance(name_suffix, list): + if name_suffix: + raise InvalidArguments('name_suffix array must be empty to signify default.') + else: + if not isinstance(name_suffix, str): + raise InvalidArguments('name_suffix must be a string.') + if name_suffix == '': + raise InvalidArguments('name_suffix should not be an empty string. ' + 'If you want meson to use the default behaviour ' + 'for each platform pass `[]` (empty array)') + self.suffix = name_suffix + self.name_suffix_set = True + if isinstance(self, StaticLibrary): + # You can't disable PIC on OS X. The compiler ignores -fno-PIC. 
+ # PIC is always on for Windows (all code is position-independent + # since library loading is done differently) + m = self.environment.machines[self.for_machine] + if m.is_darwin() or m.is_windows(): + self.pic = True + else: + self.pic = self._extract_pic_pie(kwargs, 'pic', environment, 'b_staticpic') + if isinstance(self, Executable) or (isinstance(self, StaticLibrary) and not self.pic): + # Executables must be PIE on Android + if self.environment.machines[self.for_machine].is_android(): + self.pie = True + else: + self.pie = self._extract_pic_pie(kwargs, 'pie', environment, 'b_pie') + self.implicit_include_directories = kwargs.get('implicit_include_directories', True) + if not isinstance(self.implicit_include_directories, bool): + raise InvalidArguments('Implicit_include_directories must be a boolean.') + self.gnu_symbol_visibility = kwargs.get('gnu_symbol_visibility', '') + if not isinstance(self.gnu_symbol_visibility, str): + raise InvalidArguments('GNU symbol visibility must be a string.') + if self.gnu_symbol_visibility != '': + permitted = ['default', 'internal', 'hidden', 'protected', 'inlineshidden'] + if self.gnu_symbol_visibility not in permitted: + raise InvalidArguments('GNU symbol visibility arg {} not one of: {}'.format(self.symbol_visibility, ', '.join(permitted))) + + def validate_win_subsystem(self, value: str) -> str: + value = value.lower() + if re.fullmatch(r'(boot_application|console|efi_application|efi_boot_service_driver|efi_rom|efi_runtime_driver|native|posix|windows)(,\d+(\.\d+)?)?', value) is None: + raise InvalidArguments(f'Invalid value for win_subsystem: {value}.') + return value + + def _extract_pic_pie(self, kwargs, arg: str, environment, option: str): + # Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags + all_flags = self.extra_args['c'] + self.extra_args['cpp'] + if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags: + mlog.warning(f"Use the '{arg}' kwarg instead of passing '-f{arg}' manually to {self.name!r}") + return True + + k = OptionKey(option) + if arg in kwargs: + val = kwargs[arg] + elif k in environment.coredata.options: + val = environment.coredata.options[k].value + else: + val = False + + if not isinstance(val, bool): + raise InvalidArguments(f'Argument {arg} to {self.name!r} must be boolean') + return val + + def get_filename(self): + return self.filename + + def get_outputs(self) -> T.List[str]: + return self.outputs + + def get_extra_args(self, language): + return self.extra_args.get(language, []) + + def get_dependencies(self, exclude=None): + transitive_deps = [] + if exclude is None: + exclude = [] + for t in itertools.chain(self.link_targets, self.link_whole_targets): + if t in transitive_deps or t in exclude: + continue + transitive_deps.append(t) + if isinstance(t, StaticLibrary): + transitive_deps += t.get_dependencies(transitive_deps + exclude) + return transitive_deps + + def get_source_subdir(self): + return self.subdir + + def get_sources(self): + return self.sources + + def get_objects(self): + return self.objects + + def get_generated_sources(self): + return self.generated + + def should_install(self) -> bool: + return self.need_install + + def has_pch(self): + return len(self.pch) > 0 + + def get_pch(self, language): + try: + return self.pch[language] + except KeyError: + return[] + + def get_include_dirs(self): + return self.include_dirs + + def add_deps(self, deps): + deps = listify(deps) + for dep in deps: + if dep in self.added_deps: + continue + if isinstance(dep, 
dependencies.InternalDependency): + # Those parts that are internal. + self.process_sourcelist(dep.sources) + self.add_include_dirs(dep.include_directories, dep.get_include_type()) + for l in dep.libraries: + self.link(l) + for l in dep.whole_libraries: + self.link_whole(l) + if dep.get_compile_args() or dep.get_link_args(): + # Those parts that are external. + extpart = dependencies.InternalDependency('undefined', + [], + dep.get_compile_args(), + dep.get_link_args(), + [], [], [], [], {}) + self.external_deps.append(extpart) + # Deps of deps. + self.add_deps(dep.ext_deps) + elif isinstance(dep, dependencies.Dependency): + if dep not in self.external_deps: + self.external_deps.append(dep) + self.process_sourcelist(dep.get_sources()) + self.add_deps(dep.ext_deps) + elif isinstance(dep, BuildTarget): + raise InvalidArguments('''Tried to use a build target as a dependency. +You probably should put it in link_with instead.''') + else: + # This is a bit of a hack. We do not want Build to know anything + # about the interpreter so we can't import it and use isinstance. + # This should be reliable enough. + if hasattr(dep, 'project_args_frozen') or hasattr(dep, 'global_args_frozen'): + raise InvalidArguments('Tried to use subproject object as a dependency.\n' + 'You probably wanted to use a dependency declared in it instead.\n' + 'Access it by calling get_variable() on the subproject object.') + raise InvalidArguments(f'Argument is of an unacceptable type {type(dep).__name__!r}.\nMust be ' + 'either an external dependency (returned by find_library() or ' + 'dependency()) or an internal dependency (returned by ' + 'declare_dependency()).') + self.added_deps.add(dep) + + def get_external_deps(self): + return self.external_deps + + def is_internal(self): + return isinstance(self, StaticLibrary) and not self.need_install + + def link(self, target): + for t in listify(target): + if isinstance(self, StaticLibrary) and self.need_install: + if isinstance(t, (CustomTarget, CustomTargetIndex)): + if not t.should_install(): + mlog.warning(f'Try to link an installed static library target {self.name} with a' + 'custom target that is not installed, this might cause problems' + 'when you try to use this static library') + elif t.is_internal(): + # When we're a static library and we link_with to an + # internal/convenience library, promote to link_whole. + return self.link_whole(t) + if not isinstance(t, (Target, CustomTargetIndex)): + raise InvalidArguments(f'{t!r} is not a target.') + if not t.is_linkable_target(): + raise InvalidArguments(f"Link target '{t!s}' is not linkable.") + if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic: + msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. " + msg += "Use the 'pic' option to static_library to build with PIC." 
+ raise InvalidArguments(msg) + if self.for_machine is not t.for_machine: + msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}' + if self.environment.is_cross_build(): + raise InvalidArguments(msg + ' This is not possible in a cross build.') + else: + mlog.warning(msg + ' This will fail in cross build.') + self.link_targets.append(t) + + def link_whole(self, target): + for t in listify(target): + if isinstance(t, (CustomTarget, CustomTargetIndex)): + if not t.is_linkable_target(): + raise InvalidArguments(f'Custom target {t!r} is not linkable.') + if not t.get_filename().endswith('.a'): + raise InvalidArguments('Can only link_whole custom targets that are .a archives.') + if isinstance(self, StaticLibrary): + # FIXME: We could extract the .a archive to get object files + raise InvalidArguments('Cannot link_whole a custom target into a static library') + elif not isinstance(t, StaticLibrary): + raise InvalidArguments(f'{t!r} is not a static library.') + elif isinstance(self, SharedLibrary) and not t.pic: + msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. " + msg += "Use the 'pic' option to static_library to build with PIC." + raise InvalidArguments(msg) + if self.for_machine is not t.for_machine: + msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}' + if self.environment.is_cross_build(): + raise InvalidArguments(msg + ' This is not possible in a cross build.') + else: + mlog.warning(msg + ' This will fail in cross build.') + if isinstance(self, StaticLibrary): + # When we're a static library and we link_whole: to another static + # library, we need to add that target's objects to ourselves. + self.objects += t.extract_all_objects_recurse() + self.link_whole_targets.append(t) + + def extract_all_objects_recurse(self): + objs = [self.extract_all_objects()] + for t in self.link_targets: + if t.is_internal(): + objs += t.extract_all_objects_recurse() + return objs + + def add_pch(self, language, pchlist): + if not pchlist: + return + elif len(pchlist) == 1: + if not environment.is_header(pchlist[0]): + raise InvalidArguments(f'PCH argument {pchlist[0]} is not a header.') + elif len(pchlist) == 2: + if environment.is_header(pchlist[0]): + if not environment.is_source(pchlist[1]): + raise InvalidArguments('PCH definition must contain one header and at most one source.') + elif environment.is_source(pchlist[0]): + if not environment.is_header(pchlist[1]): + raise InvalidArguments('PCH definition must contain one header and at most one source.') + pchlist = [pchlist[1], pchlist[0]] + else: + raise InvalidArguments(f'PCH argument {pchlist[0]} is of unknown type.') + + if (os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1])): + raise InvalidArguments('PCH files must be stored in the same folder.') + + mlog.warning('PCH source files are deprecated, only a single header file should be used.') + elif len(pchlist) > 2: + raise InvalidArguments('PCH definition may have a maximum of 2 files.') + for f in pchlist: + if not isinstance(f, str): + raise MesonException('PCH arguments must be strings.') + if not os.path.isfile(os.path.join(self.environment.source_dir, self.subdir, f)): + raise MesonException(f'File {f} does not exist.') + self.pch[language] = pchlist + + def add_include_dirs(self, args, set_is_system: T.Optional[str] = None): + ids = [] + for a in args: + if not isinstance(a, IncludeDirs): + raise InvalidArguments('Include directory to be 
added is not an include directory object.') + ids.append(a) + if set_is_system is None: + set_is_system = 'preserve' + if set_is_system != 'preserve': + is_system = set_is_system == 'system' + ids = [IncludeDirs(x.get_curdir(), x.get_incdirs(), is_system, x.get_extra_build_dirs()) for x in ids] + self.include_dirs += ids + + def add_compiler_args(self, language: str, args: T.List['FileOrString']) -> None: + args = listify(args) + for a in args: + if not isinstance(a, (str, File)): + raise InvalidArguments('A non-string passed to compiler args.') + if language in self.extra_args: + self.extra_args[language] += args + else: + self.extra_args[language] = args + + def get_aliases(self) -> T.Dict[str, str]: + return {} + + def get_langs_used_by_deps(self) -> T.List[str]: + ''' + Sometimes you want to link to a C++ library that exports C API, which + means the linker must link in the C++ stdlib, and we must use a C++ + compiler for linking. The same is also applicable for objc/objc++, etc, + so we can keep using clink_langs for the priority order. + + See: https://github.com/mesonbuild/meson/issues/1653 + ''' + langs = [] # type: T.List[str] + + # Check if any of the external libraries were written in this language + for dep in self.external_deps: + if dep.language is None: + continue + if dep.language not in langs: + langs.append(dep.language) + # Check if any of the internal libraries this target links to were + # written in this language + for link_target in itertools.chain(self.link_targets, self.link_whole_targets): + if isinstance(link_target, (CustomTarget, CustomTargetIndex)): + continue + for language in link_target.compilers: + if language not in langs: + langs.append(language) + + return langs + + def get_prelinker(self): + all_compilers = self.environment.coredata.compilers[self.for_machine] + if self.link_language: + comp = all_compilers[self.link_language] + return comp + for l in clink_langs: + if l in self.compilers: + try: + prelinker = all_compilers[l] + except KeyError: + raise MesonException( + f'Could not get a prelinker linker for build target {self.name!r}. ' + f'Requires a compiler for language "{l}", but that is not ' + 'a project language.') + return prelinker + raise MesonException(f'Could not determine prelinker for {self.name!r}.') + + def get_clink_dynamic_linker_and_stdlibs(self): + ''' + We use the order of languages in `clink_langs` to determine which + linker to use in case the target has sources compiled with multiple + compilers. All languages other than those in this list have their own + linker. + Note that Vala outputs C code, so Vala sources can use any linker + that can link compiled C. We don't actually need to add an exception + for Vala here because of that. + ''' + # Populate list of all compilers, not just those being used to compile + # sources in this target + all_compilers = self.environment.coredata.compilers[self.for_machine] + + # If the user set the link_language, just return that. + if self.link_language: + comp = all_compilers[self.link_language] + return comp, comp.language_stdlib_only_link_flags() + + # Languages used by dependencies + dep_langs = self.get_langs_used_by_deps() + # Pick a compiler based on the language priority-order + for l in clink_langs: + if l in self.compilers or l in dep_langs: + try: + linker = all_compilers[l] + except KeyError: + raise MesonException( + f'Could not get a dynamic linker for build target {self.name!r}. 
' + f'Requires a linker for language "{l}", but that is not ' + 'a project language.') + stdlib_args = [] + added_languages = set() + for dl in itertools.chain(self.compilers, dep_langs): + if dl != linker.language: + stdlib_args += all_compilers[dl].language_stdlib_only_link_flags() + added_languages.add(dl) + # Type of var 'linker' is Compiler. + # Pretty hard to fix because the return value is passed everywhere + return linker, stdlib_args + + raise AssertionError(f'Could not get a dynamic linker for build target {self.name!r}') + + def uses_rust(self) -> bool: + """Is this target a rust target.""" + if self.sources: + first_file = self.sources[0] + if first_file.fname.endswith('.rs'): + return True + elif self.generated: + if self.generated[0].get_outputs()[0].endswith('.rs'): + return True + return False + + def get_using_msvc(self): + ''' + Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary, + and SharedLibrary for deciding when to use MSVC-specific file naming + and debug filenames. + + If at least some code is built with MSVC and the final library is + linked with MSVC, we can be sure that some debug info will be + generated. We only check the dynamic linker here because the static + linker is guaranteed to be of the same type. + + Interesting cases: + 1. The Vala compiler outputs C code to be compiled by whatever + C compiler we're using, so all objects will still be created by the + MSVC compiler. + 2. If the target contains only objects, process_compilers guesses and + picks the first compiler that smells right. + ''' + # Rustc can use msvc style linkers + if self.uses_rust(): + compiler = self.environment.coredata.compilers[self.for_machine]['rust'] + else: + compiler, _ = self.get_clink_dynamic_linker_and_stdlibs() + # Mixing many languages with MSVC is not supported yet so ignore stdlibs. + return compiler and compiler.get_linker_id() in {'link', 'lld-link', 'xilink', 'optlink'} + + def check_module_linking(self): + ''' + Warn if shared modules are linked with target: (link_with) #2865 + ''' + for link_target in self.link_targets: + if isinstance(link_target, SharedModule): + if self.environment.machines[self.for_machine].is_darwin(): + raise MesonException( + 'target links against shared modules. This is not permitted on OSX') + else: + mlog.warning('target links against shared modules. 
This ' + 'is not recommended as it is not supported on some ' + 'platforms') + return + +class Generator(HoldableObject): + def __init__(self, exe: T.Union['Executable', programs.ExternalProgram], + arguments: T.List[str], + output: T.List[str], + *, + depfile: T.Optional[str] = None, + capture: bool = False, + depends: T.Optional[T.List[T.Union[BuildTarget, 'CustomTarget']]] = None, + name: str = 'Generator'): + self.exe = exe + self.depfile = depfile + self.capture = capture + self.depends: T.List[T.Union[BuildTarget, 'CustomTarget']] = depends or [] + self.arglist = arguments + self.outputs = output + self.name = name + + def __repr__(self) -> str: + repr_str = "<{0}: {1}>" + return repr_str.format(self.__class__.__name__, self.exe) + + def get_exe(self) -> T.Union['Executable', programs.ExternalProgram]: + return self.exe + + def get_base_outnames(self, inname: str) -> T.List[str]: + plainname = os.path.basename(inname) + basename = os.path.splitext(plainname)[0] + bases = [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.outputs] + return bases + + def get_dep_outname(self, inname: str) -> T.List[str]: + if self.depfile is None: + raise InvalidArguments('Tried to get dep name for rule that does not have dependency file defined.') + plainname = os.path.basename(inname) + basename = os.path.splitext(plainname)[0] + return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) + + def get_arglist(self, inname: str) -> T.List[str]: + plainname = os.path.basename(inname) + basename = os.path.splitext(plainname)[0] + return [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.arglist] + + @staticmethod + def is_parent_path(parent: str, trial: str) -> bool: + relpath = pathlib.PurePath(trial).relative_to(parent) + return relpath.parts[0] != '..' # For subdirs we can only go "down". + + def process_files(self, files: T.Iterable[T.Union[str, File, 'CustomTarget', 'CustomTargetIndex', 'GeneratedList']], + state: T.Union['Interpreter', 'ModuleState'], + preserve_path_from: T.Optional[str] = None, + extra_args: T.Optional[T.List[str]] = None) -> 'GeneratedList': + output = GeneratedList(self, state.subdir, preserve_path_from, extra_args=extra_args if extra_args is not None else []) + + for e in files: + if isinstance(e, CustomTarget): + output.depends.add(e) + if isinstance(e, CustomTargetIndex): + output.depends.add(e.target) + + if isinstance(e, (CustomTarget, CustomTargetIndex, GeneratedList)): + self.depends.append(e) # BUG: this should go in the GeneratedList object, not this object. + fs = [File.from_built_file(state.subdir, f) for f in e.get_outputs()] + elif isinstance(e, str): + fs = [File.from_source_file(state.environment.source_dir, state.subdir, e)] + else: + fs = [e] + + for f in fs: + if preserve_path_from: + abs_f = f.absolute_path(state.environment.source_dir, state.environment.build_dir) + if not self.is_parent_path(preserve_path_from, abs_f): + raise InvalidArguments('generator.process: When using preserve_path_from, all input files must be in a subdirectory of the given dir.') + output.add_file(f, state) + return output + + +class GeneratedList(HoldableObject): + + """The output of generator.process.""" + + def __init__(self, generator: Generator, subdir: str, + preserve_path_from: T.Optional[str], + extra_args: T.List[str]): + self.generator = generator + self.name = generator.exe + self.depends: T.Set['CustomTarget'] = set() # Things this target depends on (because e.g. 
a custom target was used as input) + self.subdir = subdir + self.infilelist: T.List['File'] = [] + self.outfilelist: T.List[str] = [] + self.outmap: T.Dict[File, T.List[str]] = {} + self.extra_depends = [] # XXX: Doesn't seem to be used? + self.depend_files: T.List[File] = [] + self.preserve_path_from = preserve_path_from + self.extra_args: T.List[str] = extra_args if extra_args is not None else [] + + if isinstance(self.generator.exe, programs.ExternalProgram): + if not self.generator.exe.found(): + raise InvalidArguments('Tried to use not-found external program as generator') + path = self.generator.exe.get_path() + if os.path.isabs(path): + # Can only add a dependency on an external program which we + # know the absolute path of + self.depend_files.append(File.from_absolute_file(path)) + + def add_preserved_path_segment(self, infile: File, outfiles: T.List[str], state: T.Union['Interpreter', 'ModuleState']) -> T.List[str]: + result: T.List[str] = [] + in_abs = infile.absolute_path(state.environment.source_dir, state.environment.build_dir) + assert os.path.isabs(self.preserve_path_from) + rel = os.path.relpath(in_abs, self.preserve_path_from) + path_segment = os.path.dirname(rel) + for of in outfiles: + result.append(os.path.join(path_segment, of)) + return result + + def add_file(self, newfile: File, state: T.Union['Interpreter', 'ModuleState']) -> None: + self.infilelist.append(newfile) + outfiles = self.generator.get_base_outnames(newfile.fname) + if self.preserve_path_from: + outfiles = self.add_preserved_path_segment(newfile, outfiles, state) + self.outfilelist += outfiles + self.outmap[newfile] = outfiles + + def get_inputs(self) -> T.List['File']: + return self.infilelist + + def get_outputs(self) -> T.List[str]: + return self.outfilelist + + def get_outputs_for(self, filename: 'File') -> T.List[str]: + return self.outmap[filename] + + def get_generator(self) -> 'Generator': + return self.generator + + def get_extra_args(self) -> T.List[str]: + return self.extra_args + + def get_subdir(self) -> str: + return self.subdir + + +class Executable(BuildTarget): + known_kwargs = known_exe_kwargs + + def __init__(self, name: str, subdir: str, subproject: str, for_machine: MachineChoice, + sources: T.List[File], objects, environment: environment.Environment, kwargs): + self.typename = 'executable' + key = OptionKey('b_pie') + if 'pie' not in kwargs and key in environment.coredata.options: + kwargs['pie'] = environment.coredata.options[key].value + super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) + # Unless overridden, executables have no suffix or prefix. 
Except on + # Windows and with C#/Mono executables where the suffix is 'exe' + if not hasattr(self, 'prefix'): + self.prefix = '' + if not hasattr(self, 'suffix'): + machine = environment.machines[for_machine] + # Executable for Windows or C#/Mono + if machine.is_windows() or machine.is_cygwin() or 'cs' in self.compilers: + self.suffix = 'exe' + elif machine.system.startswith('wasm') or machine.system == 'emscripten': + self.suffix = 'js' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('arm') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('arm')): + self.suffix = 'axf' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('ccrx') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('ccrx')): + self.suffix = 'abs' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('xc16')): + self.suffix = 'elf' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('c2000') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('c2000')): + self.suffix = 'out' + else: + self.suffix = environment.machines[for_machine].get_exe_suffix() + self.filename = self.name + if self.suffix: + self.filename += '.' + self.suffix + self.outputs = [self.filename] + + # The import library this target will generate + self.import_filename = None + # The import library that Visual Studio would generate (and accept) + self.vs_import_filename = None + # The import library that GCC would generate (and prefer) + self.gcc_import_filename = None + # The debugging information file this target will generate + self.debug_filename = None + + # Check for export_dynamic + self.export_dynamic = False + if kwargs.get('export_dynamic'): + if not isinstance(kwargs['export_dynamic'], bool): + raise InvalidArguments('"export_dynamic" keyword argument must be a boolean') + self.export_dynamic = True + if kwargs.get('implib'): + self.export_dynamic = True + if self.export_dynamic and kwargs.get('implib') is False: + raise InvalidArguments('"implib" keyword argument must not be false for if "export_dynamic" is true') + + m = environment.machines[for_machine] + + # If using export_dynamic, set the import library name + if self.export_dynamic: + implib_basename = self.name + '.exe' + if not isinstance(kwargs.get('implib', False), bool): + implib_basename = kwargs['implib'] + if m.is_windows() or m.is_cygwin(): + self.vs_import_filename = f'{implib_basename}.lib' + self.gcc_import_filename = f'lib{implib_basename}.a' + if self.get_using_msvc(): + self.import_filename = self.vs_import_filename + else: + self.import_filename = self.gcc_import_filename + + if m.is_windows() and ('cs' in self.compilers or + self.uses_rust() or + self.get_using_msvc()): + self.debug_filename = self.name + '.pdb' + + # Only linkwithable if using export_dynamic + self.is_linkwithable = self.export_dynamic + + # Remember that this exe was returned by `find_program()` through an override + self.was_returned_by_find_program = False + + def get_default_install_dir(self, environment: environment.Environment) -> str: + return environment.get_bindir() + + def description(self): + '''Human friendly description of the executable''' + return self.name + + def type_suffix(self): + return "@exe" + + def get_import_filename(self): + """ + The name of the import library that will be outputted by the compiler + + Returns None if there is no import library required for this platform + """ + return self.import_filename + + def 
get_import_filenameslist(self): + if self.import_filename: + return [self.vs_import_filename, self.gcc_import_filename] + return [] + + def get_debug_filename(self): + """ + The name of debuginfo file that will be created by the compiler + + Returns None if the build won't create any debuginfo file + """ + return self.debug_filename + + def is_linkable_target(self): + return self.is_linkwithable + +class StaticLibrary(BuildTarget): + known_kwargs = known_stlib_kwargs + + def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs): + self.typename = 'static library' + super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) + if 'cs' in self.compilers: + raise InvalidArguments('Static libraries not supported for C#.') + if 'rust' in self.compilers: + # If no crate type is specified, or it's the generic lib type, use rlib + if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib': + mlog.debug('Defaulting Rust static library target crate type to rlib') + self.rust_crate_type = 'rlib' + # Don't let configuration proceed with a non-static crate type + elif self.rust_crate_type not in ['rlib', 'staticlib']: + raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for static libraries; must be "rlib" or "staticlib"') + # By default a static library is named libfoo.a even on Windows because + # MSVC does not have a consistent convention for what static libraries + # are called. The MSVC CRT uses libfoo.lib syntax but nothing else uses + # it and GCC only looks for static libraries called foo.lib and + # libfoo.a. However, we cannot use foo.lib because that's the same as + # the import library. Using libfoo.a is ok because people using MSVC + # always pass the library filename while linking anyway. + if not hasattr(self, 'prefix'): + self.prefix = 'lib' + if not hasattr(self, 'suffix'): + if 'rust' in self.compilers: + if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'rlib': + # default Rust static library suffix + self.suffix = 'rlib' + elif self.rust_crate_type == 'staticlib': + self.suffix = 'a' + else: + self.suffix = 'a' + self.filename = self.prefix + self.name + '.' 
+ self.suffix + self.outputs = [self.filename] + self.prelink = kwargs.get('prelink', False) + if not isinstance(self.prelink, bool): + raise InvalidArguments('Prelink keyword argument must be a boolean.') + + def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]: + return {} + + def get_default_install_dir(self, environment): + return environment.get_static_lib_dir() + + def type_suffix(self): + return "@sta" + + def process_kwargs(self, kwargs, environment): + super().process_kwargs(kwargs, environment) + if 'rust_crate_type' in kwargs: + rust_crate_type = kwargs['rust_crate_type'] + if isinstance(rust_crate_type, str): + self.rust_crate_type = rust_crate_type + else: + raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.') + + def is_linkable_target(self): + return True + +class SharedLibrary(BuildTarget): + known_kwargs = known_shlib_kwargs + + def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs): + self.typename = 'shared library' + self.soversion = None + self.ltversion = None + # Max length 2, first element is compatibility_version, second is current_version + self.darwin_versions = [] + self.vs_module_defs = None + # The import library this target will generate + self.import_filename = None + # The import library that Visual Studio would generate (and accept) + self.vs_import_filename = None + # The import library that GCC would generate (and prefer) + self.gcc_import_filename = None + # The debugging information file this target will generate + self.debug_filename = None + # Use by the pkgconfig module + self.shared_library_only = False + super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) + if 'rust' in self.compilers: + # If no crate type is specified, or it's the generic lib type, use dylib + if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib': + mlog.debug('Defaulting Rust dynamic library target crate type to "dylib"') + self.rust_crate_type = 'dylib' + # Don't let configuration proceed with a non-dynamic crate type + elif self.rust_crate_type not in ['dylib', 'cdylib']: + raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for dynamic libraries; must be "dylib" or "cdylib"') + if not hasattr(self, 'prefix'): + self.prefix = None + if not hasattr(self, 'suffix'): + self.suffix = None + self.basic_filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + self.determine_filenames(environment) + + def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]: + result: T.Dict[str, str] = {} + mappings = self.get_transitive_link_deps_mapping(prefix, environment) + old = get_target_macos_dylib_install_name(self) + if old not in mappings: + fname = self.get_filename() + outdirs, _ = self.get_install_dir(self.environment) + new = os.path.join(prefix, outdirs[0], fname) + result.update({old: new}) + mappings.update(result) + return mappings + + def get_default_install_dir(self, environment): + return environment.get_shared_lib_dir() + + def determine_filenames(self, env): + """ + See https://github.com/mesonbuild/meson/pull/417 for details. + + First we determine the filename template (self.filename_tpl), then we + set the output filename (self.filename). + + The template is needed while creating aliases (self.get_aliases), + which are needed while generating .so shared libraries for Linux. 
+ + Besides this, there's also the import library name, which is only used + on Windows since on that platform the linker uses a separate library + called the "import library" during linking instead of the shared + library (DLL). The toolchain will output an import library in one of + two formats: GCC or Visual Studio. + + When we're building with Visual Studio, the import library that will be + generated by the toolchain is self.vs_import_filename, and with + MinGW/GCC, it's self.gcc_import_filename. self.import_filename will + always contain the import library name this target will generate. + """ + prefix = '' + suffix = '' + create_debug_file = False + self.filename_tpl = self.basic_filename_tpl + # NOTE: manual prefix/suffix override is currently only tested for C/C++ + # C# and Mono + if 'cs' in self.compilers: + prefix = '' + suffix = 'dll' + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + create_debug_file = True + # C, C++, Swift, Vala + # Only Windows uses a separate import library for linking + # For all other targets/platforms import_filename stays None + elif env.machines[self.for_machine].is_windows(): + suffix = 'dll' + self.vs_import_filename = '{}{}.lib'.format(self.prefix if self.prefix is not None else '', self.name) + self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name) + if self.uses_rust(): + # Shared library is of the form foo.dll + prefix = '' + # Import library is called foo.dll.lib + self.import_filename = f'{self.name}.dll.lib' + create_debug_file = True + elif self.get_using_msvc(): + # Shared library is of the form foo.dll + prefix = '' + # Import library is called foo.lib + self.import_filename = self.vs_import_filename + create_debug_file = True + # Assume GCC-compatible naming + else: + # Shared library is of the form libfoo.dll + prefix = 'lib' + # Import library is called libfoo.dll.a + self.import_filename = self.gcc_import_filename + # Shared library has the soversion if it is defined + if self.soversion: + self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}' + else: + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + elif env.machines[self.for_machine].is_cygwin(): + suffix = 'dll' + self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name) + # Shared library is of the form cygfoo.dll + # (ld --dll-search-prefix=cyg is the default) + prefix = 'cyg' + # Import library is called libfoo.dll.a + self.import_filename = self.gcc_import_filename + if self.soversion: + self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}' + else: + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + elif env.machines[self.for_machine].is_darwin(): + prefix = 'lib' + suffix = 'dylib' + # On macOS, the filename can only contain the major version + if self.soversion: + # libfoo.X.dylib + self.filename_tpl = '{0.prefix}{0.name}.{0.soversion}.{0.suffix}' + else: + # libfoo.dylib + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + elif env.machines[self.for_machine].is_android(): + prefix = 'lib' + suffix = 'so' + # Android doesn't support shared_library versioning + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + else: + prefix = 'lib' + suffix = 'so' + if self.ltversion: + # libfoo.so.X[.Y[.Z]] (.Y and .Z are optional) + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.ltversion}' + elif self.soversion: + # libfoo.so.X + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.soversion}' + else: + # No versioning, libfoo.so + 
self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + if self.prefix is None: + self.prefix = prefix + if self.suffix is None: + self.suffix = suffix + self.filename = self.filename_tpl.format(self) + self.outputs = [self.filename] + if create_debug_file: + self.debug_filename = os.path.splitext(self.filename)[0] + '.pdb' + + @staticmethod + def _validate_darwin_versions(darwin_versions): + try: + if isinstance(darwin_versions, int): + darwin_versions = str(darwin_versions) + if isinstance(darwin_versions, str): + darwin_versions = 2 * [darwin_versions] + if not isinstance(darwin_versions, list): + raise InvalidArguments('Shared library darwin_versions: must be a string, integer,' + f'or a list, not {darwin_versions!r}') + if len(darwin_versions) > 2: + raise InvalidArguments('Shared library darwin_versions: list must contain 2 or fewer elements') + if len(darwin_versions) == 1: + darwin_versions = 2 * darwin_versions + for i, v in enumerate(darwin_versions[:]): + if isinstance(v, int): + v = str(v) + if not isinstance(v, str): + raise InvalidArguments('Shared library darwin_versions: list elements ' + f'must be strings or integers, not {v!r}') + if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', v): + raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z where ' + 'X, Y, Z are numbers, and Y and Z are optional') + parts = v.split('.') + if len(parts) in (1, 2, 3) and int(parts[0]) > 65535: + raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z ' + 'where X is [0, 65535] and Y, Z are optional') + if len(parts) in (2, 3) and int(parts[1]) > 255: + raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z ' + 'where Y is [0, 255] and Y, Z are optional') + if len(parts) == 3 and int(parts[2]) > 255: + raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z ' + 'where Z is [0, 255] and Y, Z are optional') + darwin_versions[i] = v + except ValueError: + raise InvalidArguments('Shared library darwin_versions: value is invalid') + return darwin_versions + + def process_kwargs(self, kwargs, environment): + super().process_kwargs(kwargs, environment) + + if not self.environment.machines[self.for_machine].is_android(): + supports_versioning = True + else: + supports_versioning = False + + if supports_versioning: + # Shared library version + if 'version' in kwargs: + self.ltversion = kwargs['version'] + if not isinstance(self.ltversion, str): + raise InvalidArguments('Shared library version needs to be a string, not ' + type(self.ltversion).__name__) + if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', self.ltversion): + raise InvalidArguments(f'Invalid Shared library version "{self.ltversion}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.') + # Try to extract/deduce the soversion + if 'soversion' in kwargs: + self.soversion = kwargs['soversion'] + if isinstance(self.soversion, int): + self.soversion = str(self.soversion) + if not isinstance(self.soversion, str): + raise InvalidArguments('Shared library soversion is not a string or integer.') + elif self.ltversion: + # library version is defined, get the soversion from that + # We replicate what Autotools does here and take the first + # number of the version by default. 
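+ # For example, an ltversion of '1.2.3' yields soversion '1' here; with the
+ # default Linux naming above that means the file libfoo.so.1.2.3 plus a
+ # libfoo.so.1 alias from get_aliases() (a sketch assuming a hypothetical
+ # library named 'foo').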
+ self.soversion = self.ltversion.split('.')[0] + # macOS, iOS and tvOS dylib compatibility_version and current_version + if 'darwin_versions' in kwargs: + self.darwin_versions = self._validate_darwin_versions(kwargs['darwin_versions']) + elif self.soversion: + # If unspecified, pick the soversion + self.darwin_versions = 2 * [self.soversion] + + # Visual Studio module-definitions file + if 'vs_module_defs' in kwargs: + path = kwargs['vs_module_defs'] + if isinstance(path, str): + if os.path.isabs(path): + self.vs_module_defs = File.from_absolute_file(path) + else: + self.vs_module_defs = File.from_source_file(environment.source_dir, self.subdir, path) + self.link_depends.append(self.vs_module_defs) + elif isinstance(path, File): + # When passing a generated file. + self.vs_module_defs = path + self.link_depends.append(path) + elif hasattr(path, 'get_filename'): + # When passing output of a Custom Target + path = File.from_built_file(path.subdir, path.get_filename()) + self.vs_module_defs = path + self.link_depends.append(path) + else: + raise InvalidArguments( + 'Shared library vs_module_defs must be either a string, ' + 'a file object or a Custom Target') + if 'rust_crate_type' in kwargs: + rust_crate_type = kwargs['rust_crate_type'] + if isinstance(rust_crate_type, str): + self.rust_crate_type = rust_crate_type + else: + raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.') + + def get_import_filename(self): + """ + The name of the import library that will be outputted by the compiler + + Returns None if there is no import library required for this platform + """ + return self.import_filename + + def get_debug_filename(self): + """ + The name of debuginfo file that will be created by the compiler + + Returns None if the build won't create any debuginfo file + """ + return self.debug_filename + + def get_import_filenameslist(self): + if self.import_filename: + return [self.vs_import_filename, self.gcc_import_filename] + return [] + + def get_all_link_deps(self): + return [self] + self.get_transitive_link_deps() + + def get_aliases(self) -> T.Dict[str, str]: + """ + If the versioned library name is libfoo.so.0.100.0, aliases are: + * libfoo.so.0 (soversion) -> libfoo.so.0.100.0 + * libfoo.so (unversioned; for linking) -> libfoo.so.0 + Same for dylib: + * libfoo.dylib (unversioned; for linking) -> libfoo.0.dylib + """ + aliases: T.Dict[str, str] = {} + # Aliases are only useful with .so and .dylib libraries. Also if + # there's no self.soversion (no versioning), we don't need aliases. + if self.suffix not in ('so', 'dylib') or not self.soversion: + return aliases + # With .so libraries, the minor and micro versions are also in the + # filename. 
If ltversion != soversion we create an soversion alias: + # libfoo.so.0 -> libfoo.so.0.100.0 + # Where libfoo.so.0.100.0 is the actual library + if self.suffix == 'so' and self.ltversion and self.ltversion != self.soversion: + alias_tpl = self.filename_tpl.replace('ltversion', 'soversion') + ltversion_filename = alias_tpl.format(self) + aliases[ltversion_filename] = self.filename + # libfoo.so.0/libfoo.0.dylib is the actual library + else: + ltversion_filename = self.filename + # Unversioned alias: + # libfoo.so -> libfoo.so.0 + # libfoo.dylib -> libfoo.0.dylib + aliases[self.basic_filename_tpl.format(self)] = ltversion_filename + return aliases + + def type_suffix(self): + return "@sha" + + def is_linkable_target(self): + return True + +# A shared library that is meant to be used with dlopen rather than linking +# into something else. +class SharedModule(SharedLibrary): + known_kwargs = known_shmod_kwargs + + def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs): + if 'version' in kwargs: + raise MesonException('Shared modules must not specify the version kwarg.') + if 'soversion' in kwargs: + raise MesonException('Shared modules must not specify the soversion kwarg.') + super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) + self.typename = 'shared module' + + def get_default_install_dir(self, environment): + return environment.get_shared_module_dir() + +class BothLibraries(SecondLevelHolder): + def __init__(self, shared: SharedLibrary, static: StaticLibrary) -> None: + self._preferred_library = 'shared' + self.shared = shared + self.static = static + self.subproject = self.shared.subproject + + def __repr__(self) -> str: + return f'' + + def get_default_object(self) -> BuildTarget: + if self._preferred_library == 'shared': + return self.shared + elif self._preferred_library == 'static': + return self.static + raise MesonBugException(f'self._preferred_library == "{self._preferred_library}" is neither "shared" nor "static".') + +class CommandBase: + def flatten_command(self, cmd): + cmd = listify(cmd) + final_cmd = [] + for c in cmd: + if isinstance(c, str): + final_cmd.append(c) + elif isinstance(c, File): + self.depend_files.append(c) + final_cmd.append(c) + elif isinstance(c, programs.ExternalProgram): + if not c.found(): + raise InvalidArguments('Tried to use not-found external program in "command"') + path = c.get_path() + if os.path.isabs(path): + # Can only add a dependency on an external program which we + # know the absolute path of + self.depend_files.append(File.from_absolute_file(path)) + final_cmd += c.get_command() + elif isinstance(c, (BuildTarget, CustomTarget)): + self.dependencies.append(c) + final_cmd.append(c) + elif isinstance(c, list): + final_cmd += self.flatten_command(c) + else: + raise InvalidArguments(f'Argument {c!r} in "command" is invalid') + return final_cmd + +class CustomTarget(Target, CommandBase): + known_kwargs = { + 'input', + 'output', + 'command', + 'capture', + 'feed', + 'install', + 'install_dir', + 'install_mode', + 'build_always', + 'build_always_stale', + 'depends', + 'depend_files', + 'depfile', + 'build_by_default', + 'override_options', + 'console', + 'env', + } + + def __init__(self, name: str, subdir: str, subproject: str, kwargs: T.Dict[str, T.Any], + absolute_paths: bool = False, backend: T.Optional['Backend'] = None): + self.typename = 'custom' + # TODO expose keyword arg to make MachineChoice.HOST configurable + super().__init__(name, 
subdir, subproject, False, MachineChoice.HOST) + self.dependencies: T.List[T.Union[CustomTarget, BuildTarget]] = [] + self.extra_depends = [] + self.depend_files = [] # Files that this target depends on but are not on the command line. + self.depfile = None + self.process_kwargs(kwargs, backend) + # Whether to use absolute paths for all files on the commandline + self.absolute_paths = absolute_paths + unknowns = [] + for k in kwargs: + if k not in CustomTarget.known_kwargs: + unknowns.append(k) + if unknowns: + mlog.warning('Unknown keyword arguments in target {}: {}'.format(self.name, ', '.join(unknowns))) + + def get_default_install_dir(self, environment): + return None + + def __repr__(self): + repr_str = "<{0} {1}: {2}>" + return repr_str.format(self.__class__.__name__, self.get_id(), self.command) + + def get_target_dependencies(self): + deps = self.dependencies[:] + deps += self.extra_depends + for c in self.sources: + if isinstance(c, (BuildTarget, CustomTarget)): + deps.append(c) + return deps + + def get_transitive_build_target_deps(self): + ''' + Recursively fetch the build targets that this custom target depends on, + whether through `command:`, `depends:`, or `sources:` The recursion is + only performed on custom targets. + This is useful for setting PATH on Windows for finding required DLLs. + F.ex, if you have a python script that loads a C module that links to + other DLLs in your project. + ''' + bdeps = set() + deps = self.get_target_dependencies() + for d in deps: + if isinstance(d, BuildTarget): + bdeps.add(d) + elif isinstance(d, CustomTarget): + bdeps.update(d.get_transitive_build_target_deps()) + return bdeps + + def process_kwargs(self, kwargs, backend): + self.process_kwargs_base(kwargs) + self.sources = extract_as_list(kwargs, 'input') + if 'output' not in kwargs: + raise InvalidArguments('Missing keyword argument "output".') + self.outputs = listify(kwargs['output']) + # This will substitute values from the input into output and return it. + inputs = get_sources_string_names(self.sources, backend) + values = get_filenames_templates_dict(inputs, []) + for i in self.outputs: + if not(isinstance(i, str)): + raise InvalidArguments('Output argument not a string.') + if i == '': + raise InvalidArguments('Output must not be empty.') + if i.strip() == '': + raise InvalidArguments('Output must not consist only of whitespace.') + if has_path_sep(i): + raise InvalidArguments(f'Output {i!r} must not contain a path segment.') + if '@INPUT@' in i or '@INPUT0@' in i: + m = 'Output cannot contain @INPUT@ or @INPUT0@, did you ' \ + 'mean @PLAINNAME@ or @BASENAME@?' + raise InvalidArguments(m) + # We already check this during substitution, but the error message + # will be unclear/confusing, so check it here. 
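+ # A hypothetical example: with inputs ['a.c', 'b.c'] an output such as
+ # '@BASENAME@.h' is ambiguous (a.h or b.h?), which is why outputs using
+ # @PLAINNAME@ or @BASENAME@ are rejected below when there is more than
+ # one input.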
+ if len(inputs) != 1 and ('@PLAINNAME@' in i or '@BASENAME@' in i): + m = "Output cannot contain @PLAINNAME@ or @BASENAME@ when " \ + "there is more than one input (we can't know which to use)" + raise InvalidArguments(m) + self.outputs = substitute_values(self.outputs, values) + self.capture = kwargs.get('capture', False) + if self.capture and len(self.outputs) != 1: + raise InvalidArguments('Capturing can only output to a single file.') + self.feed = kwargs.get('feed', False) + if self.feed and len(self.sources) != 1: + raise InvalidArguments('Feeding can only input from a single file.') + self.console = kwargs.get('console', False) + if not isinstance(self.console, bool): + raise InvalidArguments('"console" kwarg only accepts booleans') + if self.capture and self.console: + raise InvalidArguments("Can't both capture output and output to console") + if 'command' not in kwargs: + raise InvalidArguments('Missing keyword argument "command".') + if 'depfile' in kwargs: + depfile = kwargs['depfile'] + if not isinstance(depfile, str): + raise InvalidArguments('Depfile must be a string.') + if os.path.basename(depfile) != depfile: + raise InvalidArguments('Depfile must be a plain filename without a subdirectory.') + self.depfile = depfile + self.command = self.flatten_command(kwargs['command']) + for c in self.command: + if self.capture and isinstance(c, str) and '@OUTPUT@' in c: + raise InvalidArguments('@OUTPUT@ is not allowed when capturing output.') + if self.feed and isinstance(c, str) and '@INPUT@' in c: + raise InvalidArguments('@INPUT@ is not allowed when feeding input.') + if 'install' in kwargs: + self.install = kwargs['install'] + if not isinstance(self.install, bool): + raise InvalidArguments('"install" must be boolean.') + if self.install: + if 'install_dir' not in kwargs: + raise InvalidArguments('"install_dir" must be specified ' + 'when installing a target') + + if isinstance(kwargs['install_dir'], list): + FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject) + # If an item in this list is False, the output corresponding to + # the list index of that item will not be installed + self.install_dir = typeslistify(kwargs['install_dir'], (str, bool)) + self.install_mode = kwargs.get('install_mode', None) + else: + self.install = False + self.install_dir = [None] + self.install_mode = None + if 'build_always' in kwargs and 'build_always_stale' in kwargs: + raise InvalidArguments('build_always and build_always_stale are mutually exclusive. 
Combine build_by_default and build_always_stale.') + elif 'build_always' in kwargs: + if 'build_by_default' not in kwargs: + self.build_by_default = kwargs['build_always'] + self.build_always_stale = kwargs['build_always'] + elif 'build_always_stale' in kwargs: + self.build_always_stale = kwargs['build_always_stale'] + if not isinstance(self.build_always_stale, bool): + raise InvalidArguments('Argument build_always_stale must be a boolean.') + extra_deps, depend_files = [extract_as_list(kwargs, c, pop=False) for c in ['depends', 'depend_files']] + for ed in extra_deps: + if not isinstance(ed, (CustomTarget, BuildTarget)): + raise InvalidArguments('Can only depend on toplevel targets: custom_target or build_target ' + f'(executable or a library) got: {type(ed)}({ed})') + self.extra_depends.append(ed) + for i in depend_files: + if isinstance(i, (File, str)): + self.depend_files.append(i) + else: + mlog.debug(i) + raise InvalidArguments(f'Unknown type {type(i).__name__!r} in depend_files.') + self.env = kwargs.get('env') + + def get_dependencies(self): + return self.dependencies + + def should_install(self) -> bool: + return self.install + + def get_custom_install_dir(self): + return self.install_dir + + def get_custom_install_mode(self): + return self.install_mode + + def get_outputs(self) -> T.List[str]: + return self.outputs + + def get_filename(self): + return self.outputs[0] + + def get_sources(self): + return self.sources + + def get_generated_lists(self): + genlists = [] + for c in self.sources: + if isinstance(c, GeneratedList): + genlists.append(c) + return genlists + + def get_generated_sources(self): + return self.get_generated_lists() + + def get_dep_outname(self, infilenames): + if self.depfile is None: + raise InvalidArguments('Tried to get depfile name for custom_target that does not have depfile defined.') + if infilenames: + plainname = os.path.basename(infilenames[0]) + basename = os.path.splitext(plainname)[0] + return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) + else: + if '@BASENAME@' in self.depfile or '@PLAINNAME@' in self.depfile: + raise InvalidArguments('Substitution in depfile for custom_target that does not have an input file.') + return self.depfile + + def is_linkable_target(self): + if len(self.outputs) != 1: + return False + suf = os.path.splitext(self.outputs[0])[-1] + if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so' or suf == '.dylib': + return True + + def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]: + return {} + + def get_link_dep_subdirs(self): + return OrderedSet() + + def get_all_link_deps(self): + return [] + + def is_internal(self) -> bool: + if not self.should_install(): + return True + for out in self.get_outputs(): + # Can't check if this is a static library, so try to guess + if not out.endswith(('.a', '.lib')): + return False + return True + + def extract_all_objects_recurse(self): + return self.get_outputs() + + def type_suffix(self): + return "@cus" + + def __getitem__(self, index: int) -> 'CustomTargetIndex': + return CustomTargetIndex(self, self.outputs[index]) + + def __setitem__(self, index, value): + raise NotImplementedError + + def __delitem__(self, index): + raise NotImplementedError + + def __iter__(self): + for i in self.outputs: + yield CustomTargetIndex(self, i) + +class RunTarget(Target, CommandBase): + def __init__(self, name, command, dependencies, subdir, subproject, env=None): + self.typename = 'run' + # These don't 
produce output artifacts + super().__init__(name, subdir, subproject, False, MachineChoice.BUILD) + self.dependencies = dependencies + self.depend_files = [] + self.command = self.flatten_command(command) + self.absolute_paths = False + self.env = env + + def __repr__(self): + repr_str = "<{0} {1}: {2}>" + return repr_str.format(self.__class__.__name__, self.get_id(), self.command[0]) + + def process_kwargs(self, kwargs): + return self.process_kwargs_base(kwargs) + + def get_dependencies(self): + return self.dependencies + + def get_generated_sources(self): + return [] + + def get_sources(self): + return [] + + def should_install(self) -> bool: + return False + + def get_filename(self) -> str: + return self.name + + def get_outputs(self) -> T.List[str]: + if isinstance(self.name, str): + return [self.name] + elif isinstance(self.name, list): + return self.name + else: + raise RuntimeError('RunTarget: self.name is neither a list nor a string. This is a bug') + + def type_suffix(self): + return "@run" + +class AliasTarget(RunTarget): + def __init__(self, name, dependencies, subdir, subproject): + super().__init__(name, [], dependencies, subdir, subproject) + +class Jar(BuildTarget): + known_kwargs = known_jar_kwargs + + def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs): + self.typename = 'jar' + super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) + for s in self.sources: + if not s.endswith('.java'): + raise InvalidArguments(f'Jar source {s} is not a java file.') + for t in self.link_targets: + if not isinstance(t, Jar): + raise InvalidArguments(f'Link target {t} is not a jar target.') + self.filename = self.name + '.jar' + self.outputs = [self.filename] + self.java_args = kwargs.get('java_args', []) + + def get_main_class(self): + return self.main_class + + def type_suffix(self): + return "@jar" + + def get_java_args(self): + return self.java_args + + def validate_install(self, environment): + # All jar targets are installable. + pass + + def is_linkable_target(self): + return True + + def get_classpath_args(self): + cp_paths = [os.path.join(l.get_subdir(), l.get_filename()) for l in self.link_targets] + cp_string = os.pathsep.join(cp_paths) + if cp_string: + return ['-cp', os.pathsep.join(cp_paths)] + return [] + +class CustomTargetIndex(HoldableObject): + + """A special opaque object returned by indexing a CustomTarget. This object + exists in Meson, but acts as a proxy in the backends, making targets depend + on the CustomTarget it's derived from, but only adding one source file to + the sources. 
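For instance, given a hypothetical custom target whose output kwarg is ['gen.h', 'gen.c'], indexing it as target[0] yields an object whose get_outputs() is just ['gen.h'], while the backends still make anything that uses it depend on the whole CustomTarget.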
+ """ + + def __init__(self, target: CustomTarget, output: int): + self.typename = 'custom' + self.target = target + self.output = output + self.for_machine = target.for_machine + + def __repr__(self): + return ''.format( + self.target, self.target.get_outputs().index(self.output)) + + def get_outputs(self) -> T.List[str]: + return [self.output] + + def get_subdir(self) -> str: + return self.target.get_subdir() + + def get_filename(self): + return self.output + + def get_id(self): + return self.target.get_id() + + def get_all_link_deps(self): + return self.target.get_all_link_deps() + + def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]: + return self.target.get_link_deps_mapping(prefix, environment) + + def get_link_dep_subdirs(self): + return self.target.get_link_dep_subdirs() + + def is_linkable_target(self): + suf = os.path.splitext(self.output)[-1] + if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so': + return True + + def should_install(self) -> bool: + return self.target.should_install() + + def is_internal(self) -> bool: + return self.target.is_internal() + + def extract_all_objects_recurse(self): + return self.target.extract_all_objects_recurse() + + def get_custom_install_dir(self): + return self.target.get_custom_install_dir() + +class ConfigurationData(HoldableObject): + def __init__(self) -> None: + super().__init__() + self.values: T.Dict[ + str, + T.Tuple[ + T.Union[str, int, bool], + T.Optional[str] + ] + ] = {} + + def __repr__(self): + return repr(self.values) + + def __contains__(self, value: str) -> bool: + return value in self.values + + def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]: + return self.values[name] # (val, desc) + + def keys(self) -> T.Iterator[str]: + return self.values.keys() + +# A bit poorly named, but this represents plain data files to copy +# during install. +class Data(HoldableObject): + def __init__(self, sources: T.List[File], install_dir: str, + install_mode: 'FileMode', subproject: str, + rename: T.List[str] = None): + self.sources = sources + self.install_dir = install_dir + self.install_mode = install_mode + if rename is None: + self.rename = [os.path.basename(f.fname) for f in self.sources] + else: + self.rename = rename + self.subproject = subproject + +class TestSetup: + def __init__(self, exe_wrapper: T.Optional[T.List[str]], gdb: bool, + timeout_multiplier: int, env: EnvironmentVariables, + exclude_suites: T.List[str]): + self.exe_wrapper = exe_wrapper + self.gdb = gdb + self.timeout_multiplier = timeout_multiplier + self.env = env + self.exclude_suites = exclude_suites + +def get_sources_string_names(sources, backend): + ''' + For the specified list of @sources which can be strings, Files, or targets, + get all the output basenames. + ''' + names = [] + for s in sources: + if isinstance(s, str): + names.append(s) + elif isinstance(s, (BuildTarget, CustomTarget, CustomTargetIndex, GeneratedList)): + names += s.get_outputs() + elif isinstance(s, ExtractedObjects): + names += s.get_outputs(backend) + elif isinstance(s, File): + names.append(s.fname) + else: + raise AssertionError(f'Unknown source type: {s!r}') + return names + +def load(build_dir: str) -> Build: + filename = os.path.join(build_dir, 'meson-private', 'build.dat') + load_fail_msg = f'Build data file {filename!r} is corrupted. Try with a fresh build tree.' + nonexisting_fail_msg = f'No such build data file as "{filename!r}".' 
+ try: + with open(filename, 'rb') as f: + obj = pickle.load(f) + except FileNotFoundError: + raise MesonException(nonexisting_fail_msg) + except (pickle.UnpicklingError, EOFError): + raise MesonException(load_fail_msg) + except AttributeError: + raise MesonException( + f"Build data file {filename!r} references functions or classes that don't " + "exist. This probably means that it was generated with an old " + "version of meson. Try running from the source directory " + f"meson {build_dir} --wipe") + if not isinstance(obj, Build): + raise MesonException(load_fail_msg) + return obj + +def save(obj: Build, filename: str) -> None: + with open(filename, 'wb') as f: + pickle.dump(obj, f) diff --git a/meson/mesonbuild/cmake/__init__.py b/meson/mesonbuild/cmake/__init__.py new file mode 100644 index 000000000..d39bf2424 --- /dev/null +++ b/meson/mesonbuild/cmake/__init__.py @@ -0,0 +1,46 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +__all__ = [ + 'CMakeClient', + 'CMakeExecutor', + 'CMakeExecScope', + 'CMakeException', + 'CMakeFileAPI', + 'CMakeInterpreter', + 'CMakeTarget', + 'CMakeToolchain', + 'CMakeTraceLine', + 'CMakeTraceParser', + 'SingleTargetOptions', + 'TargetOptions', + 'parse_generator_expressions', + 'language_map', + 'backend_generator_map', + 'cmake_get_generator_args', + 'cmake_defines_to_args', + 'check_cmake_args', +] + +from .common import CMakeException, SingleTargetOptions, TargetOptions, cmake_defines_to_args, language_map, backend_generator_map, cmake_get_generator_args, check_cmake_args +from .client import CMakeClient +from .executor import CMakeExecutor +from .fileapi import CMakeFileAPI +from .generator import parse_generator_expressions +from .interpreter import CMakeInterpreter +from .toolchain import CMakeToolchain, CMakeExecScope +from .traceparser import CMakeTarget, CMakeTraceLine, CMakeTraceParser diff --git a/meson/mesonbuild/cmake/client.py b/meson/mesonbuild/cmake/client.py new file mode 100644 index 000000000..bcbb52ef9 --- /dev/null +++ b/meson/mesonbuild/cmake/client.py @@ -0,0 +1,373 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +from .common import CMakeException, CMakeConfiguration, CMakeBuildFile +from .. 
import mlog +from contextlib import contextmanager +from subprocess import Popen, PIPE, TimeoutExpired +from pathlib import Path +import typing as T +import json + +if T.TYPE_CHECKING: + from ..environment import Environment + from .executor import CMakeExecutor + +CMAKE_SERVER_BEGIN_STR = '[== "CMake Server" ==[' +CMAKE_SERVER_END_STR = ']== "CMake Server" ==]' + +CMAKE_MESSAGE_TYPES = { + 'error': ['cookie', 'errorMessage'], + 'hello': ['supportedProtocolVersions'], + 'message': ['cookie', 'message'], + 'progress': ['cookie'], + 'reply': ['cookie', 'inReplyTo'], + 'signal': ['cookie', 'name'], +} # type: T.Dict[str, T.List[str]] + +CMAKE_REPLY_TYPES = { + 'handshake': [], + 'configure': [], + 'compute': [], + 'cmakeInputs': ['buildFiles', 'cmakeRootDirectory', 'sourceDirectory'], + 'codemodel': ['configurations'] +} # type: T.Dict[str, T.List[str]] + +# Base CMake server message classes + +class MessageBase: + def __init__(self, msg_type: str, cookie: str) -> None: + self.type = msg_type + self.cookie = cookie + + def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]: + return {'type': self.type, 'cookie': self.cookie} + + def log(self) -> None: + mlog.warning('CMake server message of type', mlog.bold(type(self).__name__), 'has no log function') + +class RequestBase(MessageBase): + cookie_counter = 0 + + def __init__(self, msg_type: str) -> None: + super().__init__(msg_type, self.gen_cookie()) + + @staticmethod + def gen_cookie() -> str: + RequestBase.cookie_counter += 1 + return f'meson_{RequestBase.cookie_counter}' + +class ReplyBase(MessageBase): + def __init__(self, cookie: str, in_reply_to: str) -> None: + super().__init__('reply', cookie) + self.in_reply_to = in_reply_to + +class SignalBase(MessageBase): + def __init__(self, cookie: str, signal_name: str) -> None: + super().__init__('signal', cookie) + self.signal_name = signal_name + + def log(self) -> None: + mlog.log(mlog.bold('CMake signal:'), mlog.yellow(self.signal_name)) + +# Special Message classes + +class Error(MessageBase): + def __init__(self, cookie: str, message: str) -> None: + super().__init__('error', cookie) + self.message = message + + def log(self) -> None: + mlog.error(mlog.bold('CMake server error:'), mlog.red(self.message)) + +class Message(MessageBase): + def __init__(self, cookie: str, message: str) -> None: + super().__init__('message', cookie) + self.message = message + + def log(self) -> None: + #mlog.log(mlog.bold('CMake:'), self.message) + pass + +class Progress(MessageBase): + def __init__(self, cookie: str) -> None: + super().__init__('progress', cookie) + + def log(self) -> None: + pass + +class MessageHello(MessageBase): + def __init__(self, supported_protocol_versions: T.List[T.Dict[str, int]]) -> None: + super().__init__('hello', '') + self.supported_protocol_versions = supported_protocol_versions + + def supports(self, major: int, minor: T.Optional[int] = None) -> bool: + for i in self.supported_protocol_versions: + assert 'major' in i + assert 'minor' in i + if major == i['major']: + if minor is None or minor == i['minor']: + return True + return False + +# Request classes + +class RequestHandShake(RequestBase): + def __init__(self, src_dir: Path, build_dir: Path, generator: str, vers_major: int, vers_minor: T.Optional[int] = None) -> None: + super().__init__('handshake') + self.src_dir = src_dir + self.build_dir = build_dir + self.generator = generator + self.vers_major = vers_major + self.vers_minor = vers_minor + + def to_dict(self) -> T.Dict[str, T.Union[str, 
T.List[str], T.Dict[str, int]]]: + vers = {'major': self.vers_major} + if self.vers_minor is not None: + vers['minor'] = self.vers_minor + + # Old CMake versions (3.7) want '/' even on Windows + self.src_dir = self.src_dir.resolve() + self.build_dir = self.build_dir.resolve() + + return { + **super().to_dict(), + 'sourceDirectory': self.src_dir.as_posix(), + 'buildDirectory': self.build_dir.as_posix(), + 'generator': self.generator, + 'protocolVersion': vers + } + +class RequestConfigure(RequestBase): + def __init__(self, args: T.Optional[T.List[str]] = None): + super().__init__('configure') + self.args = args + + def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]: + res = super().to_dict() + if self.args: + res['cacheArguments'] = self.args + return res + +class RequestCompute(RequestBase): + def __init__(self) -> None: + super().__init__('compute') + +class RequestCMakeInputs(RequestBase): + def __init__(self) -> None: + super().__init__('cmakeInputs') + +class RequestCodeModel(RequestBase): + def __init__(self) -> None: + super().__init__('codemodel') + +# Reply classes + +class ReplyHandShake(ReplyBase): + def __init__(self, cookie: str) -> None: + super().__init__(cookie, 'handshake') + +class ReplyConfigure(ReplyBase): + def __init__(self, cookie: str) -> None: + super().__init__(cookie, 'configure') + +class ReplyCompute(ReplyBase): + def __init__(self, cookie: str) -> None: + super().__init__(cookie, 'compute') + +class ReplyCMakeInputs(ReplyBase): + def __init__(self, cookie: str, cmake_root: Path, src_dir: Path, build_files: T.List[CMakeBuildFile]) -> None: + super().__init__(cookie, 'cmakeInputs') + self.cmake_root = cmake_root + self.src_dir = src_dir + self.build_files = build_files + + def log(self) -> None: + mlog.log('CMake root: ', mlog.bold(self.cmake_root.as_posix())) + mlog.log('Source dir: ', mlog.bold(self.src_dir.as_posix())) + mlog.log('Build files:', mlog.bold(str(len(self.build_files)))) + with mlog.nested(): + for i in self.build_files: + mlog.log(str(i)) + +class ReplyCodeModel(ReplyBase): + def __init__(self, data: T.Dict[str, T.Any]) -> None: + super().__init__(data['cookie'], 'codemodel') + self.configs = [] + for i in data['configurations']: + self.configs += [CMakeConfiguration(i)] + + def log(self) -> None: + mlog.log('CMake code mode:') + for idx, i in enumerate(self.configs): + mlog.log(f'Configuration {idx}:') + with mlog.nested(): + i.log() + +# Main client class + +class CMakeClient: + def __init__(self, env: 'Environment') -> None: + self.env = env + self.proc = None # type: T.Optional[Popen] + self.type_map = { + 'error': lambda data: Error(data['cookie'], data['errorMessage']), + 'hello': lambda data: MessageHello(data['supportedProtocolVersions']), + 'message': lambda data: Message(data['cookie'], data['message']), + 'progress': lambda data: Progress(data['cookie']), + 'reply': self.resolve_type_reply, + 'signal': lambda data: SignalBase(data['cookie'], data['name']) + } # type: T.Dict[str, T.Callable[[T.Dict[str, T.Any]], MessageBase]] + + self.reply_map = { + 'handshake': lambda data: ReplyHandShake(data['cookie']), + 'configure': lambda data: ReplyConfigure(data['cookie']), + 'compute': lambda data: ReplyCompute(data['cookie']), + 'cmakeInputs': self.resolve_reply_cmakeInputs, + 'codemodel': lambda data: ReplyCodeModel(data), + } # type: T.Dict[str, T.Callable[[T.Dict[str, T.Any]], ReplyBase]] + + def readMessageRaw(self) -> T.Dict[str, T.Any]: + assert self.proc is not None + rawData = [] + begin = False + while 
self.proc.poll() is None: + line = self.proc.stdout.readline() + if not line: + break + line = line.decode('utf-8') + line = line.strip() + + if begin and line == CMAKE_SERVER_END_STR: + break # End of the message + elif begin: + rawData += [line] + elif line == CMAKE_SERVER_BEGIN_STR: + begin = True # Begin of the message + + if rawData: + res = json.loads('\n'.join(rawData)) + assert isinstance(res, dict) + for i in res.keys(): + assert isinstance(i, str) + return res + raise CMakeException('Failed to read data from the CMake server') + + def readMessage(self) -> MessageBase: + raw_data = self.readMessageRaw() + if 'type' not in raw_data: + raise CMakeException('The "type" attribute is missing from the message') + msg_type = raw_data['type'] + func = self.type_map.get(msg_type, None) + if not func: + raise CMakeException(f'Recieved unknown message type "{msg_type}"') + for i in CMAKE_MESSAGE_TYPES[msg_type]: + if i not in raw_data: + raise CMakeException(f'Key "{i}" is missing from CMake server message type {msg_type}') + return func(raw_data) + + def writeMessage(self, msg: MessageBase) -> None: + raw_data = '\n{}\n{}\n{}\n'.format(CMAKE_SERVER_BEGIN_STR, json.dumps(msg.to_dict(), indent=2), CMAKE_SERVER_END_STR) + self.proc.stdin.write(raw_data.encode('ascii')) + self.proc.stdin.flush() + + def query(self, request: RequestBase) -> MessageBase: + self.writeMessage(request) + while True: + reply = self.readMessage() + if reply.cookie == request.cookie and reply.type in ['reply', 'error']: + return reply + + reply.log() + + def query_checked(self, request: RequestBase, message: str) -> MessageBase: + reply = self.query(request) + h = mlog.green('SUCCEEDED') if reply.type == 'reply' else mlog.red('FAILED') + mlog.log(message + ':', h) + if reply.type != 'reply': + reply.log() + raise CMakeException('CMake server query failed') + return reply + + def do_handshake(self, src_dir: Path, build_dir: Path, generator: str, vers_major: int, vers_minor: T.Optional[int] = None) -> None: + # CMake prints the hello message on startup + msg = self.readMessage() + if not isinstance(msg, MessageHello): + raise CMakeException('Recieved an unexpected message from the CMake server') + + request = RequestHandShake(src_dir, build_dir, generator, vers_major, vers_minor) + self.query_checked(request, 'CMake server handshake') + + def resolve_type_reply(self, data: T.Dict[str, T.Any]) -> ReplyBase: + reply_type = data['inReplyTo'] + func = self.reply_map.get(reply_type, None) + if not func: + raise CMakeException(f'Recieved unknown reply type "{reply_type}"') + for i in ['cookie'] + CMAKE_REPLY_TYPES[reply_type]: + if i not in data: + raise CMakeException(f'Key "{i}" is missing from CMake server message type {type}') + return func(data) + + def resolve_reply_cmakeInputs(self, data: T.Dict[str, T.Any]) -> ReplyCMakeInputs: + files = [] + for i in data['buildFiles']: + for j in i['sources']: + files += [CMakeBuildFile(Path(j), i['isCMake'], i['isTemporary'])] + return ReplyCMakeInputs(data['cookie'], Path(data['cmakeRootDirectory']), Path(data['sourceDirectory']), files) + + @contextmanager + def connect(self, cmake_exe: 'CMakeExecutor') -> T.Generator[None, None, None]: + self.startup(cmake_exe) + try: + yield + finally: + self.shutdown() + + def startup(self, cmake_exe: 'CMakeExecutor') -> None: + if self.proc is not None: + raise CMakeException('The CMake server was already started') + assert cmake_exe.found() + + mlog.debug('Starting CMake server with CMake', mlog.bold(' '.join(cmake_exe.get_command())), 
'version', mlog.cyan(cmake_exe.version())) + self.proc = Popen(cmake_exe.get_command() + ['-E', 'server', '--experimental', '--debug'], stdin=PIPE, stdout=PIPE) + + def shutdown(self) -> None: + if self.proc is None: + return + + mlog.debug('Shutting down the CMake server') + + # Close the pipes to exit + self.proc.stdin.close() + self.proc.stdout.close() + + # Wait for CMake to finish + try: + self.proc.wait(timeout=2) + except TimeoutExpired: + # Terminate CMake if there is a timeout + # terminate() may throw a platform specific exception if the process has already + # terminated. This may be the case if there is a race condition (CMake exited after + # the timeout but before the terminate() call). Additionally, this behavior can + # also be triggered on cygwin if CMake crashes. + # See https://github.com/mesonbuild/meson/pull/4969#issuecomment-499413233 + try: + self.proc.terminate() + except Exception: + pass + + self.proc = None diff --git a/meson/mesonbuild/cmake/common.py b/meson/mesonbuild/cmake/common.py new file mode 100644 index 000000000..5cc154cb8 --- /dev/null +++ b/meson/mesonbuild/cmake/common.py @@ -0,0 +1,334 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +from ..mesonlib import MesonException, OptionKey +from .. 
import mlog +from pathlib import Path +import typing as T + +if T.TYPE_CHECKING: + from ..environment import Environment + +language_map = { + 'c': 'C', + 'cpp': 'CXX', + 'cuda': 'CUDA', + 'objc': 'OBJC', + 'objcpp': 'OBJCXX', + 'cs': 'CSharp', + 'java': 'Java', + 'fortran': 'Fortran', + 'swift': 'Swift', +} + +backend_generator_map = { + 'ninja': 'Ninja', + 'xcode': 'Xcode', + 'vs2010': 'Visual Studio 10 2010', + 'vs2012': 'Visual Studio 11 2012', + 'vs2013': 'Visual Studio 12 2013', + 'vs2015': 'Visual Studio 14 2015', + 'vs2017': 'Visual Studio 15 2017', + 'vs2019': 'Visual Studio 16 2019', +} + +blacklist_cmake_defs = [ + 'CMAKE_TOOLCHAIN_FILE', + 'CMAKE_PROJECT_INCLUDE', + 'MESON_PRELOAD_FILE', + 'MESON_PS_CMAKE_CURRENT_BINARY_DIR', + 'MESON_PS_CMAKE_CURRENT_SOURCE_DIR', + 'MESON_PS_DELAYED_CALLS', + 'MESON_PS_LOADED', + 'MESON_FIND_ROOT_PATH', + 'MESON_CMAKE_SYSROOT', + 'MESON_PATHS_LIST', + 'MESON_CMAKE_ROOT', +] + +class CMakeException(MesonException): + pass + +class CMakeBuildFile: + def __init__(self, file: Path, is_cmake: bool, is_temp: bool) -> None: + self.file = file + self.is_cmake = is_cmake + self.is_temp = is_temp + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}: {self.file}; cmake={self.is_cmake}; temp={self.is_temp}>' + +def _flags_to_list(raw: str) -> T.List[str]: + # Convert a raw commandline string into a list of strings + res = [] + curr = '' + escape = False + in_string = False + for i in raw: + if escape: + # If the current char is not a quote, the '\' is probably important + if i not in ['"', "'"]: + curr += '\\' + curr += i + escape = False + elif i == '\\': + escape = True + elif i in ['"', "'"]: + in_string = not in_string + elif i in [' ', '\n']: + if in_string: + curr += i + else: + res += [curr] + curr = '' + else: + curr += i + res += [curr] + res = list(filter(lambda x: len(x) > 0, res)) + return res + +def cmake_get_generator_args(env: 'Environment') -> T.List[str]: + backend_name = env.coredata.get_option(OptionKey('backend')) + assert isinstance(backend_name, str) + assert backend_name in backend_generator_map + return ['-G', backend_generator_map[backend_name]] + +def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]: + res = [] # type: T.List[str] + if not isinstance(raw, list): + raw = [raw] + + for i in raw: + if not isinstance(i, dict): + raise MesonException('Invalid CMake defines. Expected a dict, but got a {}'.format(type(i).__name__)) + for key, val in i.items(): + assert isinstance(key, str) + if key in blacklist_cmake_defs: + mlog.warning('Setting', mlog.bold(key), 'is not supported. See the meson docs for cross compilation support:') + mlog.warning(' - URL: https://mesonbuild.com/CMake-module.html#cross-compilation') + mlog.warning(' --> Ignoring this option') + continue + if isinstance(val, (str, int, float)): + res += [f'-D{key}={val}'] + elif isinstance(val, bool): + val_str = 'ON' if val else 'OFF' + res += [f'-D{key}={val_str}'] + else: + raise MesonException('Type "{}" of "{}" is not supported as for a CMake define value'.format(type(val).__name__, key)) + + return res + +# TODO: this functuin will become obsolete once the `cmake_args` kwarg is dropped +def check_cmake_args(args: T.List[str]) -> T.List[str]: + res = [] # type: T.List[str] + dis = ['-D' + x for x in blacklist_cmake_defs] + assert dis # Ensure that dis is not empty. + for i in args: + if any([i.startswith(x) for x in dis]): + mlog.warning('Setting', mlog.bold(i), 'is not supported. 
See the meson docs for cross compilation support:') + mlog.warning(' - URL: https://mesonbuild.com/CMake-module.html#cross-compilation') + mlog.warning(' --> Ignoring this option') + continue + res += [i] + return res + +class CMakeInclude: + def __init__(self, path: Path, isSystem: bool = False): + self.path = path + self.isSystem = isSystem + + def __repr__(self) -> str: + return f'' + +class CMakeFileGroup: + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.defines = data.get('defines', '') # type: str + self.flags = _flags_to_list(data.get('compileFlags', '')) # type: T.List[str] + self.is_generated = data.get('isGenerated', False) # type: bool + self.language = data.get('language', 'C') # type: str + self.sources = [Path(x) for x in data.get('sources', [])] # type: T.List[Path] + + # Fix the include directories + self.includes = [] # type: T.List[CMakeInclude] + for i in data.get('includePath', []): + if isinstance(i, dict) and 'path' in i: + isSystem = i.get('isSystem', False) + assert isinstance(isSystem, bool) + assert isinstance(i['path'], str) + self.includes += [CMakeInclude(Path(i['path']), isSystem)] + elif isinstance(i, str): + self.includes += [CMakeInclude(Path(i))] + + def log(self) -> None: + mlog.log('flags =', mlog.bold(', '.join(self.flags))) + mlog.log('defines =', mlog.bold(', '.join(self.defines))) + mlog.log('includes =', mlog.bold(', '.join([str(x) for x in self.includes]))) + mlog.log('is_generated =', mlog.bold('true' if self.is_generated else 'false')) + mlog.log('language =', mlog.bold(self.language)) + mlog.log('sources:') + for i in self.sources: + with mlog.nested(): + mlog.log(i.as_posix()) + +class CMakeTarget: + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.artifacts = [Path(x) for x in data.get('artifacts', [])] # type: T.List[Path] + self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path + self.build_dir = Path(data.get('buildDirectory', '')) # type: Path + self.name = data.get('name', '') # type: str + self.full_name = data.get('fullName', '') # type: str + self.install = data.get('hasInstallRule', False) # type: bool + self.install_paths = [Path(x) for x in set(data.get('installPaths', []))] # type: T.List[Path] + self.link_lang = data.get('linkerLanguage', '') # type: str + self.link_libraries = _flags_to_list(data.get('linkLibraries', '')) # type: T.List[str] + self.link_flags = _flags_to_list(data.get('linkFlags', '')) # type: T.List[str] + self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', '')) # type: T.List[str] + # self.link_path = Path(data.get('linkPath', '')) # type: Path + self.type = data.get('type', 'EXECUTABLE') # type: str + # self.is_generator_provided = data.get('isGeneratorProvided', False) # type: bool + self.files = [] # type: T.List[CMakeFileGroup] + + for i in data.get('fileGroups', []): + self.files += [CMakeFileGroup(i)] + + def log(self) -> None: + mlog.log('artifacts =', mlog.bold(', '.join([x.as_posix() for x in self.artifacts]))) + mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix())) + mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix())) + mlog.log('name =', mlog.bold(self.name)) + mlog.log('full_name =', mlog.bold(self.full_name)) + mlog.log('install =', mlog.bold('true' if self.install else 'false')) + mlog.log('install_paths =', mlog.bold(', '.join([x.as_posix() for x in self.install_paths]))) + mlog.log('link_lang =', mlog.bold(self.link_lang)) + mlog.log('link_libraries =', mlog.bold(', '.join(self.link_libraries))) + mlog.log('link_flags =', 
mlog.bold(', '.join(self.link_flags))) + mlog.log('link_lang_flags =', mlog.bold(', '.join(self.link_lang_flags))) + # mlog.log('link_path =', mlog.bold(self.link_path)) + mlog.log('type =', mlog.bold(self.type)) + # mlog.log('is_generator_provided =', mlog.bold('true' if self.is_generator_provided else 'false')) + for idx, i in enumerate(self.files): + mlog.log(f'Files {idx}:') + with mlog.nested(): + i.log() + +class CMakeProject: + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path + self.build_dir = Path(data.get('buildDirectory', '')) # type: Path + self.name = data.get('name', '') # type: str + self.targets = [] # type: T.List[CMakeTarget] + + for i in data.get('targets', []): + self.targets += [CMakeTarget(i)] + + def log(self) -> None: + mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix())) + mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix())) + mlog.log('name =', mlog.bold(self.name)) + for idx, i in enumerate(self.targets): + mlog.log(f'Target {idx}:') + with mlog.nested(): + i.log() + +class CMakeConfiguration: + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.name = data.get('name', '') # type: str + self.projects = [] # type: T.List[CMakeProject] + for i in data.get('projects', []): + self.projects += [CMakeProject(i)] + + def log(self) -> None: + mlog.log('name =', mlog.bold(self.name)) + for idx, i in enumerate(self.projects): + mlog.log(f'Project {idx}:') + with mlog.nested(): + i.log() + +class SingleTargetOptions: + def __init__(self) -> None: + self.opts = {} # type: T.Dict[str, str] + self.lang_args = {} # type: T.Dict[str, T.List[str]] + self.link_args = [] # type: T.List[str] + self.install = 'preserve' + + def set_opt(self, opt: str, val: str) -> None: + self.opts[opt] = val + + def append_args(self, lang: str, args: T.List[str]) -> None: + if lang not in self.lang_args: + self.lang_args[lang] = [] + self.lang_args[lang] += args + + def append_link_args(self, args: T.List[str]) -> None: + self.link_args += args + + def set_install(self, install: bool) -> None: + self.install = 'true' if install else 'false' + + def get_override_options(self, initial: T.List[str]) -> T.List[str]: + res = [] # type: T.List[str] + for i in initial: + opt = i[:i.find('=')] + if opt not in self.opts: + res += [i] + res += [f'{k}={v}' for k, v in self.opts.items()] + return res + + def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]: + if lang in self.lang_args: + return initial + self.lang_args[lang] + return initial + + def get_link_args(self, initial: T.List[str]) -> T.List[str]: + return initial + self.link_args + + def get_install(self, initial: bool) -> bool: + return {'preserve': initial, 'true': True, 'false': False}[self.install] + +class TargetOptions: + def __init__(self) -> None: + self.global_options = SingleTargetOptions() + self.target_options = {} # type: T.Dict[str, SingleTargetOptions] + + def __getitem__(self, tgt: str) -> SingleTargetOptions: + if tgt not in self.target_options: + self.target_options[tgt] = SingleTargetOptions() + return self.target_options[tgt] + + def get_override_options(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_override_options(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_override_options(initial) + return initial + + def get_compile_args(self, tgt: str, lang: str, initial: T.List[str]) -> T.List[str]: + initial = 
self.global_options.get_compile_args(lang, initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_compile_args(lang, initial) + return initial + + def get_link_args(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_link_args(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_link_args(initial) + return initial + + def get_install(self, tgt: str, initial: bool) -> bool: + initial = self.global_options.get_install(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_install(initial) + return initial diff --git a/meson/mesonbuild/cmake/data/preload.cmake b/meson/mesonbuild/cmake/data/preload.cmake new file mode 100644 index 000000000..234860b75 --- /dev/null +++ b/meson/mesonbuild/cmake/data/preload.cmake @@ -0,0 +1,82 @@ +if(MESON_PS_LOADED) + return() +endif() + +set(MESON_PS_LOADED ON) + +cmake_policy(PUSH) +cmake_policy(SET CMP0054 NEW) # https://cmake.org/cmake/help/latest/policy/CMP0054.html + +# Dummy macros that have a special meaning in the meson code +macro(meson_ps_execute_delayed_calls) +endmacro() + +macro(meson_ps_reload_vars) +endmacro() + +macro(meson_ps_disabled_function) + message(WARNING "The function '${ARGV0}' is disabled in the context of CMake subprojects.\n" + "This should not be an issue but may lead to compilation errors.") +endmacro() + +# Helper macro to inspect the current CMake state +macro(meson_ps_inspect_vars) + set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}") + set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}") + meson_ps_execute_delayed_calls() +endmacro() + + +# Override some system functions with custom code and forward the args +# to the original function +macro(add_custom_command) + meson_ps_inspect_vars() + _add_custom_command(${ARGV}) +endmacro() + +macro(add_custom_target) + meson_ps_inspect_vars() + _add_custom_target(${ARGV}) +endmacro() + +macro(set_property) + meson_ps_inspect_vars() + _set_property(${ARGV}) +endmacro() + +function(set_source_files_properties) + set(FILES) + set(I 0) + set(PROPERTIES OFF) + + while(I LESS ARGC) + if(NOT PROPERTIES) + if("${ARGV${I}}" STREQUAL "PROPERTIES") + set(PROPERTIES ON) + else() + list(APPEND FILES "${ARGV${I}}") + endif() + + math(EXPR I "${I} + 1") + else() + set(ID_IDX ${I}) + math(EXPR PROP_IDX "${ID_IDX} + 1") + + set(ID "${ARGV${ID_IDX}}") + set(PROP "${ARGV${PROP_IDX}}") + + set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}") + math(EXPR I "${I} + 2") + endif() + endwhile() +endfunction() + +# Disable some functions that would mess up the CMake meson integration +macro(target_precompile_headers) + meson_ps_disabled_function(target_precompile_headers) +endmacro() + +set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property) +meson_ps_reload_vars() + +cmake_policy(POP) diff --git a/meson/mesonbuild/cmake/executor.py b/meson/mesonbuild/cmake/executor.py new file mode 100644 index 000000000..7b06f2623 --- /dev/null +++ b/meson/mesonbuild/cmake/executor.py @@ -0,0 +1,246 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +import subprocess as S +from pathlib import Path +from threading import Thread +import typing as T +import re +import os + +from .. import mlog +from ..environment import Environment +from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice, is_windows, OptionKey +from ..programs import find_external_program, NonExistingExternalProgram + +if T.TYPE_CHECKING: + from ..environment import Environment + from ..programs import ExternalProgram + +TYPE_result = T.Tuple[int, T.Optional[str], T.Optional[str]] +TYPE_cache_key = T.Tuple[str, T.Tuple[str, ...], str, T.FrozenSet[T.Tuple[str, str]]] + +class CMakeExecutor: + # The class's copy of the CMake path. Avoids having to search for it + # multiple times in the same Meson invocation. + class_cmakebin = PerMachine(None, None) # type: PerMachine[T.Optional[ExternalProgram]] + class_cmakevers = PerMachine(None, None) # type: PerMachine[T.Optional[str]] + class_cmake_cache = {} # type: T.Dict[T.Any, TYPE_result] + + def __init__(self, environment: 'Environment', version: str, for_machine: MachineChoice, silent: bool = False): + self.min_version = version + self.environment = environment + self.for_machine = for_machine + self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent) + self.always_capture_stderr = True + self.print_cmout = False + self.prefix_paths = [] # type: T.List[str] + self.extra_cmake_args = [] # type: T.List[str] + + if self.cmakebin is None: + return + + if not version_compare(self.cmakevers, self.min_version): + mlog.warning( + 'The version of CMake', mlog.bold(self.cmakebin.get_path()), + 'is', mlog.bold(self.cmakevers), 'but version', mlog.bold(self.min_version), + 'is required') + self.cmakebin = None + return + + self.prefix_paths = self.environment.coredata.options[OptionKey('cmake_prefix_path', machine=self.for_machine)].value + if self.prefix_paths: + self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))] + + def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple[T.Optional['ExternalProgram'], T.Optional[str]]: + # Only search for CMake the first time and store the result in the class + # definition + if isinstance(CMakeExecutor.class_cmakebin[self.for_machine], NonExistingExternalProgram): + mlog.debug(f'CMake binary for {self.for_machine} is cached as not found') + return None, None + elif CMakeExecutor.class_cmakebin[self.for_machine] is not None: + mlog.debug(f'CMake binary for {self.for_machine} is cached.') + else: + assert CMakeExecutor.class_cmakebin[self.for_machine] is None + + mlog.debug(f'CMake binary for {self.for_machine} is not cached') + for potential_cmakebin in find_external_program( + environment, self.for_machine, 'cmake', 'CMake', + environment.default_cmake, allow_default_for_cross=False): + version_if_ok = self.check_cmake(potential_cmakebin) + if not version_if_ok: + continue + if not silent: + mlog.log('Found CMake:', mlog.bold(potential_cmakebin.get_path()), + 
f'({version_if_ok})') + CMakeExecutor.class_cmakebin[self.for_machine] = potential_cmakebin + CMakeExecutor.class_cmakevers[self.for_machine] = version_if_ok + break + else: + if not silent: + mlog.log('Found CMake:', mlog.red('NO')) + # Set to False instead of None to signify that we've already + # searched for it and not found it + CMakeExecutor.class_cmakebin[self.for_machine] = NonExistingExternalProgram() + CMakeExecutor.class_cmakevers[self.for_machine] = None + return None, None + + return CMakeExecutor.class_cmakebin[self.for_machine], CMakeExecutor.class_cmakevers[self.for_machine] + + def check_cmake(self, cmakebin: 'ExternalProgram') -> T.Optional[str]: + if not cmakebin.found(): + mlog.log(f'Did not find CMake {cmakebin.name!r}') + return None + try: + p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2] + if p.returncode != 0: + mlog.warning('Found CMake {!r} but couldn\'t run it' + ''.format(' '.join(cmakebin.get_command()))) + return None + except FileNotFoundError: + mlog.warning('We thought we found CMake {!r} but now it\'s not there. How odd!' + ''.format(' '.join(cmakebin.get_command()))) + return None + except PermissionError: + msg = 'Found CMake {!r} but didn\'t have permissions to run it.'.format(' '.join(cmakebin.get_command())) + if not is_windows(): + msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.' + mlog.warning(msg) + return None + cmvers = re.search(r'(cmake|cmake3)\s*version\s*([\d.]+)', out).group(2) + return cmvers + + def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None: + if print_cmout is not None: + self.print_cmout = print_cmout + if always_capture_stderr is not None: + self.always_capture_stderr = always_capture_stderr + + def _cache_key(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_cache_key: + fenv = frozenset(env.items()) if env is not None else frozenset() + targs = tuple(args) + return (self.cmakebin.get_path(), targs, build_dir.as_posix(), fenv) + + def _call_cmout_stderr(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + cmd = self.cmakebin.get_command() + args + proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.PIPE, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion + + # stdout and stderr MUST be read at the same time to avoid pipe + # blocking issues. The easiest way to do this is with a separate + # thread for one of the pipes. 
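+ # Reading the pipes one after the other could deadlock: once the OS pipe
+ # buffer of the stream we are not reading fills up, CMake blocks in write()
+ # while we block in read() on the other stream. The helper thread below
+ # drains stdout so the main thread can consume stderr line by line.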
+ def print_stdout() -> None: + while True: + line = proc.stdout.readline() + if not line: + break + mlog.log(line.decode(errors='ignore').strip('\n')) + proc.stdout.close() + + t = Thread(target=print_stdout) + t.start() + + try: + # Read stderr line by line and log non trace lines + raw_trace = '' + tline_start_reg = re.compile(r'^\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(.*$') + inside_multiline_trace = False + while True: + line_raw = proc.stderr.readline() + if not line_raw: + break + line = line_raw.decode(errors='ignore') + if tline_start_reg.match(line): + raw_trace += line + inside_multiline_trace = not line.endswith(' )\n') + elif inside_multiline_trace: + raw_trace += line + else: + mlog.warning(line.strip('\n')) + + finally: + proc.stderr.close() + t.join() + proc.wait() + + return proc.returncode, None, raw_trace + + def _call_cmout(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + cmd = self.cmakebin.get_command() + args + proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.STDOUT, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion + while True: + line = proc.stdout.readline() + if not line: + break + mlog.log(line.decode(errors='ignore').strip('\n')) + proc.stdout.close() + proc.wait() + return proc.returncode, None, None + + def _call_quiet(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + build_dir.mkdir(parents=True, exist_ok=True) + cmd = self.cmakebin.get_command() + args + ret = S.run(cmd, env=env, cwd=str(build_dir), close_fds=False, + stdout=S.PIPE, stderr=S.PIPE, universal_newlines=False) # TODO [PYTHON_37]: drop Path conversion + rc = ret.returncode + out = ret.stdout.decode(errors='ignore') + err = ret.stderr.decode(errors='ignore') + return rc, out, err + + def _call_impl(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + mlog.debug(f'Calling CMake ({self.cmakebin.get_command()}) in {build_dir} with:') + for i in args: + mlog.debug(f' - "{i}"') + if not self.print_cmout: + return self._call_quiet(args, build_dir, env) + else: + if self.always_capture_stderr: + return self._call_cmout_stderr(args, build_dir, env) + else: + return self._call_cmout(args, build_dir, env) + + def call(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]] = None, disable_cache: bool = False) -> TYPE_result: + if env is None: + env = os.environ.copy() + + args = args + self.extra_cmake_args + if disable_cache: + return self._call_impl(args, build_dir, env) + + # First check if cached, if not call the real cmake function + cache = CMakeExecutor.class_cmake_cache + key = self._cache_key(args, build_dir, env) + if key not in cache: + cache[key] = self._call_impl(args, build_dir, env) + return cache[key] + + def found(self) -> bool: + return self.cmakebin is not None + + def version(self) -> str: + return self.cmakevers + + def executable_path(self) -> str: + return self.cmakebin.get_path() + + def get_command(self) -> T.List[str]: + return self.cmakebin.get_command() + + def get_cmake_prefix_paths(self) -> T.List[str]: + return self.prefix_paths + + def machine_choice(self) -> MachineChoice: + return self.for_machine diff --git a/meson/mesonbuild/cmake/fileapi.py b/meson/mesonbuild/cmake/fileapi.py new file mode 100644 index 000000000..5d4d01a13 --- /dev/null +++ b/meson/mesonbuild/cmake/fileapi.py @@ -0,0 +1,320 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .common import CMakeException, CMakeBuildFile, CMakeConfiguration +import typing as T +from .. import mlog +from pathlib import Path +import json +import re + +STRIP_KEYS = ['cmake', 'reply', 'backtrace', 'backtraceGraph', 'version'] + +class CMakeFileAPI: + def __init__(self, build_dir: Path): + self.build_dir = build_dir + self.api_base_dir = self.build_dir / '.cmake' / 'api' / 'v1' + self.request_dir = self.api_base_dir / 'query' / 'client-meson' + self.reply_dir = self.api_base_dir / 'reply' + self.cmake_sources = [] # type: T.List[CMakeBuildFile] + self.cmake_configurations = [] # type: T.List[CMakeConfiguration] + self.kind_resolver_map = { + 'codemodel': self._parse_codemodel, + 'cmakeFiles': self._parse_cmakeFiles, + } + + def get_cmake_sources(self) -> T.List[CMakeBuildFile]: + return self.cmake_sources + + def get_cmake_configurations(self) -> T.List[CMakeConfiguration]: + return self.cmake_configurations + + def setup_request(self) -> None: + self.request_dir.mkdir(parents=True, exist_ok=True) + + query = { + 'requests': [ + {'kind': 'codemodel', 'version': {'major': 2, 'minor': 0}}, + {'kind': 'cmakeFiles', 'version': {'major': 1, 'minor': 0}}, + ] + } + + query_file = self.request_dir / 'query.json' + query_file.write_text(json.dumps(query, indent=2), encoding='utf-8') + + def load_reply(self) -> None: + if not self.reply_dir.is_dir(): + raise CMakeException('No response from the CMake file API') + + root = None + reg_index = re.compile(r'^index-.*\.json$') + for i in self.reply_dir.iterdir(): + if reg_index.match(i.name): + root = i + break + + if not root: + raise CMakeException('Failed to find the CMake file API index') + + index = self._reply_file_content(root) # Load the root index + index = self._strip_data(index) # Avoid loading duplicate files + index = self._resolve_references(index) # Load everything + index = self._strip_data(index) # Strip unused data (again for loaded files) + + # Debug output + debug_json = self.build_dir / '..' / 'fileAPI.json' + debug_json = debug_json.resolve() + debug_json.write_text(json.dumps(index, indent=2), encoding='utf-8') + mlog.cmd_ci_include(debug_json.as_posix()) + + # parse the JSON + for i in index['objects']: + assert(isinstance(i, dict)) + assert('kind' in i) + assert(i['kind'] in self.kind_resolver_map) + + self.kind_resolver_map[i['kind']](i) + + def _parse_codemodel(self, data: T.Dict[str, T.Any]) -> None: + assert('configurations' in data) + assert('paths' in data) + + source_dir = data['paths']['source'] + build_dir = data['paths']['build'] + + # The file API output differs quite a bit from the server + # output. It is more flat than the server output and makes + # heavy use of references. Here these references are + # resolved and the resulting data structure is identical + # to the CMake serve output. 
+ + def helper_parse_dir(dir_entry: T.Dict[str, T.Any]) -> T.Tuple[Path, Path]: + src_dir = Path(dir_entry.get('source', '.')) + bld_dir = Path(dir_entry.get('build', '.')) + src_dir = src_dir if src_dir.is_absolute() else source_dir / src_dir + bld_dir = bld_dir if bld_dir.is_absolute() else build_dir / bld_dir + src_dir = src_dir.resolve() + bld_dir = bld_dir.resolve() + + return src_dir, bld_dir + + def parse_sources(comp_group: T.Dict[str, T.Any], tgt: T.Dict[str, T.Any]) -> T.Tuple[T.List[Path], T.List[Path], T.List[int]]: + gen = [] + src = [] + idx = [] + + src_list_raw = tgt.get('sources', []) + for i in comp_group.get('sourceIndexes', []): + if i >= len(src_list_raw) or 'path' not in src_list_raw[i]: + continue + if src_list_raw[i].get('isGenerated', False): + gen += [Path(src_list_raw[i]['path'])] + else: + src += [Path(src_list_raw[i]['path'])] + idx += [i] + + return src, gen, idx + + def parse_target(tgt: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]: + src_dir, bld_dir = helper_parse_dir(cnf.get('paths', {})) + + # Parse install paths (if present) + install_paths = [] + if 'install' in tgt: + prefix = Path(tgt['install']['prefix']['path']) + install_paths = [prefix / x['path'] for x in tgt['install']['destinations']] + install_paths = list(set(install_paths)) + + # On the first look, it looks really nice that the CMake devs have + # decided to use arrays for the linker flags. However, this feeling + # soon turns into despair when you realize that there only one entry + # per type in most cases, and we still have to do manual string splitting. + link_flags = [] + link_libs = [] + for i in tgt.get('link', {}).get('commandFragments', []): + if i['role'] == 'flags': + link_flags += [i['fragment']] + elif i['role'] == 'libraries': + link_libs += [i['fragment']] + elif i['role'] == 'libraryPath': + link_flags += ['-L{}'.format(i['fragment'])] + elif i['role'] == 'frameworkPath': + link_flags += ['-F{}'.format(i['fragment'])] + for i in tgt.get('archive', {}).get('commandFragments', []): + if i['role'] == 'flags': + link_flags += [i['fragment']] + + # TODO The `dependencies` entry is new in the file API. + # maybe we can make use of that in addition to the + # implicit dependency detection + tgt_data = { + 'artifacts': [Path(x.get('path', '')) for x in tgt.get('artifacts', [])], + 'sourceDirectory': src_dir, + 'buildDirectory': bld_dir, + 'name': tgt.get('name', ''), + 'fullName': tgt.get('nameOnDisk', ''), + 'hasInstallRule': 'install' in tgt, + 'installPaths': install_paths, + 'linkerLanguage': tgt.get('link', {}).get('language', 'CXX'), + 'linkLibraries': ' '.join(link_libs), # See previous comment block why we join the array + 'linkFlags': ' '.join(link_flags), # See previous comment block why we join the array + 'type': tgt.get('type', 'EXECUTABLE'), + 'fileGroups': [], + } + + processed_src_idx = [] + for cg in tgt.get('compileGroups', []): + # Again, why an array, when there is usually only one element + # and arguments are separated with spaces... 
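+ # The fragments are joined into a single space separated 'compileFlags'
+ # string below; CMakeFileGroup splits that string back into a list with
+ # _flags_to_list() when the target data is loaded.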
+ flags = [] + for i in cg.get('compileCommandFragments', []): + flags += [i['fragment']] + + cg_data = { + 'defines': [x.get('define', '') for x in cg.get('defines', [])], + 'compileFlags': ' '.join(flags), + 'language': cg.get('language', 'C'), + 'isGenerated': None, # Set later, flag is stored per source file + 'sources': [], + 'includePath': cg.get('includes', []), + } + + normal_src, generated_src, src_idx = parse_sources(cg, tgt) + if normal_src: + cg_data = dict(cg_data) + cg_data['isGenerated'] = False + cg_data['sources'] = normal_src + tgt_data['fileGroups'] += [cg_data] + if generated_src: + cg_data = dict(cg_data) + cg_data['isGenerated'] = True + cg_data['sources'] = generated_src + tgt_data['fileGroups'] += [cg_data] + processed_src_idx += src_idx + + # Object libraries have no compile groups, only source groups. + # So we add all the source files to a dummy source group that were + # not found in the previous loop + normal_src = [] + generated_src = [] + for idx, src in enumerate(tgt.get('sources', [])): + if idx in processed_src_idx: + continue + + if src.get('isGenerated', False): + generated_src += [src['path']] + else: + normal_src += [src['path']] + + if normal_src: + tgt_data['fileGroups'] += [{ + 'isGenerated': False, + 'sources': normal_src, + }] + if generated_src: + tgt_data['fileGroups'] += [{ + 'isGenerated': True, + 'sources': generated_src, + }] + return tgt_data + + def parse_project(pro: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]: + # Only look at the first directory specified in directoryIndexes + # TODO Figure out what the other indexes are there for + p_src_dir = source_dir + p_bld_dir = build_dir + try: + p_src_dir, p_bld_dir = helper_parse_dir(cnf['directories'][pro['directoryIndexes'][0]]) + except (IndexError, KeyError): + pass + + pro_data = { + 'name': pro.get('name', ''), + 'sourceDirectory': p_src_dir, + 'buildDirectory': p_bld_dir, + 'targets': [], + } + + for ref in pro.get('targetIndexes', []): + tgt = {} + try: + tgt = cnf['targets'][ref] + except (IndexError, KeyError): + pass + pro_data['targets'] += [parse_target(tgt)] + + return pro_data + + for cnf in data.get('configurations', []): + cnf_data = { + 'name': cnf.get('name', ''), + 'projects': [], + } + + for pro in cnf.get('projects', []): + cnf_data['projects'] += [parse_project(pro)] + + self.cmake_configurations += [CMakeConfiguration(cnf_data)] + + def _parse_cmakeFiles(self, data: T.Dict[str, T.Any]) -> None: + assert 'inputs' in data + assert 'paths' in data + + src_dir = Path(data['paths']['source']) + + for i in data['inputs']: + path = Path(i['path']) + path = path if path.is_absolute() else src_dir / path + self.cmake_sources += [CMakeBuildFile(path, i.get('isCMake', False), i.get('isGenerated', False))] + + def _strip_data(self, data: T.Any) -> T.Any: + if isinstance(data, list): + for idx, i in enumerate(data): + data[idx] = self._strip_data(i) + + elif isinstance(data, dict): + new = {} + for key, val in data.items(): + if key not in STRIP_KEYS: + new[key] = self._strip_data(val) + data = new + + return data + + def _resolve_references(self, data: T.Any) -> T.Any: + if isinstance(data, list): + for idx, i in enumerate(data): + data[idx] = self._resolve_references(i) + + elif isinstance(data, dict): + # Check for the "magic" reference entry and insert + # it into the root data dict + if 'jsonFile' in data: + data.update(self._reply_file_content(data['jsonFile'])) + + for key, val in data.items(): + data[key] = self._resolve_references(val) + + return data + + def 
_reply_file_content(self, filename: Path) -> T.Dict[str, T.Any]: + real_path = self.reply_dir / filename + if not real_path.exists(): + raise CMakeException(f'File "{real_path}" does not exist') + + data = json.loads(real_path.read_text(encoding='utf-8')) + assert isinstance(data, dict) + for i in data.keys(): + assert isinstance(i, str) + return data diff --git a/meson/mesonbuild/cmake/generator.py b/meson/mesonbuild/cmake/generator.py new file mode 100644 index 000000000..848fdf944 --- /dev/null +++ b/meson/mesonbuild/cmake/generator.py @@ -0,0 +1,134 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import mesonlib +import typing as T + +def parse_generator_expressions(raw: str) -> str: + '''Parse CMake generator expressions + + Most generator expressions are simply ignored for + simplicety, however some are required for some common + use cases. + ''' + + # Early abort if no generator expression present + if '$<' not in raw: + return raw + + out = '' # type: str + i = 0 # type: int + + def equal(arg: str) -> str: + col_pos = arg.find(',') + if col_pos < 0: + return '0' + else: + return '1' if arg[:col_pos] == arg[col_pos + 1:] else '0' + + def vers_comp(op: str, arg: str) -> str: + col_pos = arg.find(',') + if col_pos < 0: + return '0' + else: + return '1' if mesonlib.version_compare(arg[:col_pos], '{}{}'.format(op, arg[col_pos + 1:])) else '0' + + supported = { + # Boolean functions + 'BOOL': lambda x: '0' if x.upper() in ['0', 'FALSE', 'OFF', 'N', 'NO', 'IGNORE', 'NOTFOUND'] or x.endswith('-NOTFOUND') else '1', + 'AND': lambda x: '1' if all([y == '1' for y in x.split(',')]) else '0', + 'OR': lambda x: '1' if any([y == '1' for y in x.split(',')]) else '0', + 'NOT': lambda x: '0' if x == '1' else '1', + + '0': lambda x: '', + '1': lambda x: x, + + # String operations + 'STREQUAL': equal, + 'EQUAL': equal, + 'VERSION_LESS': lambda x: vers_comp('<', x), + 'VERSION_GREATER': lambda x: vers_comp('>', x), + 'VERSION_EQUAL': lambda x: vers_comp('=', x), + 'VERSION_LESS_EQUAL': lambda x: vers_comp('<=', x), + 'VERSION_GREATER_EQUAL': lambda x: vers_comp('>=', x), + + # String modification + 'LOWER_CASE': lambda x: x.lower(), + 'UPPER_CASE': lambda x: x.upper(), + + # Always assume the BUILD_INTERFACE is valid. 
+ # INSTALL_INTERFACE is always invalid for subprojects and + # it should also never appear in CMake config files, used + # for dependencies + 'INSTALL_INTERFACE': lambda x: '', + 'BUILD_INTERFACE': lambda x: x, + + # Constants + 'ANGLE-R': lambda x: '>', + 'COMMA': lambda x: ',', + 'SEMICOLON': lambda x: ';', + } # type: T.Dict[str, T.Callable[[str], str]] + + # Recursively evaluate generator expressions + def eval_generator_expressions() -> str: + nonlocal i + i += 2 + + func = '' # type: str + args = '' # type: str + res = '' # type: str + exp = '' # type: str + + # Determine the body of the expression + while i < len(raw): + if raw[i] == '>': + # End of the generator expression + break + elif i < len(raw) - 1 and raw[i] == '$' and raw[i + 1] == '<': + # Nested generator expression + exp += eval_generator_expressions() + else: + # Generator expression body + exp += raw[i] + + i += 1 + + # Split the expression into a function and arguments part + col_pos = exp.find(':') + if col_pos < 0: + func = exp + else: + func = exp[:col_pos] + args = exp[col_pos + 1:] + + func = func.strip() + args = args.strip() + + # Evaluate the function + if func in supported: + res = supported[func](args) + + return res + + while i < len(raw): + if i < len(raw) - 1 and raw[i] == '$' and raw[i + 1] == '<': + # Generator expression detected --> try resolving it + out += eval_generator_expressions() + else: + # Normal string, leave unchanged + out += raw[i] + + i += 1 + + return out diff --git a/meson/mesonbuild/cmake/interpreter.py b/meson/mesonbuild/cmake/interpreter.py new file mode 100644 index 000000000..fe66becb9 --- /dev/null +++ b/meson/mesonbuild/cmake/interpreter.py @@ -0,0 +1,1369 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +from .common import CMakeException, CMakeTarget, TargetOptions, CMakeConfiguration, language_map, backend_generator_map, cmake_get_generator_args, check_cmake_args +from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel, ReplyCMakeInputs, ReplyCodeModel +from .fileapi import CMakeFileAPI +from .executor import CMakeExecutor +from .toolchain import CMakeToolchain, CMakeExecScope +from .traceparser import CMakeTraceParser, CMakeGeneratorTarget +from .. 
import mlog, mesonlib +from ..mesonlib import MachineChoice, OrderedSet, version_compare, path_is_in_root, relative_to_if_possible, OptionKey +from ..mesondata import mesondata +from ..compilers.compilers import assembler_suffixes, lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header +from ..programs import ExternalProgram +from ..coredata import FORBIDDEN_TARGET_NAMES +from enum import Enum +from functools import lru_cache +from pathlib import Path +import typing as T +import re +from os import environ + +from ..mparser import ( + Token, + BaseNode, + CodeBlockNode, + FunctionNode, + ArrayNode, + ArgumentNode, + AssignmentNode, + BooleanNode, + StringNode, + IdNode, + IndexNode, + MethodNode, + NumberNode, +) + + +if T.TYPE_CHECKING: + from .._typing import ImmutableListProtocol + from ..build import Build + from ..backend.backends import Backend + from ..environment import Environment + +TYPE_mixed = T.Union[str, int, bool, Path, BaseNode] +TYPE_mixed_list = T.Union[TYPE_mixed, T.Sequence[TYPE_mixed]] +TYPE_mixed_kwargs = T.Dict[str, TYPE_mixed_list] + +# Disable all warnings automaticall enabled with --trace and friends +# See https://cmake.org/cmake/help/latest/variable/CMAKE_POLICY_WARNING_CMPNNNN.html +disable_policy_warnings = [ + 'CMP0025', + 'CMP0047', + 'CMP0056', + 'CMP0060', + 'CMP0065', + 'CMP0066', + 'CMP0067', + 'CMP0082', + 'CMP0089', + 'CMP0102', +] + +target_type_map = { + 'STATIC_LIBRARY': 'static_library', + 'MODULE_LIBRARY': 'shared_module', + 'SHARED_LIBRARY': 'shared_library', + 'EXECUTABLE': 'executable', + 'OBJECT_LIBRARY': 'static_library', + 'INTERFACE_LIBRARY': 'header_only' +} + +skip_targets = ['UTILITY'] + +blacklist_compiler_flags = [ + '-Wall', '-Wextra', '-Weverything', '-Werror', '-Wpedantic', '-pedantic', '-w', + '/W1', '/W2', '/W3', '/W4', '/Wall', '/WX', '/w', + '/O1', '/O2', '/Ob', '/Od', '/Og', '/Oi', '/Os', '/Ot', '/Ox', '/Oy', '/Ob0', + '/RTC1', '/RTCc', '/RTCs', '/RTCu', + '/Z7', '/Zi', '/ZI', +] + +blacklist_link_flags = [ + '/machine:x64', '/machine:x86', '/machine:arm', '/machine:ebc', + '/debug', '/debug:fastlink', '/debug:full', '/debug:none', + '/incremental', +] + +blacklist_clang_cl_link_flags = ['/GR', '/EHsc', '/MDd', '/Zi', '/RTC1'] + +blacklist_link_libs = [ + 'kernel32.lib', + 'user32.lib', + 'gdi32.lib', + 'winspool.lib', + 'shell32.lib', + 'ole32.lib', + 'oleaut32.lib', + 'uuid.lib', + 'comdlg32.lib', + 'advapi32.lib' +] + +transfer_dependencies_from = ['header_only'] + +_cmake_name_regex = re.compile(r'[^_a-zA-Z0-9]') +def _sanitize_cmake_name(name: str) -> str: + name = _cmake_name_regex.sub('_', name) + if name in FORBIDDEN_TARGET_NAMES or name.startswith('meson'): + name = 'cm_' + name + return name + +class OutputTargetMap: + rm_so_version = re.compile(r'(\.[0-9]+)+$') + + def __init__(self, build_dir: Path): + self.tgt_map = {} # type: T.Dict[str, T.Union['ConverterTarget', 'ConverterCustomTarget']] + self.build_dir = build_dir + + def add(self, tgt: T.Union['ConverterTarget', 'ConverterCustomTarget']) -> None: + def assign_keys(keys: T.List[str]) -> None: + for i in [x for x in keys if x]: + self.tgt_map[i] = tgt + keys = [self._target_key(tgt.cmake_name)] + if isinstance(tgt, ConverterTarget): + keys += [tgt.full_name] + keys += [self._rel_artifact_key(x) for x in tgt.artifacts] + keys += [self._base_artifact_key(x) for x in tgt.artifacts] + if isinstance(tgt, ConverterCustomTarget): + keys += [self._rel_generated_file_key(x) for x in tgt.original_outputs] + keys += [self._base_generated_file_key(x) for x in 
tgt.original_outputs] + assign_keys(keys) + + def _return_first_valid_key(self, keys: T.List[str]) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: + for i in keys: + if i and i in self.tgt_map: + return self.tgt_map[i] + return None + + def target(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: + return self._return_first_valid_key([self._target_key(name)]) + + def executable(self, name: str) -> T.Optional['ConverterTarget']: + tgt = self.target(name) + if tgt is None or not isinstance(tgt, ConverterTarget): + return None + if tgt.meson_func() != 'executable': + return None + return tgt + + def artifact(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: + keys = [] + candidates = [name, OutputTargetMap.rm_so_version.sub('', name)] + for i in lib_suffixes: + if not name.endswith('.' + i): + continue + new_name = name[:-len(i) - 1] + new_name = OutputTargetMap.rm_so_version.sub('', new_name) + candidates += [f'{new_name}.{i}'] + for i in candidates: + keys += [self._rel_artifact_key(Path(i)), Path(i).name, self._base_artifact_key(Path(i))] + return self._return_first_valid_key(keys) + + def generated(self, name: Path) -> T.Optional['ConverterCustomTarget']: + res = self._return_first_valid_key([self._rel_generated_file_key(name), self._base_generated_file_key(name)]) + assert res is None or isinstance(res, ConverterCustomTarget) + return res + + # Utility functions to generate local keys + def _rel_path(self, fname: Path) -> T.Optional[Path]: + try: + return fname.resolve().relative_to(self.build_dir) + except ValueError: + pass + return None + + def _target_key(self, tgt_name: str) -> str: + return f'__tgt_{tgt_name}__' + + def _rel_generated_file_key(self, fname: Path) -> T.Optional[str]: + path = self._rel_path(fname) + return f'__relgen_{path.as_posix()}__' if path else None + + def _base_generated_file_key(self, fname: Path) -> str: + return f'__gen_{fname.name}__' + + def _rel_artifact_key(self, fname: Path) -> T.Optional[str]: + path = self._rel_path(fname) + return f'__relart_{path.as_posix()}__' if path else None + + def _base_artifact_key(self, fname: Path) -> str: + return f'__art_{fname.name}__' + +class ConverterTarget: + def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: MachineChoice) -> None: + self.env = env + self.for_machine = for_machine + self.artifacts = target.artifacts + self.src_dir = target.src_dir + self.build_dir = target.build_dir + self.name = target.name + self.cmake_name = target.name + self.full_name = target.full_name + self.type = target.type + self.install = target.install + self.install_dir = None # type: T.Optional[Path] + self.link_libraries = target.link_libraries + self.link_flags = target.link_flags + target.link_lang_flags + self.depends_raw = [] # type: T.List[str] + self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] + + if target.install_paths: + self.install_dir = target.install_paths[0] + + self.languages = set() # type: T.Set[str] + self.sources = [] # type: T.List[Path] + self.generated = [] # type: T.List[Path] + self.generated_ctgt = [] # type: T.List[CustomTargetReference] + self.includes = [] # type: T.List[Path] + self.sys_includes = [] # type: T.List[Path] + self.link_with = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] + self.object_libs = [] # type: T.List[ConverterTarget] + self.compile_opts = {} # type: T.Dict[str, T.List[str]] + self.public_compile_opts = [] # type: 
T.List[str] + self.pie = False + + # Project default override options (c_std, cpp_std, etc.) + self.override_options = [] # type: T.List[str] + + # Convert the target name to a valid meson target name + self.name = _sanitize_cmake_name(self.name) + + self.generated_raw = [] # type: T.List[Path] + + for i in target.files: + languages = set() # type: T.Set[str] + src_suffixes = set() # type: T.Set[str] + + # Insert suffixes + for j in i.sources: + if not j.suffix: + continue + src_suffixes.add(j.suffix[1:]) + + # Determine the meson language(s) + # Extract the default language from the explicit CMake field + lang_cmake_to_meson = {val.lower(): key for key, val in language_map.items()} + languages.add(lang_cmake_to_meson.get(i.language.lower(), 'c')) + + # Determine missing languages from the source suffixes + for sfx in src_suffixes: + for key, val in lang_suffixes.items(): + if sfx in val: + languages.add(key) + break + + # Register the new languages and initialize the compile opts array + for lang in languages: + self.languages.add(lang) + if lang not in self.compile_opts: + self.compile_opts[lang] = [] + + # Add arguments, but avoid duplicates + args = i.flags + args += [f'-D{x}' for x in i.defines] + for lang in languages: + self.compile_opts[lang] += [x for x in args if x not in self.compile_opts[lang]] + + # Handle include directories + self.includes += [x.path for x in i.includes if x.path not in self.includes and not x.isSystem] + self.sys_includes += [x.path for x in i.includes if x.path not in self.sys_includes and x.isSystem] + + # Add sources to the right array + if i.is_generated: + self.generated_raw += i.sources + else: + self.sources += i.sources + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}: {self.name}>' + + std_regex = re.compile(r'([-]{1,2}std=|/std:v?|[-]{1,2}std:)(.*)') + + def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, subdir: Path, install_prefix: Path, trace: CMakeTraceParser) -> None: + # Detect setting the C and C++ standard and do additional compiler args manipulation + for i in ['c', 'cpp']: + if i not in self.compile_opts: + continue + + temp = [] + for j in self.compile_opts[i]: + m = ConverterTarget.std_regex.match(j) + ctgt = output_target_map.generated(Path(j)) + if m: + std = m.group(2) + supported = self._all_lang_stds(i) + if std not in supported: + mlog.warning( + 'Unknown {0}_std "{1}" -> Ignoring. Try setting the project-' + 'level {0}_std if build errors occur. Known ' + '{0}_stds are: {2}'.format(i, std, ' '.join(supported)), + once=True + ) + continue + self.override_options += [f'{i}_std={std}'] + elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']: + self.pie = True + elif isinstance(ctgt, ConverterCustomTarget): + # Sometimes projects pass generated source files as compiler + # flags. 
Add these as generated sources to ensure that the + # corresponding custom target is run.2 + self.generated_raw += [Path(j)] + temp += [j] + elif j in blacklist_compiler_flags: + pass + else: + temp += [j] + + self.compile_opts[i] = temp + + # Make sure to force enable -fPIC for OBJECT libraries + if self.type.upper() == 'OBJECT_LIBRARY': + self.pie = True + + # Use the CMake trace, if required + tgt = trace.targets.get(self.cmake_name) + if tgt: + self.depends_raw = trace.targets[self.cmake_name].depends + + # TODO refactor this copy paste from CMakeDependency for future releases + reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$') + to_process = [self.cmake_name] + processed = [] + while len(to_process) > 0: + curr = to_process.pop(0) + + if curr in processed or curr not in trace.targets: + continue + + tgt = trace.targets[curr] + cfgs = [] + cfg = '' + otherDeps = [] + libraries = [] + mlog.debug(str(tgt)) + + if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties: + self.includes += [Path(x) for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x] + + if 'INTERFACE_LINK_OPTIONS' in tgt.properties: + self.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x] + + if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties: + self.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x] + + if 'INTERFACE_COMPILE_OPTIONS' in tgt.properties: + self.public_compile_opts += [x for x in tgt.properties['INTERFACE_COMPILE_OPTIONS'] if x] + + if 'IMPORTED_CONFIGURATIONS' in tgt.properties: + cfgs += [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x] + cfg = cfgs[0] + + if 'CONFIGURATIONS' in tgt.properties: + cfgs += [x for x in tgt.properties['CONFIGURATIONS'] if x] + cfg = cfgs[0] + + is_debug = self.env.coredata.get_option(OptionKey('debug')); + if is_debug: + if 'DEBUG' in cfgs: + cfg = 'DEBUG' + elif 'RELEASE' in cfgs: + cfg = 'RELEASE' + else: + if 'RELEASE' in cfgs: + cfg = 'RELEASE' + + if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties: + libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x] + elif 'IMPORTED_IMPLIB' in tgt.properties: + libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x] + elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties: + libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x] + elif 'IMPORTED_LOCATION' in tgt.properties: + libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] + + if 'LINK_LIBRARIES' in tgt.properties: + otherDeps += [x for x in tgt.properties['LINK_LIBRARIES'] if x] + + if 'INTERFACE_LINK_LIBRARIES' in tgt.properties: + otherDeps += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x] + + if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties: + otherDeps += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x] + elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties: + otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x] + + for j in otherDeps: + if j in trace.targets: + to_process += [j] + elif reg_is_lib.match(j) or Path(j).exists(): + libraries += [j] + + for j in libraries: + if j not in self.link_libraries: + self.link_libraries += [j] + + processed += [curr] + elif self.type.upper() not in ['EXECUTABLE', 'OBJECT_LIBRARY']: + mlog.warning('CMake: Target', mlog.bold(self.cmake_name), 'not found in CMake trace. 
This can lead to build errors') + + temp = [] + for i in self.link_libraries: + # Let meson handle this arcane magic + if ',-rpath,' in i: + continue + if not Path(i).is_absolute(): + link_with = output_target_map.artifact(i) + if link_with: + self.link_with += [link_with] + continue + + temp += [i] + self.link_libraries = temp + + # Filter out files that are not supported by the language + supported = list(assembler_suffixes) + list(header_suffixes) + list(obj_suffixes) + for i in self.languages: + supported += list(lang_suffixes[i]) + supported = [f'.{x}' for x in supported] + self.sources = [x for x in self.sources if any([x.name.endswith(y) for y in supported])] + self.generated_raw = [x for x in self.generated_raw if any([x.name.endswith(y) for y in supported])] + + # Make paths relative + def rel_path(x: Path, is_header: bool, is_generated: bool) -> T.Optional[Path]: + if not x.is_absolute(): + x = self.src_dir / x + x = x.resolve() + assert x.is_absolute() + if not x.exists() and not any([x.name.endswith(y) for y in obj_suffixes]) and not is_generated: + if path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True): + x.mkdir(parents=True, exist_ok=True) + return x.relative_to(Path(self.env.get_build_dir()) / subdir) + else: + mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'does not exist.') + mlog.warning(' --> Ignoring. This can lead to build errors.') + return None + if x in trace.explicit_headers: + return None + if ( + path_is_in_root(x, Path(self.env.get_source_dir())) + and not ( + path_is_in_root(x, root_src_dir) or + path_is_in_root(x, Path(self.env.get_build_dir())) + ) + ): + mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'is inside the root project but', mlog.bold('not'), 'inside the subproject.') + mlog.warning(' --> Ignoring. This can lead to build errors.') + return None + if path_is_in_root(x, Path(self.env.get_build_dir())) and is_header: + return x.relative_to(Path(self.env.get_build_dir()) / subdir) + if path_is_in_root(x, root_src_dir): + return x.relative_to(root_src_dir) + return x + + build_dir_rel = self.build_dir.relative_to(Path(self.env.get_build_dir()) / subdir) + self.generated_raw = [rel_path(x, False, True) for x in self.generated_raw] + self.includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.includes)] + [build_dir_rel])) + self.sys_includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.sys_includes)])) + self.sources = [rel_path(x, False, False) for x in self.sources] + + # Resolve custom targets + for gen_file in self.generated_raw: + ctgt = output_target_map.generated(gen_file) + if ctgt: + assert isinstance(ctgt, ConverterCustomTarget) + ref = ctgt.get_ref(gen_file) + assert isinstance(ref, CustomTargetReference) and ref.valid() + self.generated_ctgt += [ref] + elif gen_file is not None: + self.generated += [gen_file] + + # Remove delete entries + self.includes = [x for x in self.includes if x is not None] + self.sys_includes = [x for x in self.sys_includes if x is not None] + self.sources = [x for x in self.sources if x is not None] + + # Make sure '.' 
is always in the include directories + if Path('.') not in self.includes: + self.includes += [Path('.')] + + # make install dir relative to the install prefix + if self.install_dir and self.install_dir.is_absolute(): + if path_is_in_root(self.install_dir, install_prefix): + self.install_dir = self.install_dir.relative_to(install_prefix) + + # Remove blacklisted options and libs + def check_flag(flag: str) -> bool: + if flag.lower() in blacklist_link_flags or flag in blacklist_compiler_flags + blacklist_clang_cl_link_flags: + return False + if flag.startswith('/D'): + return False + return True + + self.link_libraries = [x for x in self.link_libraries if x.lower() not in blacklist_link_libs] + self.link_flags = [x for x in self.link_flags if check_flag(x)] + + # Handle OSX frameworks + def handle_frameworks(flags: T.List[str]) -> T.List[str]: + res: T.List[str] = [] + for i in flags: + p = Path(i) + if not p.exists() or not p.name.endswith('.framework'): + res += [i] + continue + res += ['-framework', p.stem] + return res + + self.link_libraries = handle_frameworks(self.link_libraries) + self.link_flags = handle_frameworks(self.link_flags) + + # Handle explicit CMake add_dependency() calls + for i in self.depends_raw: + dep_tgt = output_target_map.target(i) + if dep_tgt: + self.depends.append(dep_tgt) + + def process_object_libs(self, obj_target_list: T.List['ConverterTarget'], linker_workaround: bool) -> None: + # Try to detect the object library(s) from the generated input sources + temp = [x for x in self.generated if any([x.name.endswith('.' + y) for y in obj_suffixes])] + stem = [x.stem for x in temp] + exts = self._all_source_suffixes() + # Temp now stores the source filenames of the object files + for i in obj_target_list: + source_files = [x.name for x in i.sources + i.generated] + for j in stem: + # On some platforms (specifically looking at you Windows with vs20xy backend) CMake does + # not produce object files with the format `foo.cpp.obj`, instead it skipps the language + # suffix and just produces object files like `foo.obj`. Thus we have to do our best to + # undo this step and guess the correct language suffix of the object file. This is done + # by trying all language suffixes meson knows and checking if one of them fits. + candidates = [j] # type: T.List[str] + if not any([j.endswith('.' + x) for x in exts]): + mlog.warning('Object files do not contain source file extensions, thus falling back to guessing them.', once=True) + candidates += [f'{j}.{x}' for x in exts] + if any([x in source_files for x in candidates]): + if linker_workaround: + self._append_objlib_sources(i) + else: + self.includes += i.includes + self.includes = list(OrderedSet(self.includes)) + self.object_libs += [i] + break + + # Filter out object files from the sources + self.generated = [x for x in self.generated if not any([x.name.endswith('.' 
+ y) for y in obj_suffixes])] + + def _append_objlib_sources(self, tgt: 'ConverterTarget') -> None: + self.includes += tgt.includes + self.sources += tgt.sources + self.generated += tgt.generated + self.generated_ctgt += tgt.generated_ctgt + self.includes = list(OrderedSet(self.includes)) + self.sources = list(OrderedSet(self.sources)) + self.generated = list(OrderedSet(self.generated)) + self.generated_ctgt = list(OrderedSet(self.generated_ctgt)) + + # Inherit compiler arguments since they may be required for building + for lang, opts in tgt.compile_opts.items(): + if lang not in self.compile_opts: + self.compile_opts[lang] = [] + self.compile_opts[lang] += [x for x in opts if x not in self.compile_opts[lang]] + + @lru_cache(maxsize=None) + def _all_source_suffixes(self) -> 'ImmutableListProtocol[str]': + suffixes = [] # type: T.List[str] + for exts in lang_suffixes.values(): + suffixes += [x for x in exts] + return suffixes + + @lru_cache(maxsize=None) + def _all_lang_stds(self, lang: str) -> 'ImmutableListProtocol[str]': + try: + res = self.env.coredata.options[OptionKey('std', machine=MachineChoice.BUILD, lang=lang)].choices + except KeyError: + return [] + + # TODO: Get rid of this once we have proper typing for options + assert isinstance(res, list) + for i in res: + assert isinstance(i, str) + + return res + + def process_inter_target_dependencies(self) -> None: + # Move the dependencies from all transfer_dependencies_from to the target + to_process = list(self.depends) + processed = [] + new_deps = [] + for i in to_process: + processed += [i] + if isinstance(i, ConverterTarget) and i.meson_func() in transfer_dependencies_from: + to_process += [x for x in i.depends if x not in processed] + else: + new_deps += [i] + self.depends = list(OrderedSet(new_deps)) + + def cleanup_dependencies(self) -> None: + # Clear the dependencies from targets that where moved from + if self.meson_func() in transfer_dependencies_from: + self.depends = [] + + def meson_func(self) -> str: + return target_type_map.get(self.type.upper()) + + def log(self) -> None: + mlog.log('Target', mlog.bold(self.name), f'({self.cmake_name})') + mlog.log(' -- artifacts: ', mlog.bold(str(self.artifacts))) + mlog.log(' -- full_name: ', mlog.bold(self.full_name)) + mlog.log(' -- type: ', mlog.bold(self.type)) + mlog.log(' -- install: ', mlog.bold('true' if self.install else 'false')) + mlog.log(' -- install_dir: ', mlog.bold(self.install_dir.as_posix() if self.install_dir else '')) + mlog.log(' -- link_libraries: ', mlog.bold(str(self.link_libraries))) + mlog.log(' -- link_with: ', mlog.bold(str(self.link_with))) + mlog.log(' -- object_libs: ', mlog.bold(str(self.object_libs))) + mlog.log(' -- link_flags: ', mlog.bold(str(self.link_flags))) + mlog.log(' -- languages: ', mlog.bold(str(self.languages))) + mlog.log(' -- includes: ', mlog.bold(str(self.includes))) + mlog.log(' -- sys_includes: ', mlog.bold(str(self.sys_includes))) + mlog.log(' -- sources: ', mlog.bold(str(self.sources))) + mlog.log(' -- generated: ', mlog.bold(str(self.generated))) + mlog.log(' -- generated_ctgt: ', mlog.bold(str(self.generated_ctgt))) + mlog.log(' -- pie: ', mlog.bold('true' if self.pie else 'false')) + mlog.log(' -- override_opts: ', mlog.bold(str(self.override_options))) + mlog.log(' -- depends: ', mlog.bold(str(self.depends))) + mlog.log(' -- options:') + for key, val in self.compile_opts.items(): + mlog.log(' -', key, '=', mlog.bold(str(val))) + +class CustomTargetReference: + def __init__(self, ctgt: 'ConverterCustomTarget', index: int) -> 
None: + self.ctgt = ctgt # type: ConverterCustomTarget + self.index = index # type: int + + def __repr__(self) -> str: + if self.valid(): + return '<{}: {} [{}]>'.format(self.__class__.__name__, self.ctgt.name, self.ctgt.outputs[self.index]) + else: + return f'<{self.__class__.__name__}: INVALID REFERENCE>' + + def valid(self) -> bool: + return self.ctgt is not None and self.index >= 0 + + def filename(self) -> str: + return self.ctgt.outputs[self.index] + +class ConverterCustomTarget: + tgt_counter = 0 # type: int + out_counter = 0 # type: int + + def __init__(self, target: CMakeGeneratorTarget, env: 'Environment', for_machine: MachineChoice) -> None: + assert target.current_bin_dir is not None + assert target.current_src_dir is not None + self.name = target.name + if not self.name: + self.name = f'custom_tgt_{ConverterCustomTarget.tgt_counter}' + ConverterCustomTarget.tgt_counter += 1 + self.cmake_name = str(self.name) + self.original_outputs = list(target.outputs) + self.outputs = [x.name for x in self.original_outputs] + self.conflict_map = {} # type: T.Dict[str, str] + self.command = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]] + self.working_dir = target.working_dir + self.depends_raw = target.depends + self.inputs = [] # type: T.List[T.Union[str, CustomTargetReference]] + self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] + self.current_bin_dir = target.current_bin_dir # type: Path + self.current_src_dir = target.current_src_dir # type: Path + self.env = env + self.for_machine = for_machine + self._raw_target = target + + # Convert the target name to a valid meson target name + self.name = _sanitize_cmake_name(self.name) + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}: {self.name} {self.outputs}>' + + def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, all_outputs: T.List[str], trace: CMakeTraceParser) -> None: + # Default the working directory to ${CMAKE_CURRENT_BINARY_DIR} + if self.working_dir is None: + self.working_dir = self.current_bin_dir + + # relative paths in the working directory are always relative + # to ${CMAKE_CURRENT_BINARY_DIR} + if not self.working_dir.is_absolute(): + self.working_dir = self.current_bin_dir / self.working_dir + + # Modify the original outputs if they are relative. 
Again, + # relative paths are relative to ${CMAKE_CURRENT_BINARY_DIR} + def ensure_absolute(x: Path) -> Path: + if x.is_absolute(): + return x + else: + return self.current_bin_dir / x + self.original_outputs = [ensure_absolute(x) for x in self.original_outputs] + + # Ensure that there is no duplicate output in the project so + # that meson can handle cases where the same filename is + # generated in multiple directories + temp_outputs = [] # type: T.List[str] + for i in self.outputs: + if i in all_outputs: + old = str(i) + i = f'c{ConverterCustomTarget.out_counter}_{i}' + ConverterCustomTarget.out_counter += 1 + self.conflict_map[old] = i + all_outputs += [i] + temp_outputs += [i] + self.outputs = temp_outputs + + # Check if the command is a build target + commands = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]] + for curr_cmd in self._raw_target.command: + assert(isinstance(curr_cmd, list)) + cmd = [] # type: T.List[T.Union[str, ConverterTarget]] + + for j in curr_cmd: + if not j: + continue + target = output_target_map.executable(j) + if target: + # When cross compiling, binaries have to be executed with an exe_wrapper (for instance wine for mingw-w64) + if self.env.exe_wrapper is not None and self.env.properties[self.for_machine].get_cmake_use_exe_wrapper(): + assert isinstance(self.env.exe_wrapper, ExternalProgram) + cmd += self.env.exe_wrapper.get_command() + cmd += [target] + continue + elif j in trace.targets: + trace_tgt = trace.targets[j] + if trace_tgt.type == 'EXECUTABLE' and 'IMPORTED_LOCATION' in trace_tgt.properties: + cmd += trace_tgt.properties['IMPORTED_LOCATION'] + continue + mlog.debug(f'CMake: Found invalid CMake target "{j}" --> ignoring \n{trace_tgt}') + + # Fallthrough on error + cmd += [j] + + commands += [cmd] + self.command = commands + + # If the custom target does not declare any output, create a dummy + # one that can be used as dependency. + if not self.outputs: + self.outputs = [self.name + '.h'] + + # Check dependencies and input files + for i in self.depends_raw: + if not i: + continue + raw = Path(i) + art = output_target_map.artifact(i) + tgt = output_target_map.target(i) + gen = output_target_map.generated(raw) + + rel_to_root = None + try: + rel_to_root = raw.relative_to(root_src_dir) + except ValueError: + rel_to_root = None + + # First check for existing files. Only then check for existing + # targets, etc. This reduces the chance of misdetecting input files + # as outputs from other targets. 
+ # See https://github.com/mesonbuild/meson/issues/6632 + if not raw.is_absolute() and (self.current_src_dir / raw).exists(): + self.inputs += [(self.current_src_dir / raw).relative_to(root_src_dir).as_posix()] + elif raw.is_absolute() and raw.exists() and rel_to_root is not None: + self.inputs += [rel_to_root.as_posix()] + elif art: + self.depends += [art] + elif tgt: + self.depends += [tgt] + elif gen: + ctgt_ref = gen.get_ref(raw) + assert ctgt_ref is not None + self.inputs += [ctgt_ref] + + def process_inter_target_dependencies(self) -> None: + # Move the dependencies from all transfer_dependencies_from to the target + to_process = list(self.depends) + processed = [] + new_deps = [] + for i in to_process: + processed += [i] + if isinstance(i, ConverterTarget) and i.meson_func() in transfer_dependencies_from: + to_process += [x for x in i.depends if x not in processed] + else: + new_deps += [i] + self.depends = list(OrderedSet(new_deps)) + + def get_ref(self, fname: Path) -> T.Optional[CustomTargetReference]: + name = fname.name + try: + if name in self.conflict_map: + name = self.conflict_map[name] + idx = self.outputs.index(name) + return CustomTargetReference(self, idx) + except ValueError: + return None + + def log(self) -> None: + mlog.log('Custom Target', mlog.bold(self.name), f'({self.cmake_name})') + mlog.log(' -- command: ', mlog.bold(str(self.command))) + mlog.log(' -- outputs: ', mlog.bold(str(self.outputs))) + mlog.log(' -- conflict_map: ', mlog.bold(str(self.conflict_map))) + mlog.log(' -- working_dir: ', mlog.bold(str(self.working_dir))) + mlog.log(' -- depends_raw: ', mlog.bold(str(self.depends_raw))) + mlog.log(' -- inputs: ', mlog.bold(str(self.inputs))) + mlog.log(' -- depends: ', mlog.bold(str(self.depends))) + +class CMakeAPI(Enum): + SERVER = 1 + FILE = 2 + +class CMakeInterpreter: + def __init__(self, build: 'Build', subdir: Path, src_dir: Path, install_prefix: Path, env: 'Environment', backend: 'Backend'): + self.build = build + self.subdir = subdir + self.src_dir = src_dir + self.build_dir_rel = subdir / '__CMake_build' + self.build_dir = Path(env.get_build_dir()) / self.build_dir_rel + self.install_prefix = install_prefix + self.env = env + self.for_machine = MachineChoice.HOST # TODO make parameter + self.backend_name = backend.name + self.linkers = set() # type: T.Set[str] + self.cmake_api = CMakeAPI.SERVER + self.client = CMakeClient(self.env) + self.fileapi = CMakeFileAPI(self.build_dir) + + # Raw CMake results + self.bs_files = [] # type: T.List[Path] + self.codemodel_configs = None # type: T.Optional[T.List[CMakeConfiguration]] + self.raw_trace = None # type: T.Optional[str] + + # Analysed data + self.project_name = '' + self.languages = [] # type: T.List[str] + self.targets = [] # type: T.List[ConverterTarget] + self.custom_targets = [] # type: T.List[ConverterCustomTarget] + self.trace = CMakeTraceParser('', Path('.')) # Will be replaced in analyse + self.output_target_map = OutputTargetMap(self.build_dir) + + # Generated meson data + self.generated_targets = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]] + self.internal_name_map = {} # type: T.Dict[str, str] + + # Do some special handling for object libraries for certain configurations + self._object_lib_workaround = False + if self.backend_name.startswith('vs'): + for comp in self.env.coredata.compilers[self.for_machine].values(): + if comp.get_linker_id() == 'link': + self._object_lib_workaround = True + break + + def configure(self, extra_cmake_options: T.List[str]) -> CMakeExecutor: + # Find 
CMake + # TODO: Using MachineChoice.BUILD should always be correct here, but also evaluate the use of self.for_machine + cmake_exe = CMakeExecutor(self.env, '>=3.7', MachineChoice.BUILD) + if not cmake_exe.found(): + raise CMakeException('Unable to find CMake') + self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True) + + preload_file = mesondata['cmake/data/preload.cmake'].write_to_private(self.env) + toolchain = CMakeToolchain(cmake_exe, self.env, self.for_machine, CMakeExecScope.SUBPROJECT, self.build_dir, preload_file) + toolchain_file = toolchain.write() + + # TODO: drop this check once the deprecated `cmake_args` kwarg is removed + extra_cmake_options = check_cmake_args(extra_cmake_options) + + cmake_args = [] + cmake_args += cmake_get_generator_args(self.env) + cmake_args += [f'-DCMAKE_INSTALL_PREFIX={self.install_prefix}'] + cmake_args += extra_cmake_options + trace_args = self.trace.trace_args() + cmcmp_args = [f'-DCMAKE_POLICY_WARNING_{x}=OFF' for x in disable_policy_warnings] + + if version_compare(cmake_exe.version(), '>=3.14'): + self.cmake_api = CMakeAPI.FILE + self.fileapi.setup_request() + + # Run CMake + mlog.log() + with mlog.nested(): + mlog.log('Configuring the build directory with', mlog.bold('CMake'), 'version', mlog.cyan(cmake_exe.version())) + mlog.log(mlog.bold('Running CMake with:'), ' '.join(cmake_args)) + mlog.log(mlog.bold(' - build directory: '), self.build_dir.as_posix()) + mlog.log(mlog.bold(' - source directory: '), self.src_dir.as_posix()) + mlog.log(mlog.bold(' - toolchain file: '), toolchain_file.as_posix()) + mlog.log(mlog.bold(' - preload file: '), preload_file.as_posix()) + mlog.log(mlog.bold(' - trace args: '), ' '.join(trace_args)) + mlog.log(mlog.bold(' - disabled policy warnings:'), '[{}]'.format(', '.join(disable_policy_warnings))) + mlog.log() + self.build_dir.mkdir(parents=True, exist_ok=True) + os_env = environ.copy() + os_env['LC_ALL'] = 'C' + final_args = cmake_args + trace_args + cmcmp_args + toolchain.get_cmake_args() + [self.src_dir.as_posix()] + + cmake_exe.set_exec_mode(print_cmout=True, always_capture_stderr=self.trace.requires_stderr()) + rc, _, self.raw_trace = cmake_exe.call(final_args, self.build_dir, env=os_env, disable_cache=True) + + mlog.log() + h = mlog.green('SUCCEEDED') if rc == 0 else mlog.red('FAILED') + mlog.log('CMake configuration:', h) + if rc != 0: + raise CMakeException('Failed to configure the CMake subproject') + + return cmake_exe + + def initialise(self, extra_cmake_options: T.List[str]) -> None: + # Run configure the old way because doing it + # with the server doesn't work for some reason + # Additionally, the File API requires a configure anyway + cmake_exe = self.configure(extra_cmake_options) + + # Continue with the file API If supported + if self.cmake_api is CMakeAPI.FILE: + # Parse the result + self.fileapi.load_reply() + + # Load the buildsystem file list + cmake_files = self.fileapi.get_cmake_sources() + self.bs_files = [x.file for x in cmake_files if not x.is_cmake and not x.is_temp] + self.bs_files = [relative_to_if_possible(x, Path(self.env.get_source_dir())) for x in self.bs_files] + self.bs_files = [x for x in self.bs_files if not path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True)] + self.bs_files = list(OrderedSet(self.bs_files)) + + # Load the codemodel configurations + self.codemodel_configs = self.fileapi.get_cmake_configurations() + return + + with self.client.connect(cmake_exe): + generator = backend_generator_map[self.backend_name] + 
self.client.do_handshake(self.src_dir, self.build_dir, generator, 1) + + # Do a second configure to initialise the server + self.client.query_checked(RequestConfigure(), 'CMake server configure') + + # Generate the build system files + self.client.query_checked(RequestCompute(), 'Generating build system files') + + # Get CMake build system files + bs_reply = self.client.query_checked(RequestCMakeInputs(), 'Querying build system files') + assert isinstance(bs_reply, ReplyCMakeInputs) + + # Now get the CMake code model + cm_reply = self.client.query_checked(RequestCodeModel(), 'Querying the CMake code model') + assert isinstance(cm_reply, ReplyCodeModel) + + src_dir = bs_reply.src_dir + self.bs_files = [x.file for x in bs_reply.build_files if not x.is_cmake and not x.is_temp] + self.bs_files = [relative_to_if_possible(src_dir / x, Path(self.env.get_source_dir()), resolve=True) for x in self.bs_files] + self.bs_files = [x for x in self.bs_files if not path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True)] + self.bs_files = list(OrderedSet(self.bs_files)) + self.codemodel_configs = cm_reply.configs + + def analyse(self) -> None: + if self.codemodel_configs is None: + raise CMakeException('CMakeInterpreter was not initialized') + + # Clear analyser data + self.project_name = '' + self.languages = [] + self.targets = [] + self.custom_targets = [] + + # Parse the trace + self.trace.parse(self.raw_trace) + + # Find all targets + added_target_names = [] # type: T.List[str] + for i_0 in self.codemodel_configs: + for j_0 in i_0.projects: + if not self.project_name: + self.project_name = j_0.name + for k_0 in j_0.targets: + # Avoid duplicate targets from different configurations and known + # dummy CMake internal target types + if k_0.type not in skip_targets and k_0.name not in added_target_names: + added_target_names += [k_0.name] + self.targets += [ConverterTarget(k_0, self.env, self.for_machine)] + + # Add interface targets from trace, if not already present. + # This step is required because interface targets were removed from + # the CMake file API output. 
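# Illustrative note: a project line such as `add_library(foo_headers INTERFACE)` typically shows up
# only in the trace (recorded with type 'INTERFACE' by _cmake_add_library() in traceparser.py),
# since interface targets are missing from the file API reply as described above. The loop below
# synthesizes a minimal 'INTERFACE_LIBRARY' dummy target for it so that it is converted like any
# other target and can later take the header-only / declare_dependency path in pretend_to_be_meson().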
+ api_target_name_list = [x.name for x in self.targets] + for i_1 in self.trace.targets.values(): + if i_1.type != 'INTERFACE' or i_1.name in api_target_name_list or i_1.imported: + continue + dummy = CMakeTarget({ + 'name': i_1.name, + 'type': 'INTERFACE_LIBRARY', + 'sourceDirectory': self.src_dir, + 'buildDirectory': self.build_dir, + }) + self.targets += [ConverterTarget(dummy, self.env, self.for_machine)] + + for i_2 in self.trace.custom_targets: + self.custom_targets += [ConverterCustomTarget(i_2, self.env, self.for_machine)] + + # generate the output_target_map + for i_3 in [*self.targets, *self.custom_targets]: + assert isinstance(i_3, (ConverterTarget, ConverterCustomTarget)) + self.output_target_map.add(i_3) + + # First pass: Basic target cleanup + object_libs = [] + custom_target_outputs = [] # type: T.List[str] + for ctgt in self.custom_targets: + ctgt.postprocess(self.output_target_map, self.src_dir, custom_target_outputs, self.trace) + for tgt in self.targets: + tgt.postprocess(self.output_target_map, self.src_dir, self.subdir, self.install_prefix, self.trace) + if tgt.type == 'OBJECT_LIBRARY': + object_libs += [tgt] + self.languages += [x for x in tgt.languages if x not in self.languages] + + # Second pass: Detect object library dependencies + for tgt in self.targets: + tgt.process_object_libs(object_libs, self._object_lib_workaround) + + # Third pass: Reassign dependencies to avoid some loops + for tgt in self.targets: + tgt.process_inter_target_dependencies() + for ctgt in self.custom_targets: + ctgt.process_inter_target_dependencies() + + # Fourth pass: Remove rassigned dependencies + for tgt in self.targets: + tgt.cleanup_dependencies() + + mlog.log('CMake project', mlog.bold(self.project_name), 'has', mlog.bold(str(len(self.targets) + len(self.custom_targets))), 'build targets.') + + def pretend_to_be_meson(self, options: TargetOptions) -> CodeBlockNode: + if not self.project_name: + raise CMakeException('CMakeInterpreter was not analysed') + + def token(tid: str = 'string', val: TYPE_mixed = '') -> Token: + return Token(tid, self.subdir.as_posix(), 0, 0, 0, None, val) + + def string(value: str) -> StringNode: + return StringNode(token(val=value)) + + def id_node(value: str) -> IdNode: + return IdNode(token(val=value)) + + def number(value: int) -> NumberNode: + return NumberNode(token(val=value)) + + def nodeify(value: TYPE_mixed_list) -> BaseNode: + if isinstance(value, str): + return string(value) + if isinstance(value, Path): + return string(value.as_posix()) + elif isinstance(value, bool): + return BooleanNode(token(val=value)) + elif isinstance(value, int): + return number(value) + elif isinstance(value, list): + return array(value) + elif isinstance(value, BaseNode): + return value + raise RuntimeError('invalid type of value: {} ({})'.format(type(value).__name__, str(value))) + + def indexed(node: BaseNode, index: int) -> IndexNode: + return IndexNode(node, nodeify(index)) + + def array(elements: TYPE_mixed_list) -> ArrayNode: + args = ArgumentNode(token()) + if not isinstance(elements, list): + elements = [args] + args.arguments += [nodeify(x) for x in elements if x is not None] + return ArrayNode(args, 0, 0, 0, 0) + + def function(name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> FunctionNode: + args = [] if args is None else args + kwargs = {} if kwargs is None else kwargs + args_n = ArgumentNode(token()) + if not isinstance(args, list): + assert isinstance(args, (str, int, bool, Path, BaseNode)) + args = 
[args] + args_n.arguments = [nodeify(x) for x in args if x is not None] + args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None} + func_n = FunctionNode(self.subdir.as_posix(), 0, 0, 0, 0, name, args_n) + return func_n + + def method(obj: BaseNode, name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> MethodNode: + args = [] if args is None else args + kwargs = {} if kwargs is None else kwargs + args_n = ArgumentNode(token()) + if not isinstance(args, list): + assert isinstance(args, (str, int, bool, Path, BaseNode)) + args = [args] + args_n.arguments = [nodeify(x) for x in args if x is not None] + args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None} + return MethodNode(self.subdir.as_posix(), 0, 0, obj, name, args_n) + + def assign(var_name: str, value: BaseNode) -> AssignmentNode: + return AssignmentNode(self.subdir.as_posix(), 0, 0, var_name, value) + + # Generate the root code block and the project function call + root_cb = CodeBlockNode(token()) + root_cb.lines += [function('project', [self.project_name] + self.languages)] + + # Add the run script for custom commands + + # Add the targets + processing = [] # type: T.List[str] + processed = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]] + name_map = {} # type: T.Dict[str, str] + + def extract_tgt(tgt: T.Union[ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> IdNode: + tgt_name = None + if isinstance(tgt, (ConverterTarget, ConverterCustomTarget)): + tgt_name = tgt.name + elif isinstance(tgt, CustomTargetReference): + tgt_name = tgt.ctgt.name + assert(tgt_name is not None and tgt_name in processed) + res_var = processed[tgt_name]['tgt'] + return id_node(res_var) if res_var else None + + def detect_cycle(tgt: T.Union[ConverterTarget, ConverterCustomTarget]) -> None: + if tgt.name in processing: + raise CMakeException('Cycle in CMake inputs/dependencies detected') + processing.append(tgt.name) + + def resolve_ctgt_ref(ref: CustomTargetReference) -> T.Union[IdNode, IndexNode]: + tgt_var = extract_tgt(ref) + if len(ref.ctgt.outputs) == 1: + return tgt_var + else: + return indexed(tgt_var, ref.index) + + def process_target(tgt: ConverterTarget) -> None: + detect_cycle(tgt) + + # First handle inter target dependencies + link_with = [] # type: T.List[IdNode] + objec_libs = [] # type: T.List[IdNode] + sources = [] # type: T.List[Path] + generated = [] # type: T.List[T.Union[IdNode, IndexNode]] + generated_filenames = [] # type: T.List[str] + custom_targets = [] # type: T.List[ConverterCustomTarget] + dependencies = [] # type: T.List[IdNode] + for i in tgt.link_with: + assert(isinstance(i, ConverterTarget)) + if i.name not in processed: + process_target(i) + link_with += [extract_tgt(i)] + for i in tgt.object_libs: + assert(isinstance(i, ConverterTarget)) + if i.name not in processed: + process_target(i) + objec_libs += [extract_tgt(i)] + for i in tgt.depends: + if not isinstance(i, ConverterCustomTarget): + continue + if i.name not in processed: + process_custom_target(i) + dependencies += [extract_tgt(i)] + + # Generate the source list and handle generated sources + sources += tgt.sources + sources += tgt.generated + + for ctgt_ref in tgt.generated_ctgt: + ctgt = ctgt_ref.ctgt + if ctgt.name not in processed: + process_custom_target(ctgt) + generated += [resolve_ctgt_ref(ctgt_ref)] + generated_filenames += [ctgt_ref.filename()] + if ctgt not in custom_targets: + custom_targets += [ctgt] + + # Add all header files 
from all used custom targets. This + # ensures that all custom targets are built before any + # sources of the current target are compiled and thus all + # header files are present. This step is necessary because + # CMake always ensures that a custom target is executed + # before another target if at least one output is used. + for ctgt in custom_targets: + for j in ctgt.outputs: + if not is_header(j) or j in generated_filenames: + continue + + generated += [resolve_ctgt_ref(ctgt.get_ref(Path(j)))] + generated_filenames += [j] + + # Determine the meson function to use for the build target + tgt_func = tgt.meson_func() + if not tgt_func: + raise CMakeException(f'Unknown target type "{tgt.type}"') + + # Determine the variable names + inc_var = f'{tgt.name}_inc' + dir_var = f'{tgt.name}_dir' + sys_var = f'{tgt.name}_sys' + src_var = f'{tgt.name}_src' + dep_var = f'{tgt.name}_dep' + tgt_var = tgt.name + + install_tgt = options.get_install(tgt.cmake_name, tgt.install) + + # Generate target kwargs + tgt_kwargs = { + 'build_by_default': install_tgt, + 'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries), + 'link_with': link_with, + 'include_directories': id_node(inc_var), + 'install': install_tgt, + 'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options), + 'objects': [method(x, 'extract_all_objects') for x in objec_libs], + } # type: TYPE_mixed_kwargs + + # Only set if installed and only override if it is set + if install_tgt and tgt.install_dir: + tgt_kwargs['install_dir'] = tgt.install_dir + + # Handle compiler args + for key, val in tgt.compile_opts.items(): + tgt_kwargs[f'{key}_args'] = options.get_compile_args(tgt.cmake_name, key, val) + + # Handle -fPCI, etc + if tgt_func == 'executable': + tgt_kwargs['pie'] = tgt.pie + elif tgt_func == 'static_library': + tgt_kwargs['pic'] = tgt.pie + + # declare_dependency kwargs + dep_kwargs = { + 'link_args': tgt.link_flags + tgt.link_libraries, + 'link_with': id_node(tgt_var), + 'compile_args': tgt.public_compile_opts, + 'include_directories': id_node(inc_var), + } # type: TYPE_mixed_kwargs + + if dependencies: + generated += dependencies + + # Generate the function nodes + dir_node = assign(dir_var, function('include_directories', tgt.includes)) + sys_node = assign(sys_var, function('include_directories', tgt.sys_includes, {'is_system': True})) + inc_node = assign(inc_var, array([id_node(dir_var), id_node(sys_var)])) + node_list = [dir_node, sys_node, inc_node] + if tgt_func == 'header_only': + del dep_kwargs['link_with'] + dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs)) + node_list += [dep_node] + src_var = None + tgt_var = None + else: + src_node = assign(src_var, function('files', sources)) + tgt_node = assign(tgt_var, function(tgt_func, [tgt_var, id_node(src_var), *generated], tgt_kwargs)) + node_list += [src_node, tgt_node] + if tgt_func in ['static_library', 'shared_library']: + dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs)) + node_list += [dep_node] + elif tgt_func in ['shared_module']: + del dep_kwargs['link_with'] + dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs)) + node_list += [dep_node] + else: + dep_var = None + + # Add the nodes to the ast + root_cb.lines += node_list + processed[tgt.name] = {'inc': inc_var, 'src': src_var, 'dep': dep_var, 'tgt': tgt_var, 'func': tgt_func} + name_map[tgt.cmake_name] = tgt.name + + def process_custom_target(tgt: ConverterCustomTarget) -> None: + # 
CMake allows to specify multiple commands in a custom target. + # To map this to meson, a helper script is used to execute all + # commands in order. This additionally allows setting the working + # directory. + + detect_cycle(tgt) + tgt_var = tgt.name # type: str + + def resolve_source(x: T.Union[str, ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> T.Union[str, IdNode, IndexNode]: + if isinstance(x, ConverterTarget): + if x.name not in processed: + process_target(x) + return extract_tgt(x) + if isinstance(x, ConverterCustomTarget): + if x.name not in processed: + process_custom_target(x) + return extract_tgt(x) + elif isinstance(x, CustomTargetReference): + if x.ctgt.name not in processed: + process_custom_target(x.ctgt) + return resolve_ctgt_ref(x) + else: + return x + + # Generate the command list + command = [] # type: T.List[T.Union[str, IdNode, IndexNode]] + command += mesonlib.get_meson_command() + command += ['--internal', 'cmake_run_ctgt'] + command += ['-o', '@OUTPUT@'] + if tgt.original_outputs: + command += ['-O'] + [x.as_posix() for x in tgt.original_outputs] + command += ['-d', tgt.working_dir.as_posix()] + + # Generate the commands. Subcommands are separated by ';;;' + for cmd in tgt.command: + command += [resolve_source(x) for x in cmd] + [';;;'] + + tgt_kwargs = { + 'input': [resolve_source(x) for x in tgt.inputs], + 'output': tgt.outputs, + 'command': command, + 'depends': [resolve_source(x) for x in tgt.depends], + } # type: TYPE_mixed_kwargs + + root_cb.lines += [assign(tgt_var, function('custom_target', [tgt.name], tgt_kwargs))] + processed[tgt.name] = {'inc': None, 'src': None, 'dep': None, 'tgt': tgt_var, 'func': 'custom_target'} + name_map[tgt.cmake_name] = tgt.name + + # Now generate the target function calls + for ctgt in self.custom_targets: + if ctgt.name not in processed: + process_custom_target(ctgt) + for tgt in self.targets: + if tgt.name not in processed: + process_target(tgt) + + self.generated_targets = processed + self.internal_name_map = name_map + return root_cb + + def target_info(self, target: str) -> T.Optional[T.Dict[str, str]]: + # Try resolving the target name + # start by checking if there is a 100% match (excluding the name prefix) + prx_tgt = _sanitize_cmake_name(target) + if prx_tgt in self.generated_targets: + return self.generated_targets[prx_tgt] + # check if there exists a name mapping + if target in self.internal_name_map: + target = self.internal_name_map[target] + assert(target in self.generated_targets) + return self.generated_targets[target] + return None + + def target_list(self) -> T.List[str]: + return list(self.internal_name_map.keys()) diff --git a/meson/mesonbuild/cmake/toolchain.py b/meson/mesonbuild/cmake/toolchain.py new file mode 100644 index 000000000..34b737c79 --- /dev/null +++ b/meson/mesonbuild/cmake/toolchain.py @@ -0,0 +1,259 @@ +# Copyright 2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
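For orientation, the interpreter above is normally driven in four steps: initialise() configures CMake and loads the file API (or server) reply, analyse() turns the code model plus trace into ConverterTarget/ConverterCustomTarget lists, pretend_to_be_meson() emits a meson AST, and target_info()/target_list() let callers map CMake target names to the generated meson variables. The following is a minimal sketch, assuming an already-constructed meson Build, Environment and Backend and that TargetOptions() is default-constructible; the import paths, option values and call sites are illustrative assumptions rather than the exact ones meson uses internally:

# Hypothetical driver; 'build', 'env' and 'backend' are assumed to be meson's
# usual Build/Environment/Backend objects.
from pathlib import Path
from mesonbuild.cmake import CMakeInterpreter          # import path assumed
from mesonbuild.cmake.common import TargetOptions      # import path assumed

def convert_cmake_subproject(build, env, backend):
    cmi = CMakeInterpreter(build, Path('subprojects/foo'),
                           Path(env.get_source_dir()) / 'subprojects' / 'foo',
                           Path('/usr/local'), env, backend)
    cmi.initialise(['-DFOO_ENABLE_BAR=ON'])         # run CMake, load file API / server reply
    cmi.analyse()                                   # build ConverterTarget / ConverterCustomTarget lists
    ast = cmi.pretend_to_be_meson(TargetOptions())  # meson AST: project(), targets, dependencies
    print(cmi.target_list())                        # meson-side names of all generated targets
    return ast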
+ +from pathlib import Path +from .traceparser import CMakeTraceParser +from ..envconfig import CMakeSkipCompilerTest +from ..mesonlib import MachineChoice +from ..compilers import VisualStudioLikeCompiler +from .common import language_map, cmake_get_generator_args +from .. import mlog + +import shutil +import typing as T +from enum import Enum +from textwrap import dedent + +if T.TYPE_CHECKING: + from .executor import CMakeExecutor + from ..envconfig import MachineInfo, Properties, CMakeVariables + from ..environment import Environment + from ..compilers import Compiler + +class CMakeExecScope(Enum): + SUBPROJECT = 'subproject' + DEPENDENCY = 'dependency' + +class CMakeToolchain: + def __init__(self, cmakebin: 'CMakeExecutor', env: 'Environment', for_machine: MachineChoice, exec_scope: CMakeExecScope, build_dir: Path, preload_file: T.Optional[Path] = None) -> None: + self.env = env + self.cmakebin = cmakebin + self.for_machine = for_machine + self.exec_scope = exec_scope + self.preload_file = preload_file + self.build_dir = build_dir + self.build_dir = self.build_dir.resolve() + self.toolchain_file = build_dir / 'CMakeMesonToolchainFile.cmake' + self.cmcache_file = build_dir / 'CMakeCache.txt' + self.minfo = self.env.machines[self.for_machine] + self.properties = self.env.properties[self.for_machine] + self.compilers = self.env.coredata.compilers[self.for_machine] + self.cmakevars = self.env.cmakevars[self.for_machine] + self.cmakestate = self.env.coredata.cmake_cache[self.for_machine] + + self.variables = self.get_defaults() + self.variables.update(self.cmakevars.get_variables()) + + # Determine whether CMake the compiler test should be skipped + skip_status = self.properties.get_cmake_skip_compiler_test() + self.skip_check = skip_status == CMakeSkipCompilerTest.ALWAYS + if skip_status == CMakeSkipCompilerTest.DEP_ONLY and self.exec_scope == CMakeExecScope.DEPENDENCY: + self.skip_check = True + if not self.properties.get_cmake_defaults(): + self.skip_check = False + + assert self.toolchain_file.is_absolute() + + def write(self) -> Path: + if not self.toolchain_file.parent.exists(): + self.toolchain_file.parent.mkdir(parents=True) + self.toolchain_file.write_text(self.generate(), encoding='utf-8') + self.cmcache_file.write_text(self.generate_cache(), encoding='utf-8') + mlog.cmd_ci_include(self.toolchain_file.as_posix()) + return self.toolchain_file + + def get_cmake_args(self) -> T.List[str]: + args = ['-DCMAKE_TOOLCHAIN_FILE=' + self.toolchain_file.as_posix()] + if self.preload_file is not None: + args += ['-DMESON_PRELOAD_FILE=' + self.preload_file.as_posix()] + return args + + @staticmethod + def _print_vars(vars: T.Dict[str, T.List[str]]) -> str: + res = '' + for key, value in vars.items(): + res += 'set(' + key + for i in value: + res += f' "{i}"' + res += ')\n' + return res + + def generate(self) -> str: + res = dedent('''\ + ###################################### + ### AUTOMATICALLY GENERATED FILE ### + ###################################### + + # This file was generated from the configuration in the + # relevant meson machine file. 
See the meson documentation + # https://mesonbuild.com/Machine-files.html for more information + + if(DEFINED MESON_PRELOAD_FILE) + include("${MESON_PRELOAD_FILE}") + endif() + + ''') + + # Escape all \ in the values + for key, value in self.variables.items(): + self.variables[key] = [x.replace('\\', '/') for x in value] + + # Set compiler + if self.skip_check: + self.update_cmake_compiler_state() + res += '# CMake compiler state variables\n' + for lang, vars in self.cmakestate: + res += f'# -- Variables for language {lang}\n' + res += self._print_vars(vars) + res += '\n' + res += '\n' + + # Set variables from the current machine config + res += '# Variables from meson\n' + res += self._print_vars(self.variables) + res += '\n' + + # Add the user provided toolchain file + user_file = self.properties.get_cmake_toolchain_file() + if user_file is not None: + res += dedent(''' + # Load the CMake toolchain file specified by the user + include("{}") + + '''.format(user_file.as_posix())) + + return res + + def generate_cache(self) -> str: + if not self.skip_check: + return '' + + res = '' + for name, v in self.cmakestate.cmake_cache.items(): + res += f'{name}:{v.type}={";".join(v.value)}\n' + return res + + def get_defaults(self) -> T.Dict[str, T.List[str]]: + defaults = {} # type: T.Dict[str, T.List[str]] + + # Do nothing if the user does not want automatic defaults + if not self.properties.get_cmake_defaults(): + return defaults + + # Best effort to map the meson system name to CMAKE_SYSTEM_NAME, which + # is not trivial since CMake lacks a list of all supported + # CMAKE_SYSTEM_NAME values. + SYSTEM_MAP = { + 'android': 'Android', + 'linux': 'Linux', + 'windows': 'Windows', + 'freebsd': 'FreeBSD', + 'darwin': 'Darwin', + } # type: T.Dict[str, str] + + # Only set these in a cross build. 
Otherwise CMake will trip up in native + # builds and thing they are cross (which causes TRY_RUN() to break) + if self.env.is_cross_build(when_building_for=self.for_machine): + defaults['CMAKE_SYSTEM_NAME'] = [SYSTEM_MAP.get(self.minfo.system, self.minfo.system)] + defaults['CMAKE_SYSTEM_PROCESSOR'] = [self.minfo.cpu_family] + + defaults['CMAKE_SIZEOF_VOID_P'] = ['8' if self.minfo.is_64_bit else '4'] + + sys_root = self.properties.get_sys_root() + if sys_root: + defaults['CMAKE_SYSROOT'] = [sys_root] + + def make_abs(exe: str) -> str: + if Path(exe).is_absolute(): + return exe + + p = shutil.which(exe) + if p is None: + return exe + return p + + # Set the compiler variables + for lang, comp_obj in self.compilers.items(): + prefix = 'CMAKE_{}_'.format(language_map.get(lang, lang.upper())) + + exe_list = comp_obj.get_exelist() + if not exe_list: + continue + + if len(exe_list) >= 2 and not self.is_cmdline_option(comp_obj, exe_list[1]): + defaults[prefix + 'COMPILER_LAUNCHER'] = [make_abs(exe_list[0])] + exe_list = exe_list[1:] + + exe_list[0] = make_abs(exe_list[0]) + defaults[prefix + 'COMPILER'] = exe_list + if comp_obj.get_id() == 'clang-cl': + defaults['CMAKE_LINKER'] = comp_obj.get_linker_exelist() + + return defaults + + @staticmethod + def is_cmdline_option(compiler: 'Compiler', arg: str) -> bool: + if isinstance(compiler, VisualStudioLikeCompiler): + return arg.startswith('/') + else: + return arg.startswith('-') + + def update_cmake_compiler_state(self) -> None: + # Check if all variables are already cached + if self.cmakestate.languages.issuperset(self.compilers.keys()): + return + + # Generate the CMakeLists.txt + mlog.debug('CMake Toolchain: Calling CMake once to generate the compiler state') + languages = list(self.compilers.keys()) + lang_ids = [language_map.get(x, x.upper()) for x in languages] + cmake_content = dedent(f''' + cmake_minimum_required(VERSION 3.7) + project(CompInfo {' '.join(lang_ids)}) + ''') + + build_dir = Path(self.env.scratch_dir) / '__CMake_compiler_info__' + build_dir.mkdir(parents=True, exist_ok=True) + cmake_file = build_dir / 'CMakeLists.txt' + cmake_file.write_text(cmake_content, encoding='utf-8') + + # Generate the temporary toolchain file + temp_toolchain_file = build_dir / 'CMakeMesonTempToolchainFile.cmake' + temp_toolchain_file.write_text(CMakeToolchain._print_vars(self.variables), encoding='utf-8') + + # Configure + trace = CMakeTraceParser(self.cmakebin.version(), build_dir) + self.cmakebin.set_exec_mode(print_cmout=False, always_capture_stderr=trace.requires_stderr()) + cmake_args = [] + cmake_args += trace.trace_args() + cmake_args += cmake_get_generator_args(self.env) + cmake_args += [f'-DCMAKE_TOOLCHAIN_FILE={temp_toolchain_file.as_posix()}', '.'] + rc, _, raw_trace = self.cmakebin.call(cmake_args, build_dir=build_dir, disable_cache=True) + + if rc != 0: + mlog.warning('CMake Toolchain: Failed to determine CMake compilers state') + return + + # Parse output + trace.parse(raw_trace) + self.cmakestate.cmake_cache = {**trace.cache} + + vars_by_file = {k.name: v for (k, v) in trace.vars_by_file.items()} + + for lang in languages: + lang_cmake = language_map.get(lang, lang.upper()) + file_name = f'CMake{lang_cmake}Compiler.cmake' + vars = vars_by_file.setdefault(file_name, {}) + vars[f'CMAKE_{lang_cmake}_COMPILER_FORCED'] = ['1'] + self.cmakestate.update(lang, vars) diff --git a/meson/mesonbuild/cmake/traceparser.py b/meson/mesonbuild/cmake/traceparser.py new file mode 100644 index 000000000..4ddc91533 --- /dev/null +++ 
b/meson/mesonbuild/cmake/traceparser.py @@ -0,0 +1,756 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +from .common import CMakeException +from .generator import parse_generator_expressions +from .. import mlog +from ..mesonlib import version_compare + +import typing as T +from pathlib import Path +from functools import lru_cache +import re +import json +import textwrap + +class CMakeTraceLine: + def __init__(self, file_str: str, line: int, func: str, args: T.List[str]) -> None: + self.file = CMakeTraceLine._to_path(file_str) + self.line = line + self.func = func.lower() + self.args = args + + @staticmethod + @lru_cache(maxsize=None) + def _to_path(file_str: str) -> Path: + return Path(file_str) + + def __repr__(self) -> str: + s = 'CMake TRACE: {0}:{1} {2}({3})' + return s.format(self.file, self.line, self.func, self.args) + +class CMakeCacheEntry(T.NamedTuple): + value: T.List[str] + type: str + +class CMakeTarget: + def __init__( + self, + name: str, + target_type: str, + properties: T.Optional[T.Dict[str, T.List[str]]] = None, + imported: bool = False, + tline: T.Optional[CMakeTraceLine] = None + ): + if properties is None: + properties = {} + self.name = name + self.type = target_type + self.properties = properties + self.imported = imported + self.tline = tline + self.depends = [] # type: T.List[str] + self.current_bin_dir = None # type: T.Optional[Path] + self.current_src_dir = None # type: T.Optional[Path] + + def __repr__(self) -> str: + s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}' + propSTR = '' + for i in self.properties: + propSTR += " '{}': {}\n".format(i, self.properties[i]) + return s.format(self.name, self.type, self.imported, propSTR, self.tline) + + def strip_properties(self) -> None: + # Strip the strings in the properties + if not self.properties: + return + for key, val in self.properties.items(): + self.properties[key] = [x.strip() for x in val] + assert all([';' not in x for x in self.properties[key]]) + +class CMakeGeneratorTarget(CMakeTarget): + def __init__(self, name: str) -> None: + super().__init__(name, 'CUSTOM', {}) + self.outputs = [] # type: T.List[Path] + self.command = [] # type: T.List[T.List[str]] + self.working_dir = None # type: T.Optional[Path] + +class CMakeTraceParser: + def __init__(self, cmake_version: str, build_dir: Path, permissive: bool = True) -> None: + self.vars: T.Dict[str, T.List[str]] = {} + self.vars_by_file: T.Dict[Path, T.Dict[str, T.List[str]]] = {} + self.targets: T.Dict[str, CMakeTarget] = {} + self.cache: T.Dict[str, CMakeCacheEntry] = {} + + self.explicit_headers = set() # type: T.Set[Path] + + # T.List of targes that were added with add_custom_command to generate files + self.custom_targets = [] # type: T.List[CMakeGeneratorTarget] + + self.permissive = permissive # type: bool + self.cmake_version = cmake_version 
# type: str + self.trace_file = 'cmake_trace.txt' + self.trace_file_path = build_dir / self.trace_file + self.trace_format = 'json-v1' if version_compare(cmake_version, '>=3.17') else 'human' + + # State for delayed command execution. Delayed command execution is realised + # with a custom CMake file that overrides some functions and adds some + # introspection information to the trace. + self.delayed_commands = [] # type: T.List[str] + self.stored_commands = [] # type: T.List[CMakeTraceLine] + + # All supported functions + self.functions = { + 'set': self._cmake_set, + 'unset': self._cmake_unset, + 'add_executable': self._cmake_add_executable, + 'add_library': self._cmake_add_library, + 'add_custom_command': self._cmake_add_custom_command, + 'add_custom_target': self._cmake_add_custom_target, + 'set_property': self._cmake_set_property, + 'set_target_properties': self._cmake_set_target_properties, + 'target_compile_definitions': self._cmake_target_compile_definitions, + 'target_compile_options': self._cmake_target_compile_options, + 'target_include_directories': self._cmake_target_include_directories, + 'target_link_libraries': self._cmake_target_link_libraries, + 'target_link_options': self._cmake_target_link_options, + 'add_dependencies': self._cmake_add_dependencies, + + # Special functions defined in the preload script. + # These functions do nothing in the CMake code, but have special + # meaning here in the trace parser. + 'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls, + 'meson_ps_reload_vars': self._meson_ps_reload_vars, + 'meson_ps_disabled_function': self._meson_ps_disabled_function, + } # type: T.Dict[str, T.Callable[[CMakeTraceLine], None]] + + def trace_args(self) -> T.List[str]: + arg_map = { + 'human': ['--trace', '--trace-expand'], + 'json-v1': ['--trace-expand', '--trace-format=json-v1'], + } + + base_args = ['--no-warn-unused-cli'] + if not self.requires_stderr(): + base_args += [f'--trace-redirect={self.trace_file}'] + + return arg_map[self.trace_format] + base_args + + def requires_stderr(self) -> bool: + return version_compare(self.cmake_version, '<3.16') + + def parse(self, trace: T.Optional[str] = None) -> None: + # First load the trace (if required) + if not self.requires_stderr(): + if not self.trace_file_path.exists and not self.trace_file_path.is_file(): + raise CMakeException(f'CMake: Trace file "{self.trace_file_path!s}" not found') + trace = self.trace_file_path.read_text(errors='ignore', encoding='utf-8') + if not trace: + raise CMakeException('CMake: The CMake trace was not provided or is empty') + + # Second parse the trace + lexer1 = None + if self.trace_format == 'human': + lexer1 = self._lex_trace_human(trace) + elif self.trace_format == 'json-v1': + lexer1 = self._lex_trace_json(trace) + else: + raise CMakeException(f'CMake: Internal error: Invalid trace format {self.trace_format}. 
Expected [human, json-v1]') + + # Primary pass -- parse everything + for l in lexer1: + # store the function if its execution should be delayed + if l.func in self.delayed_commands: + self.stored_commands += [l] + continue + + # "Execute" the CMake function if supported + fn = self.functions.get(l.func, None) + if(fn): + fn(l) + + # Postprocess + for tgt in self.targets.values(): + tgt.strip_properties() + + def get_first_cmake_var_of(self, var_list: T.List[str]) -> T.List[str]: + # Return the first found CMake variable in list var_list + for i in var_list: + if i in self.vars: + return self.vars[i] + + return [] + + def get_cmake_var(self, var: str) -> T.List[str]: + # Return the value of the CMake variable var or an empty list if var does not exist + if var in self.vars: + return self.vars[var] + + return [] + + def var_to_str(self, var: str) -> T.Optional[str]: + if var in self.vars and self.vars[var]: + return self.vars[var][0] + + return None + + def _str_to_bool(self, expr: T.Union[str, T.List[str]]) -> bool: + if not expr: + return False + if isinstance(expr, list): + expr_str = expr[0] + else: + expr_str = expr + expr_str = expr_str.upper() + return expr_str not in ['0', 'OFF', 'NO', 'FALSE', 'N', 'IGNORE'] and not expr_str.endswith('NOTFOUND') + + def var_to_bool(self, var: str) -> bool: + return self._str_to_bool(self.vars.get(var, [])) + + def _gen_exception(self, function: str, error: str, tline: CMakeTraceLine) -> None: + # Generate an exception if the parser is not in permissive mode + + if self.permissive: + mlog.debug(f'CMake trace warning: {function}() {error}\n{tline}') + return None + raise CMakeException(f'CMake: {function}() {error}\n{tline}') + + def _cmake_set(self, tline: CMakeTraceLine) -> None: + """Handler for the CMake set() function in all variaties. + + comes in three flavors: + set( [PARENT_SCOPE]) + set( CACHE [FORCE]) + set(ENV{} ) + + We don't support the ENV variant, and any uses of it will be ignored + silently. the other two variates are supported, with some caveats: + - we don't properly handle scoping, so calls to set() inside a + function without PARENT_SCOPE set could incorrectly shadow the + outer scope. + - We don't honor the type of CACHE arguments + """ + # DOC: https://cmake.org/cmake/help/latest/command/set.html + + cache_type = None + cache_force = 'FORCE' in tline.args + try: + cache_idx = tline.args.index('CACHE') + cache_type = tline.args[cache_idx + 1] + except (ValueError, IndexError): + pass + + # 1st remove PARENT_SCOPE and CACHE from args + args = [] + for i in tline.args: + if not i or i == 'PARENT_SCOPE': + continue + + # Discard everything after the CACHE keyword + if i == 'CACHE': + break + + args.append(i) + + if len(args) < 1: + return self._gen_exception('set', 'requires at least one argument', tline) + + # Now that we've removed extra arguments all that should be left is the + # variable identifier and the value, join the value back together to + # ensure spaces in the value are correctly handled. This assumes that + # variable names don't have spaces. Please don't do that... 
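# Worked example (illustrative, assumed argument shape): for a traced call like
#   set(MY_PATHS "/opt/a" "/opt/b;/opt/c" CACHE STRING "docstring" FORCE)
# tline.args arrives roughly as
#   ['MY_PATHS', '/opt/a', '/opt/b;/opt/c', 'CACHE', 'STRING', 'docstring', 'FORCE'].
# Everything from CACHE onwards is dropped above, the remaining arguments are joined into
# identifier 'MY_PATHS' and value '/opt/a /opt/b;/opt/c', and splitting on ';' yields
# ['/opt/a /opt/b', '/opt/c'] for self.vars (and, since a CACHE type is present, for the
# CMakeCacheEntry written below).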
+ identifier = args.pop(0) + value = ' '.join(args) + + # Write to the CMake cache instead + if cache_type: + # Honor how the CMake FORCE parameter works + if identifier not in self.cache or cache_force: + self.cache[identifier] = CMakeCacheEntry(value.split(';'), cache_type) + + if not value: + # Same as unset + if identifier in self.vars: + del self.vars[identifier] + else: + self.vars[identifier] = value.split(';') + self.vars_by_file.setdefault(tline.file, {})[identifier] = value.split(';') + + def _cmake_unset(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/unset.html + if len(tline.args) < 1: + return self._gen_exception('unset', 'requires at least one argument', tline) + + if tline.args[0] in self.vars: + del self.vars[tline.args[0]] + + def _cmake_add_executable(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_executable.html + args = list(tline.args) # Make a working copy + + # Make sure the exe is imported + is_imported = True + if 'IMPORTED' not in args: + return self._gen_exception('add_executable', 'non imported executables are not supported', tline) + + args.remove('IMPORTED') + + if len(args) < 1: + return self._gen_exception('add_executable', 'requires at least 1 argument', tline) + + self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}, tline=tline, imported=is_imported) + + def _cmake_add_library(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_library.html + args = list(tline.args) # Make a working copy + + # Make sure the lib is imported + if 'INTERFACE' in args: + args.remove('INTERFACE') + + if len(args) < 1: + return self._gen_exception('add_library', 'interface library name not specified', tline) + + self.targets[args[0]] = CMakeTarget(args[0], 'INTERFACE', {}, tline=tline, imported='IMPORTED' in args) + elif 'IMPORTED' in args: + args.remove('IMPORTED') + + # Now, only look at the first two arguments (target_name and target_type) and ignore the rest + if len(args) < 2: + return self._gen_exception('add_library', 'requires at least 2 arguments', tline) + + self.targets[args[0]] = CMakeTarget(args[0], args[1], {}, tline=tline, imported=True) + elif 'ALIAS' in args: + args.remove('ALIAS') + + # Now, only look at the first two arguments (target_name and target_ref) and ignore the rest + if len(args) < 2: + return self._gen_exception('add_library', 'requires at least 2 arguments', tline) + + # Simulate the ALIAS with INTERFACE_LINK_LIBRARIES + self.targets[args[0]] = CMakeTarget(args[0], 'ALIAS', {'INTERFACE_LINK_LIBRARIES': [args[1]]}, tline=tline) + elif 'OBJECT' in args: + return self._gen_exception('add_library', 'OBJECT libraries are not supported', tline) + else: + self.targets[args[0]] = CMakeTarget(args[0], 'NORMAL', {}, tline=tline) + + def _cmake_add_custom_command(self, tline: CMakeTraceLine, name: T.Optional[str] = None) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_custom_command.html + args = self._flatten_args(list(tline.args)) # Commands can be passed as ';' separated lists + + if not args: + return self._gen_exception('add_custom_command', 'requires at least 1 argument', tline) + + # Skip the second function signature + if args[0] == 'TARGET': + return self._gen_exception('add_custom_command', 'TARGET syntax is currently not supported', tline) + + magic_keys = ['OUTPUT', 'COMMAND', 'MAIN_DEPENDENCY', 'DEPENDS', 'BYPRODUCTS', + 'IMPLICIT_DEPENDS', 'WORKING_DIRECTORY', 'COMMENT', 'DEPFILE', + 
'JOB_POOL', 'VERBATIM', 'APPEND', 'USES_TERMINAL', 'COMMAND_EXPAND_LISTS'] + + target = CMakeGeneratorTarget(name) + + def handle_output(key: str, target: CMakeGeneratorTarget) -> None: + target.outputs += [Path(key)] + + def handle_command(key: str, target: CMakeGeneratorTarget) -> None: + if key == 'ARGS': + return + target.command[-1] += [key] + + def handle_depends(key: str, target: CMakeGeneratorTarget) -> None: + target.depends += [key] + + working_dir = None + def handle_working_dir(key: str, target: CMakeGeneratorTarget) -> None: + nonlocal working_dir + if working_dir is None: + working_dir = key + else: + working_dir += ' ' + working_dir += key + + fn = None + + for i in args: + if i in magic_keys: + if i == 'OUTPUT': + fn = handle_output + elif i == 'DEPENDS': + fn = handle_depends + elif i == 'WORKING_DIRECTORY': + fn = handle_working_dir + elif i == 'COMMAND': + fn = handle_command + target.command += [[]] + else: + fn = None + continue + + if fn is not None: + fn(i, target) + + cbinary_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_BINARY_DIR') + csource_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR') + + target.working_dir = Path(working_dir) if working_dir else None + target.current_bin_dir = Path(cbinary_dir) if cbinary_dir else None + target.current_src_dir = Path(csource_dir) if csource_dir else None + target.outputs = [Path(x) for x in self._guess_files([str(y) for y in target.outputs])] + target.depends = self._guess_files(target.depends) + target.command = [self._guess_files(x) for x in target.command] + + self.custom_targets += [target] + if name: + self.targets[name] = target + + def _cmake_add_custom_target(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_custom_target.html + # Only the first parameter (the target name) is interesting + if len(tline.args) < 1: + return self._gen_exception('add_custom_target', 'requires at least one argument', tline) + + # It's pretty much the same as a custom command + self._cmake_add_custom_command(tline, tline.args[0]) + + def _cmake_set_property(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/set_property.html + args = list(tline.args) + + scope = args.pop(0) + + append = False + targets = [] + while args: + curr = args.pop(0) + # XXX: APPEND_STRING is specifically *not* supposed to create a + # list, is treating them as aliases really okay?
+ if curr == 'APPEND' or curr == 'APPEND_STRING': + append = True + continue + + if curr == 'PROPERTY': + break + + targets += curr.split(';') + + if not args: + return self._gen_exception('set_property', 'failed to parse argument list', tline) + + if len(args) == 1: + # Tries to set property to nothing so nothing has to be done + return + + identifier = args.pop(0) + if self.trace_format == 'human': + value = ' '.join(args).split(';') + else: + value = [y for x in args for y in x.split(';')] + if not value: + return + + def do_target(t: str) -> None: + if t not in self.targets: + return self._gen_exception('set_property', f'TARGET {t} not found', tline) + + tgt = self.targets[t] + if identifier not in tgt.properties: + tgt.properties[identifier] = [] + + if append: + tgt.properties[identifier] += value + else: + tgt.properties[identifier] = value + + def do_source(src: str) -> None: + if identifier != 'HEADER_FILE_ONLY' or not self._str_to_bool(value): + return + + current_src_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR') + if not current_src_dir: + mlog.warning(textwrap.dedent('''\ + CMake trace: set_property(SOURCE) called before the preload script was loaded. + Unable to determine CMAKE_CURRENT_SOURCE_DIR. This can lead to build errors. + ''')) + current_src_dir = '.' + + cur_p = Path(current_src_dir) + src_p = Path(src) + + if not src_p.is_absolute(): + src_p = cur_p / src_p + self.explicit_headers.add(src_p) + + if scope == 'TARGET': + for i in targets: + do_target(i) + elif scope == 'SOURCE': + files = self._guess_files(targets) + for i in files: + do_source(i) + + def _cmake_set_target_properties(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/set_target_properties.html + args = list(tline.args) + + targets = [] + while args: + curr = args.pop(0) + if curr == 'PROPERTIES': + break + + targets.append(curr) + + # Now we need to try to reconstitute the original quoted format of the + # arguments, as a property value could have spaces in it. Unlike + # set_property() this is not context free. There are two approaches I + # can think of, both have drawbacks: + # + # 1. Assume that the property will be capitalized ([A-Z_]), this is + # convention but cmake doesn't require it. + # 2. Maintain a copy of the list here: https://cmake.org/cmake/help/latest/manual/cmake-properties.7.html#target-properties + # + # Neither of these is awesome for obvious reasons. I'm going to try + # option 1 first and fall back to 2, as 1 requires less code and less + # synchronization for cmake changes.
+ # + # With the JSON output format, introduced in CMake 3.17, spaces are + # handled properly and we don't have to do either options + + arglist = [] # type: T.List[T.Tuple[str, T.List[str]]] + if self.trace_format == 'human': + name = args.pop(0) + values = [] # type: T.List[str] + prop_regex = re.compile(r'^[A-Z_]+$') + for a in args: + if prop_regex.match(a): + if values: + arglist.append((name, ' '.join(values).split(';'))) + name = a + values = [] + else: + values.append(a) + if values: + arglist.append((name, ' '.join(values).split(';'))) + else: + arglist = [(x[0], x[1].split(';')) for x in zip(args[::2], args[1::2])] + + for name, value in arglist: + for i in targets: + if i not in self.targets: + return self._gen_exception('set_target_properties', f'TARGET {i} not found', tline) + + self.targets[i].properties[name] = value + + def _cmake_add_dependencies(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_dependencies.html + args = list(tline.args) + + if len(args) < 2: + return self._gen_exception('add_dependencies', 'takes at least 2 arguments', tline) + + target = self.targets.get(args[0]) + if not target: + return self._gen_exception('add_dependencies', 'target not found', tline) + + for i in args[1:]: + target.depends += i.split(';') + + def _cmake_target_compile_definitions(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_compile_definitions.html + self._parse_common_target_options('target_compile_definitions', 'COMPILE_DEFINITIONS', 'INTERFACE_COMPILE_DEFINITIONS', tline) + + def _cmake_target_compile_options(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_compile_options.html + self._parse_common_target_options('target_compile_options', 'COMPILE_OPTIONS', 'INTERFACE_COMPILE_OPTIONS', tline) + + def _cmake_target_include_directories(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_include_directories.html + self._parse_common_target_options('target_include_directories', 'INCLUDE_DIRECTORIES', 'INTERFACE_INCLUDE_DIRECTORIES', tline, ignore=['SYSTEM', 'BEFORE'], paths=True) + + def _cmake_target_link_options(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_link_options.html + self._parse_common_target_options('target_link_options', 'LINK_OPTIONS', 'INTERFACE_LINK_OPTIONS', tline) + + def _cmake_target_link_libraries(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_link_libraries.html + self._parse_common_target_options('target_link_options', 'LINK_LIBRARIES', 'INTERFACE_LINK_LIBRARIES', tline) + + def _parse_common_target_options(self, func: str, private_prop: str, interface_prop: str, tline: CMakeTraceLine, ignore: T.Optional[T.List[str]] = None, paths: bool = False) -> None: + if ignore is None: + ignore = ['BEFORE'] + + args = list(tline.args) + + if len(args) < 1: + return self._gen_exception(func, 'requires at least one argument', tline) + + target = args[0] + if target not in self.targets: + return self._gen_exception(func, f'TARGET {target} not found', tline) + + interface = [] + private = [] + + mode = 'PUBLIC' + for i in args[1:]: + if i in ignore: + continue + + if i in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'PRIVATE', 'LINK_PUBLIC', 'LINK_PRIVATE']: + mode = i + continue + + if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']: + interface += 
i.split(';') + + if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']: + private += i.split(';') + + if paths: + interface = self._guess_files(interface) + private = self._guess_files(private) + + interface = [x for x in interface if x] + private = [x for x in private if x] + + for j in [(private_prop, private), (interface_prop, interface)]: + if not j[0] in self.targets[target].properties: + self.targets[target].properties[j[0]] = [] + + self.targets[target].properties[j[0]] += j[1] + + def _meson_ps_execute_delayed_calls(self, tline: CMakeTraceLine) -> None: + for l in self.stored_commands: + fn = self.functions.get(l.func, None) + if(fn): + fn(l) + + # clear the stored commands + self.stored_commands = [] + + def _meson_ps_reload_vars(self, tline: CMakeTraceLine) -> None: + self.delayed_commands = self.get_cmake_var('MESON_PS_DELAYED_CALLS') + + def _meson_ps_disabled_function(self, tline: CMakeTraceLine) -> None: + args = list(tline.args) + if not args: + mlog.error('Invalid preload.cmake script! At least one argument to `meson_ps_disabled_function` is expected') + return + mlog.warning(f'The CMake function "{args[0]}" was disabled to avoid compatibility issues with Meson.') + + def _lex_trace_human(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]: + # The trace format is: '(): ( )\n' + reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) ?\)\s*\n', re.MULTILINE) + reg_other = re.compile(r'[^\n]*\n') + loc = 0 + while loc < len(trace): + mo_file_line = reg_tline.match(trace, loc) + if not mo_file_line: + skip_match = reg_other.match(trace, loc) + if not skip_match: + print(trace[loc:]) + raise CMakeException('Failed to parse CMake trace') + + loc = skip_match.end() + continue + + loc = mo_file_line.end() + + file = mo_file_line.group(1) + line = mo_file_line.group(3) + func = mo_file_line.group(4) + args = mo_file_line.group(5) + args = parse_generator_expressions(args) + argl = args.split(' ') + argl = list(map(lambda x: x.strip(), argl)) + + yield CMakeTraceLine(file, int(line), func, argl) + + def _lex_trace_json(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]: + lines = trace.splitlines(keepends=False) + lines.pop(0) # The first line is the version + for i in lines: + data = json.loads(i) + assert isinstance(data['file'], str) + assert isinstance(data['line'], int) + assert isinstance(data['cmd'], str) + assert isinstance(data['args'], list) + args = data['args'] + for j in args: + assert isinstance(j, str) + args = [parse_generator_expressions(x) for x in args] + yield CMakeTraceLine(data['file'], data['line'], data['cmd'], args) + + def _flatten_args(self, args: T.List[str]) -> T.List[str]: + # Split lists in arguments + res = [] # type: T.List[str] + for i in args: + res += i.split(';') + return res + + def _guess_files(self, broken_list: T.List[str]) -> T.List[str]: + # Nothing has to be done for newer formats + if self.trace_format != 'human': + return broken_list + + # Try joining file paths that contain spaces + + reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$') + reg_end = re.compile(r'^.*\.[a-zA-Z]+$') + + fixed_list = [] # type: T.List[str] + curr_str = None # type: T.Optional[str] + path_found = False # type: bool + + for i in broken_list: + if curr_str is None: + curr_str = i + path_found = False + elif Path(curr_str).is_file(): + # Abort concatenation if curr_str is an existing file + fixed_list += [curr_str] + curr_str = i + path_found = False + elif not reg_start.match(curr_str): + # Abort concatenation if curr_str 
no longer matches the regex + fixed_list += [curr_str] + curr_str = i + path_found = False + elif reg_end.match(i): + # File detected + curr_str = f'{curr_str} {i}' + fixed_list += [curr_str] + curr_str = None + path_found = False + elif Path(f'{curr_str} {i}').exists(): + # Path detected + curr_str = f'{curr_str} {i}' + path_found = True + elif path_found: + # Add path to fixed_list after ensuring the whole path is in curr_str + fixed_list += [curr_str] + curr_str = i + path_found = False + else: + curr_str = f'{curr_str} {i}' + path_found = False + + if curr_str: + fixed_list += [curr_str] + return fixed_list diff --git a/meson/mesonbuild/compilers/__init__.py b/meson/mesonbuild/compilers/__init__.py new file mode 100644 index 000000000..3d39c9b2f --- /dev/null +++ b/meson/mesonbuild/compilers/__init__.py @@ -0,0 +1,250 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Public symbols for compilers sub-package when using 'from . import compilers' +__all__ = [ + 'Compiler', + 'RunResult', + + 'all_languages', + 'base_options', + 'clib_langs', + 'clink_langs', + 'c_suffixes', + 'cpp_suffixes', + 'get_base_compile_args', + 'get_base_link_args', + 'is_assembly', + 'is_header', + 'is_library', + 'is_llvm_ir', + 'is_object', + 'is_source', + 'is_known_suffix', + 'lang_suffixes', + 'sort_clink', + + 'compiler_from_language', + 'detect_compiler_for', + 'detect_static_linker', + 'detect_c_compiler', + 'detect_cpp_compiler', + 'detect_cuda_compiler', + 'detect_fortran_compiler', + 'detect_objc_compiler', + 'detect_objcpp_compiler', + 'detect_java_compiler', + 'detect_cs_compiler', + 'detect_vala_compiler', + 'detect_rust_compiler', + 'detect_d_compiler', + 'detect_swift_compiler', + + 'AppleClangCCompiler', + 'AppleClangCPPCompiler', + 'AppleClangObjCCompiler', + 'AppleClangObjCPPCompiler', + 'ArmCCompiler', + 'ArmCPPCompiler', + 'ArmclangCCompiler', + 'ArmclangCPPCompiler', + 'CCompiler', + 'ClangCCompiler', + 'ClangCompiler', + 'ClangCPPCompiler', + 'ClangObjCCompiler', + 'ClangObjCPPCompiler', + 'ClangClCCompiler', + 'ClangClCPPCompiler', + 'CPPCompiler', + 'DCompiler', + 'DmdDCompiler', + 'FortranCompiler', + 'G95FortranCompiler', + 'GnuCCompiler', + 'ElbrusCCompiler', + 'EmscriptenCCompiler', + 'GnuCompiler', + 'GnuLikeCompiler', + 'GnuCPPCompiler', + 'ElbrusCPPCompiler', + 'EmscriptenCPPCompiler', + 'GnuDCompiler', + 'GnuFortranCompiler', + 'ElbrusFortranCompiler', + 'FlangFortranCompiler', + 'GnuObjCCompiler', + 'GnuObjCPPCompiler', + 'IntelGnuLikeCompiler', + 'IntelVisualStudioLikeCompiler', + 'IntelCCompiler', + 'IntelCPPCompiler', + 'IntelClCCompiler', + 'IntelClCPPCompiler', + 'IntelFortranCompiler', + 'IntelClFortranCompiler', + 'JavaCompiler', + 'LLVMDCompiler', + 'MonoCompiler', + 'CudaCompiler', + 'VisualStudioCsCompiler', + 'NAGFortranCompiler', + 'ObjCCompiler', + 'ObjCPPCompiler', + 'Open64FortranCompiler', + 'PathScaleFortranCompiler', + 'NvidiaHPC_CCompiler', + 'NvidiaHPC_CPPCompiler', + 'NvidiaHPC_FortranCompiler', + 'PGICCompiler', + 
'PGICPPCompiler', + 'PGIFortranCompiler', + 'RustCompiler', + 'CcrxCCompiler', + 'CcrxCPPCompiler', + 'Xc16CCompiler', + 'CompCertCCompiler', + 'C2000CCompiler', + 'C2000CPPCompiler', + 'SunFortranCompiler', + 'SwiftCompiler', + 'ValaCompiler', + 'VisualStudioLikeCompiler', + 'VisualStudioCCompiler', + 'VisualStudioCPPCompiler', + 'CythonCompiler', +] + +# Bring symbols from each module into compilers sub-package namespace +from .compilers import ( + Compiler, + RunResult, + all_languages, + base_options, + clib_langs, + clink_langs, + c_suffixes, + cpp_suffixes, + get_base_compile_args, + get_base_link_args, + is_header, + is_source, + is_assembly, + is_llvm_ir, + is_object, + is_library, + is_known_suffix, + lang_suffixes, + LANGUAGES_USING_LDFLAGS, + sort_clink, +) +from .detect import ( + compiler_from_language, + detect_compiler_for, + detect_static_linker, + detect_c_compiler, + detect_cpp_compiler, + detect_cuda_compiler, + detect_objc_compiler, + detect_objcpp_compiler, + detect_fortran_compiler, + detect_java_compiler, + detect_cs_compiler, + detect_vala_compiler, + detect_rust_compiler, + detect_d_compiler, + detect_swift_compiler, +) +from .c import ( + CCompiler, + AppleClangCCompiler, + ArmCCompiler, + ArmclangCCompiler, + ClangCCompiler, + ClangClCCompiler, + GnuCCompiler, + ElbrusCCompiler, + EmscriptenCCompiler, + IntelCCompiler, + IntelClCCompiler, + NvidiaHPC_CCompiler, + PGICCompiler, + CcrxCCompiler, + Xc16CCompiler, + CompCertCCompiler, + C2000CCompiler, + VisualStudioCCompiler, +) +from .cpp import ( + CPPCompiler, + AppleClangCPPCompiler, + ArmCPPCompiler, + ArmclangCPPCompiler, + ClangCPPCompiler, + ClangClCPPCompiler, + GnuCPPCompiler, + ElbrusCPPCompiler, + EmscriptenCPPCompiler, + IntelCPPCompiler, + IntelClCPPCompiler, + NvidiaHPC_CPPCompiler, + PGICPPCompiler, + CcrxCPPCompiler, + C2000CPPCompiler, + VisualStudioCPPCompiler, +) +from .cs import MonoCompiler, VisualStudioCsCompiler +from .d import ( + DCompiler, + DmdDCompiler, + GnuDCompiler, + LLVMDCompiler, +) +from .cuda import CudaCompiler +from .fortran import ( + FortranCompiler, + G95FortranCompiler, + GnuFortranCompiler, + ElbrusFortranCompiler, + FlangFortranCompiler, + IntelFortranCompiler, + IntelClFortranCompiler, + NAGFortranCompiler, + Open64FortranCompiler, + PathScaleFortranCompiler, + NvidiaHPC_FortranCompiler, + PGIFortranCompiler, + SunFortranCompiler, +) +from .java import JavaCompiler +from .objc import ( + ObjCCompiler, + AppleClangObjCCompiler, + ClangObjCCompiler, + GnuObjCCompiler, +) +from .objcpp import ( + ObjCPPCompiler, + AppleClangObjCPPCompiler, + ClangObjCPPCompiler, + GnuObjCPPCompiler, +) +from .rust import RustCompiler +from .swift import SwiftCompiler +from .vala import ValaCompiler +from .mixins.visualstudio import VisualStudioLikeCompiler +from .mixins.gnu import GnuCompiler, GnuLikeCompiler +from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler +from .mixins.clang import ClangCompiler +from .cython import CythonCompiler diff --git a/meson/mesonbuild/compilers/c.py b/meson/mesonbuild/compilers/c.py new file mode 100644 index 000000000..8f6218195 --- /dev/null +++ b/meson/mesonbuild/compilers/c.py @@ -0,0 +1,714 @@ +# Copyright 2012-2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os.path +import typing as T + +from .. import coredata +from .. import mlog +from ..mesonlib import MachineChoice, MesonException, version_compare, OptionKey +from .c_function_attributes import C_FUNC_ATTRIBUTES +from .mixins.clike import CLikeCompiler +from .mixins.ccrx import CcrxCompiler +from .mixins.xc16 import Xc16Compiler +from .mixins.compcert import CompCertCompiler +from .mixins.c2000 import C2000Compiler +from .mixins.arm import ArmCompiler, ArmclangCompiler +from .mixins.visualstudio import MSVCCompiler, ClangClCompiler +from .mixins.gnu import GnuCompiler +from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler +from .mixins.clang import ClangCompiler +from .mixins.elbrus import ElbrusCompiler +from .mixins.pgi import PGICompiler +from .mixins.emscripten import EmscriptenMixin +from .compilers import ( + gnu_winlibs, + msvc_winlibs, + Compiler, +) + +if T.TYPE_CHECKING: + from ..coredata import KeyedOptionDictType + from ..dependencies import Dependency + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker + from ..programs import ExternalProgram + + CompilerMixinBase = Compiler +else: + CompilerMixinBase = object + + + +class CCompiler(CLikeCompiler, Compiler): + + @staticmethod + def attribute_check_func(name: str) -> str: + try: + return C_FUNC_ATTRIBUTES[name] + except KeyError: + raise MesonException(f'Unknown function attribute "{name}"') + + language = 'c' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + # If a child ObjC or CPP class has already set it, don't set it ourselves + Compiler.__init__(self, exelist, version, for_machine, info, + is_cross=is_cross, full_version=full_version, linker=linker) + CLikeCompiler.__init__(self, exe_wrapper) + + def get_no_stdinc_args(self) -> T.List[str]: + return ['-nostdinc'] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'int main(void) { int class=0; return class; }\n' + return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) + + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} + t = '''{prefix} + #include <{header}> + int main(void) {{ + /* If it's not defined as a macro, try to use as a symbol */ + #ifndef {symbol} + {symbol}; + #endif + return 0; + }}''' + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + opts.update({ + OptionKey('std', machine=self.for_machine, lang=self.language): coredata.UserComboOption( + 'C language standard to use', + ['none'], + 'none', + ) + }) + return opts + + +class 
_ClangCStds(CompilerMixinBase): + + """Mixin class for clang based compilers for setting C standards. + + This is used by both ClangCCompiler and ClangClCompiler, as they share + the same versions + """ + + _C17_VERSION = '>=6.0.0' + _C18_VERSION = '>=8.0.0' + _C2X_VERSION = '>=9.0.0' + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + c_stds = ['c89', 'c99', 'c11'] + g_stds = ['gnu89', 'gnu99', 'gnu11'] + # https://releases.llvm.org/6.0.0/tools/clang/docs/ReleaseNotes.html + # https://en.wikipedia.org/wiki/Xcode#Latest_versions + if version_compare(self.version, self._C17_VERSION): + c_stds += ['c17'] + g_stds += ['gnu17'] + if version_compare(self.version, self._C18_VERSION): + c_stds += ['c18'] + g_stds += ['gnu18'] + if version_compare(self.version, self._C2X_VERSION): + c_stds += ['c2x'] + g_stds += ['gnu2x'] + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + g_stds + return opts + + +class ClangCCompiler(_ClangCStds, ClangCompiler, CCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version) + ClangCompiler.__init__(self, defines) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + OptionKey('winlibs', machine=self.for_machine, lang=self.language): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. + libs = options[OptionKey('winlibs', machine=self.for_machine, lang=self.language)].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + +class AppleClangCCompiler(ClangCCompiler): + + """Handle the differences between Apple Clang and Vanilla Clang. + + Right now this just handles the differences between the versions that new + C standards were added. 
+ """ + + _C17_VERSION = '>=10.0.0' + _C18_VERSION = '>=11.0.0' + _C2X_VERSION = '>=11.0.0' + + +class EmscriptenCCompiler(EmscriptenMixin, ClangCCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + if not is_cross: + raise MesonException('Emscripten compiler can only be used for cross compilation.') + ClangCCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper=exe_wrapper, linker=linker, + defines=defines, full_version=full_version) + self.id = 'emscripten' + + +class ArmclangCCompiler(ArmclangCompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ArmclangCompiler.__init__(self) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c90', 'c99', 'c11', 'gnu90', 'gnu99', 'gnu11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + +class GnuCCompiler(GnuCompiler, CCompiler): + + _C18_VERSION = '>=8.0.0' + _C2X_VERSION = '>=9.0.0' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version) + GnuCompiler.__init__(self, defines) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + c_stds = ['c89', 'c99', 'c11'] + g_stds = ['gnu89', 'gnu99', 'gnu11'] + if version_compare(self.version, self._C18_VERSION): + c_stds += ['c17', 'c18'] + g_stds += ['gnu17', 'gnu18'] + if version_compare(self.version, self._C2X_VERSION): + c_stds += ['c2x'] + g_stds += ['gnu2x'] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + c_stds + g_stds + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + key.evolve('winlibs'): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def 
get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', lang=self.language, machine=self.for_machine)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typeddict mypy can't figure this out + libs: T.List[str] = options[OptionKey('winlibs', lang=self.language, machine=self.for_machine)].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return ['-fpch-preprocess', '-include', os.path.basename(header)] + + +class PGICCompiler(PGICompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class NvidiaHPC_CCompiler(PGICompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + self.id = 'nvidia_hpc' + + +class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + GnuCCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, defines=defines, + linker=linker, full_version=full_version) + ElbrusCompiler.__init__(self) + + # It does support some various ISO standards and c/gnu 90, 9x, 1x in addition to those which GNU CC supports. + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = [ + 'none', 'c89', 'c90', 'c9x', 'c99', 'c1x', 'c11', + 'gnu89', 'gnu90', 'gnu9x', 'gnu99', 'gnu1x', 'gnu11', + 'iso9899:2011', 'iso9899:1990', 'iso9899:199409', 'iso9899:1999', + ] + return opts + + # Elbrus C compiler does not have lchmod, but there is only linker warning, not compiler error. + # So we should explicitly fail at this case. 
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + if funcname == 'lchmod': + return False, False + else: + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) + + +class IntelCCompiler(IntelGnuLikeCompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + IntelGnuLikeCompiler.__init__(self) + self.lang_header = 'c-header' + default_warn_args = ['-Wall', '-w3', '-diag-disable:remark'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + c_stds = ['c89', 'c99'] + g_stds = ['gnu89', 'gnu99'] + if version_compare(self.version, '>=16.0.0'): + c_stds += ['c11'] + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + g_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + +class VisualStudioLikeCCompilerMixin(CompilerMixinBase): + + """Shared methods that apply to MSVC-like C compilers.""" + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + opts.update({ + OptionKey('winlibs', machine=self.for_machine, lang=self.language): coredata.UserArrayOption( + 'Windows libs to link against.', + msvc_winlibs, + ), + }) + return opts + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + # need a TypeDict to make this work + key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + libs = options[key].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + + +class VisualStudioCCompiler(MSVCCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + + _C11_VERSION = '>=19.28' + _C17_VERSION = '>=19.28' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + MSVCCompiler.__init__(self, target) + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + c_stds = ['c89', 'c99'] + # Need to have these to be compatible with projects + # that set c_std to e.g. gnu99. 
+ # https://github.com/mesonbuild/meson/issues/7611 + g_stds = ['gnu89', 'gnu90', 'gnu9x', 'gnu99'] + if version_compare(self.version, self._C11_VERSION): + c_stds += ['c11'] + g_stds += ['gnu1x', 'gnu11'] + if version_compare(self.version, self._C17_VERSION): + c_stds += ['c17', 'c18'] + g_stds += ['gnu17', 'gnu18'] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + c_stds + g_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value.startswith('gnu'): + mlog.log_once( + 'cl.exe does not actually support gnu standards, and meson ' + 'will instead demote to the nearest ISO C standard. This ' + 'may cause compilation to fail.') + # As of MVSC 16.8, /std:c11 and /std:c17 are the only valid C standard options. + if std.value in {'c11', 'gnu1x', 'gnu11'}: + args.append('/std:c11') + elif std.value in {'c17', 'c18', 'gnu17', 'gnu18'}: + args.append('/std:c17') + return args + + +class ClangClCCompiler(_ClangCStds, ClangClCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + ClangClCompiler.__init__(self, target) + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key].value + if std != "none": + return [f'/clang:-std={std}'] + return [] + + +class IntelClCCompiler(IntelVisualStudioLikeCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + + """Intel "ICL" compiler abstraction.""" + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + IntelVisualStudioLikeCompiler.__init__(self, target) + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'c11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value == 'c89': + mlog.log_once("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.") + elif std.value != 'none': + args.append('/Qstd:' + std.value) + return args + + +class ArmCCompiler(ArmCompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + 
full_version=full_version) + ArmCompiler.__init__(self) + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'c11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('--' + std.value) + return args + + +class CcrxCCompiler(CcrxCompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + CcrxCompiler.__init__(self) + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return ['-nologo'] + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99'] + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value == 'c89': + args.append('-lang=c') + elif std.value == 'c99': + args.append('-lang=c99') + return args + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-optimize=0'] + + def get_output_args(self, target: str) -> T.List[str]: + return [f'-output=obj={target}'] + + def get_werror_args(self) -> T.List[str]: + return ['-change_message=error'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' 
+ return ['-include=' + path] + + +class Xc16CCompiler(Xc16Compiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + Xc16Compiler.__init__(self) + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'gnu89', 'gnu99'] + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('-ansi') + args.append('-std=' + std.value) + return args + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, target: str) -> T.List[str]: + return [f'-o{target}'] + + def get_werror_args(self) -> T.List[str]: + return ['-change_message=error'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' + return ['-I' + path] + +class CompCertCCompiler(CompCertCompiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + CompCertCompiler.__init__(self) + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, target: str) -> T.List[str]: + return [f'-o{target}'] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' 
+ return ['-I' + path] + +class C2000CCompiler(C2000Compiler, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + C2000Compiler.__init__(self) + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return [] + + def get_options(self) -> 'KeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'c11'] + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('--' + std.value) + return args + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-Ooff'] + + def get_output_args(self, target: str) -> T.List[str]: + return [f'--output_file={target}'] + + def get_werror_args(self) -> T.List[str]: + return ['-change_message=error'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' + return ['--include_path=' + path] diff --git a/meson/mesonbuild/compilers/c_function_attributes.py b/meson/mesonbuild/compilers/c_function_attributes.py new file mode 100644 index 000000000..f31229e09 --- /dev/null +++ b/meson/mesonbuild/compilers/c_function_attributes.py @@ -0,0 +1,132 @@ +# These functions are based on the following code: +# https://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_gcc_func_attribute.m4, +# which is licensed under the following terms: +# +# Copyright (c) 2013 Gabriele Svelto +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. 
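# Illustrative sketch, not part of the vendored Meson sources: it shows how small C
# snippets like those in C_FUNC_ATTRIBUTES below can be compiled to probe whether a
# given __attribute__ is supported. Invoking 'cc' directly with -Werror is an assumption
# made for this standalone example; Meson itself performs such checks through its
# compiler classes rather than by shelling out like this.
import os
import subprocess
import tempfile

EXAMPLE_SNIPPET = 'int foo(void) __attribute__((warn_unused_result));'

def snippet_compiles(snippet: str, compiler: str = 'cc') -> bool:
    # Write the snippet to a scratch file and compile it with warnings promoted to
    # errors, so an unknown or ignored attribute makes the probe fail.
    with tempfile.TemporaryDirectory() as tmpdir:
        src = os.path.join(tmpdir, 'check.c')
        obj = os.path.join(tmpdir, 'check.o')
        with open(src, 'w') as f:
            f.write(snippet + '\n')
        result = subprocess.run([compiler, '-Werror', '-c', src, '-o', obj],
                                capture_output=True)
        return result.returncode == 0

if __name__ == '__main__':
    print('warn_unused_result supported:', snippet_compiles(EXAMPLE_SNIPPET))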
+# + +C_FUNC_ATTRIBUTES = { + 'alias': ''' + int foo(void) { return 0; } + int bar(void) __attribute__((alias("foo")));''', + 'aligned': + 'int foo(void) __attribute__((aligned(32)));', + 'alloc_size': + 'void *foo(int a) __attribute__((alloc_size(1)));', + 'always_inline': + 'inline __attribute__((always_inline)) int foo(void) { return 0; }', + 'artificial': + 'inline __attribute__((artificial)) int foo(void) { return 0; }', + 'cold': + 'int foo(void) __attribute__((cold));', + 'const': + 'int foo(void) __attribute__((const));', + 'constructor': + 'int foo(void) __attribute__((constructor));', + 'constructor_priority': + 'int foo( void ) __attribute__((__constructor__(65535/2)));', + 'deprecated': + 'int foo(void) __attribute__((deprecated("")));', + 'destructor': + 'int foo(void) __attribute__((destructor));', + 'dllexport': + '__declspec(dllexport) int foo(void) { return 0; }', + 'dllimport': + '__declspec(dllimport) int foo(void);', + 'error': + 'int foo(void) __attribute__((error("")));', + 'externally_visible': + 'int foo(void) __attribute__((externally_visible));', + 'fallthrough': ''' + int foo( void ) { + switch (0) { + case 1: __attribute__((fallthrough)); + case 2: break; + } + return 0; + };''', + 'flatten': + 'int foo(void) __attribute__((flatten));', + 'format': + 'int foo(const char * p, ...) __attribute__((format(printf, 1, 2)));', + 'format_arg': + 'char * foo(const char * p) __attribute__((format_arg(1)));', + 'force_align_arg_pointer': + '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }', + 'gnu_inline': + 'inline __attribute__((gnu_inline)) int foo(void) { return 0; }', + 'hot': + 'int foo(void) __attribute__((hot));', + 'ifunc': + ('int my_foo(void) { return 0; }' + 'static int (*resolve_foo(void))(void) { return my_foo; }' + 'int foo(void) __attribute__((ifunc("resolve_foo")));'), + 'leaf': + '__attribute__((leaf)) int foo(void) { return 0; }', + 'malloc': + 'int *foo(void) __attribute__((malloc));', + 'noclone': + 'int foo(void) __attribute__((noclone));', + 'noinline': + '__attribute__((noinline)) int foo(void) { return 0; }', + 'nonnull': + 'int foo(char * p) __attribute__((nonnull(1)));', + 'noreturn': + 'int foo(void) __attribute__((noreturn));', + 'nothrow': + 'int foo(void) __attribute__((nothrow));', + 'optimize': + '__attribute__((optimize(3))) int foo(void) { return 0; }', + 'packed': + 'struct __attribute__((packed)) foo { int bar; };', + 'pure': + 'int foo(void) __attribute__((pure));', + 'returns_nonnull': + 'int *foo(void) __attribute__((returns_nonnull));', + 'unused': + 'int foo(void) __attribute__((unused));', + 'used': + 'int foo(void) __attribute__((used));', + 'visibility': ''' + int foo_def(void) __attribute__((visibility("default"))); + int foo_hid(void) __attribute__((visibility("hidden"))); + int foo_int(void) __attribute__((visibility("internal")));''', + 'visibility:default': + 'int foo(void) __attribute__((visibility("default")));', + 'visibility:hidden': + 'int foo(void) __attribute__((visibility("hidden")));', + 'visibility:internal': + 'int foo(void) __attribute__((visibility("internal")));', + 'visibility:protected': + 'int foo(void) __attribute__((visibility("protected")));', + 'warning': + 'int foo(void) __attribute__((warning("")));', + 'warn_unused_result': + 'int foo(void) __attribute__((warn_unused_result));', + 'weak': + 'int foo(void) __attribute__((weak));', + 'weakref': ''' + static int foo(void) { return 0; } + static int var(void) __attribute__((weakref("foo")));''', +} + +CXX_FUNC_ATTRIBUTES = { + # Alias 
must be applied to the mangled name in C++ + 'alias': + ('extern "C" {' + 'int foo(void) { return 0; }' + '}' + 'int bar(void) __attribute__((alias("foo")));' + ), + 'ifunc': + ('extern "C" {' + 'int my_foo(void) { return 0; }' + 'static int (*resolve_foo(void))(void) { return my_foo; }' + '}' + 'int foo(void) __attribute__((ifunc("resolve_foo")));'), +} diff --git a/meson/mesonbuild/compilers/compilers.py b/meson/mesonbuild/compilers/compilers.py new file mode 100644 index 000000000..0aae6e528 --- /dev/null +++ b/meson/mesonbuild/compilers/compilers.py @@ -0,0 +1,1294 @@ +# Copyright 2012-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import abc +import contextlib, os.path, re +import enum +import itertools +import typing as T +from functools import lru_cache + +from .. import coredata +from .. import mlog +from .. import mesonlib +from ..mesonlib import ( + HoldableObject, + EnvironmentException, MachineChoice, MesonException, + Popen_safe, LibType, TemporaryDirectoryWinProof, OptionKey, +) + +from ..arglist import CompilerArgs + +if T.TYPE_CHECKING: + from ..build import BuildTarget + from ..coredata import OptionDictType, KeyedOptionDictType + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker, RSPFileSyntax + from ..dependencies import Dependency + + CompilerType = T.TypeVar('CompilerType', bound=Compiler) + _T = T.TypeVar('_T') + +"""This file contains the data files of all compilers Meson knows +about. To support a new compiler, add its information below. +Also add corresponding autodetection code in environment.py.""" + +header_suffixes = ('h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di') # type: T.Tuple[str, ...] +obj_suffixes = ('o', 'obj', 'res') # type: T.Tuple[str, ...] +lib_suffixes = ('a', 'lib', 'dll', 'dll.a', 'dylib', 'so') # type: T.Tuple[str, ...] +# Mapping of language to suffixes of files that should always be in that language +# This means we can't include .h headers here since they could be C, C++, ObjC, etc. +lang_suffixes = { + 'c': ('c',), + 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino', 'ixx', 'C'), + 'cuda': ('cu',), + # f90, f95, f03, f08 are for free-form fortran ('f90' recommended) + # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended) + 'fortran': ('f90', 'f95', 'f03', 'f08', 'f', 'for', 'ftn', 'fpp'), + 'd': ('d', 'di'), + 'objc': ('m',), + 'objcpp': ('mm',), + 'rust': ('rs',), + 'vala': ('vala', 'vapi', 'gs'), + 'cs': ('cs',), + 'swift': ('swift',), + 'java': ('java',), + 'cython': ('pyx', ), +} # type: T.Dict[str, T.Tuple[str, ...]] +all_languages = lang_suffixes.keys() +cpp_suffixes = lang_suffixes['cpp'] + ('h',) # type: T.Tuple[str, ...] +c_suffixes = lang_suffixes['c'] + ('h',) # type: T.Tuple[str, ...] 
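# Illustrative sketch, not part of the vendored Meson sources: it shows how a suffix
# table shaped like `lang_suffixes` above can drive file classification. The helper name
# `guess_language` and the trimmed-down table are assumptions made for this example only.
import os
import typing as T

_EXAMPLE_SUFFIXES: T.Dict[str, T.Tuple[str, ...]] = {
    'c': ('c',),
    'cpp': ('cpp', 'cc', 'cxx'),
    'fortran': ('f90', 'f95', 'f03', 'f08'),
}

def guess_language(fname: str) -> T.Optional[str]:
    # Strip the leading dot from the extension and lower-case it; note that the real
    # compilers module additionally special-cases the uppercase '.C' suffix as C++.
    suffix = os.path.splitext(fname)[1][1:].lower()
    for lang, suffixes in _EXAMPLE_SUFFIXES.items():
        if suffix in suffixes:
            return lang
    return None

assert guess_language('hello.f90') == 'fortran'
assert guess_language('README.md') is None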
+# List of languages that by default consume and output libraries following the +# C ABI; these can generally be used interchangeably +clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'fortran',) # type: T.Tuple[str, ...] +# List of assembler suffixes that can be linked with C code directly by the linker +assembler_suffixes: T.Tuple[str, ...] = ('s', 'S') +# List of languages that can be linked with C code directly by the linker +# used in build.py:process_compilers() and build.py:get_dynamic_linker() +clink_langs = ('d', 'cuda') + clib_langs # type: T.Tuple[str, ...] +clink_suffixes = tuple() # type: T.Tuple[str, ...] +for _l in clink_langs + ('vala',): + clink_suffixes += lang_suffixes[_l] +clink_suffixes += ('h', 'll', 's') +all_suffixes = set(itertools.chain(*lang_suffixes.values(), clink_suffixes)) # type: T.Set[str] + +# Languages that should use LDFLAGS arguments when linking. +LANGUAGES_USING_LDFLAGS = {'objcpp', 'cpp', 'objc', 'c', 'fortran', 'd', 'cuda'} # type: T.Set[str] +# Languages that should use CPPFLAGS arguments when linking. +LANGUAGES_USING_CPPFLAGS = {'c', 'cpp', 'objc', 'objcpp'} # type: T.Set[str] +soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + +# Environment variables that each lang uses. +CFLAGS_MAPPING: T.Mapping[str, str] = { + 'c': 'CFLAGS', + 'cpp': 'CXXFLAGS', + 'cuda': 'CUFLAGS', + 'objc': 'OBJCFLAGS', + 'objcpp': 'OBJCXXFLAGS', + 'fortran': 'FFLAGS', + 'd': 'DFLAGS', + 'vala': 'VALAFLAGS', + 'rust': 'RUSTFLAGS', + 'cython': 'CYTHONFLAGS', +} + +CEXE_MAPPING: T.Mapping = { + 'c': 'CC', + 'cpp': 'CXX', +} + +# All these are only for C-linkable languages; see `clink_langs` above. + +def sort_clink(lang: str) -> int: + ''' + Sorting function to sort the list of languages according to + reversed(compilers.clink_langs) and append the unknown langs in the end. + The purpose is to prefer C over C++ for files that can be compiled by + both such as assembly, C, etc. Also applies to ObjC, ObjC++, etc. 
+ ''' + if lang not in clink_langs: + return 1 + return -clink_langs.index(lang) + +def is_header(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1] + return suffix in header_suffixes + +def is_source(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1].lower() + return suffix in clink_suffixes + +def is_assembly(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + return fname.split('.')[-1].lower() == 's' + +def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + return fname.split('.')[-1] == 'll' + +@lru_cache(maxsize=None) +def cached_by_name(fname: 'mesonlib.FileOrString') -> bool: + suffix = fname.split('.')[-1] + return suffix in obj_suffixes + +def is_object(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + return cached_by_name(fname) + +def is_library(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + + if soregex.match(fname): + return True + + suffix = fname.split('.')[-1] + return suffix in lib_suffixes + +def is_known_suffix(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1] + + return suffix in all_suffixes + + +class CompileCheckMode(enum.Enum): + + PREPROCESS = 'preprocess' + COMPILE = 'compile' + LINK = 'link' + + +cuda_buildtype_args = {'plain': [], + 'debug': ['-g', '-G'], + 'debugoptimized': ['-g', '-lineinfo'], + 'release': [], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] +java_buildtype_args = {'plain': [], + 'debug': ['-g'], + 'debugoptimized': ['-g'], + 'release': [], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] + +rust_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] + +d_gdc_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': ['-finline-functions'], + 'release': ['-finline-functions'], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] + +d_ldc_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'], + 'release': ['-enable-inlining', '-Hkeep-all-bodies'], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] + +d_dmd_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': ['-inline'], + 'release': ['-inline'], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] + +mono_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': ['-optimize+'], + 'release': ['-optimize+'], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] + +swift_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], + } # type: T.Dict[str, T.List[str]] + +gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32', + '-lole32', '-loleaut32', '-luuid', '-lcomdlg32', '-ladvapi32'] # type: T.List[str] + +msvc_winlibs = ['kernel32.lib', 'user32.lib', 'gdi32.lib', + 'winspool.lib', 'shell32.lib', 'ole32.lib', 'oleaut32.lib', + 'uuid.lib', 'comdlg32.lib', 'advapi32.lib'] # type: T.List[str] + +clike_optimization_args = {'0': [], + 'g': [], + '1': 
['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Os'], + } # type: T.Dict[str, T.List[str]] + +cuda_optimization_args = {'0': [], + 'g': ['-O0'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-O3'] + } # type: T.Dict[str, T.List[str]] + +cuda_debug_args = {False: [], + True: ['-g']} # type: T.Dict[bool, T.List[str]] + +clike_debug_args = {False: [], + True: ['-g']} # type: T.Dict[bool, T.List[str]] + +base_options: 'KeyedOptionDictType' = { + OptionKey('b_pch'): coredata.UserBooleanOption('Use precompiled headers', True), + OptionKey('b_lto'): coredata.UserBooleanOption('Use link time optimization', False), + OptionKey('b_lto'): coredata.UserBooleanOption('Use link time optimization', False), + OptionKey('b_lto_threads'): coredata.UserIntegerOption('Use multiple threads for Link Time Optimization', (None, None,0)), + OptionKey('b_lto_mode'): coredata.UserComboOption('Select between different LTO modes.', + ['default', 'thin'], + 'default'), + OptionKey('b_sanitize'): coredata.UserComboOption('Code sanitizer to use', + ['none', 'address', 'thread', 'undefined', 'memory', 'address,undefined'], + 'none'), + OptionKey('b_lundef'): coredata.UserBooleanOption('Use -Wl,--no-undefined when linking', True), + OptionKey('b_asneeded'): coredata.UserBooleanOption('Use -Wl,--as-needed when linking', True), + OptionKey('b_pgo'): coredata.UserComboOption('Use profile guided optimization', + ['off', 'generate', 'use'], + 'off'), + OptionKey('b_coverage'): coredata.UserBooleanOption('Enable coverage tracking.', False), + OptionKey('b_colorout'): coredata.UserComboOption('Use colored output', + ['auto', 'always', 'never'], + 'always'), + OptionKey('b_ndebug'): coredata.UserComboOption('Disable asserts', ['true', 'false', 'if-release'], 'false'), + OptionKey('b_staticpic'): coredata.UserBooleanOption('Build static libraries as position independent', True), + OptionKey('b_pie'): coredata.UserBooleanOption('Build executables as position independent', False), + OptionKey('b_bitcode'): coredata.UserBooleanOption('Generate and embed bitcode (only macOS/iOS/tvOS)', False), + OptionKey('b_vscrt'): coredata.UserComboOption('VS run-time library type to use.', + ['none', 'md', 'mdd', 'mt', 'mtd', 'from_buildtype', 'static_from_buildtype'], + 'from_buildtype'), +} + +def option_enabled(boptions: T.Set[OptionKey], options: 'KeyedOptionDictType', + option: OptionKey) -> bool: + try: + if option not in boptions: + return False + ret = options[option].value + assert isinstance(ret, bool), 'must return bool' # could also be str + return ret + except KeyError: + return False + + +def get_option_value(options: 'KeyedOptionDictType', opt: OptionKey, fallback: '_T') -> '_T': + """Get the value of an option, or the fallback value.""" + try: + v: '_T' = options[opt].value + except KeyError: + return fallback + + assert isinstance(v, type(fallback)), f'Should have {type(fallback)!r} but was {type(v)!r}' + # Mypy doesn't understand that the above assert ensures that v is type _T + return v + + +def get_base_compile_args(options: 'KeyedOptionDictType', compiler: 'Compiler') -> T.List[str]: + args = [] # type T.List[str] + try: + if options[OptionKey('b_lto')].value: + args.extend(compiler.get_lto_compile_args( + threads=get_option_value(options, OptionKey('b_lto_threads'), 0), + mode=get_option_value(options, OptionKey('b_lto_mode'), 'default'))) + except KeyError: + pass + try: + args += compiler.get_colorout_args(options[OptionKey('b_colorout')].value) + except KeyError: + pass + try: + args += 
compiler.sanitizer_compile_args(options[OptionKey('b_sanitize')].value) + except KeyError: + pass + try: + pgo_val = options[OptionKey('b_pgo')].value + if pgo_val == 'generate': + args.extend(compiler.get_profile_generate_args()) + elif pgo_val == 'use': + args.extend(compiler.get_profile_use_args()) + except KeyError: + pass + try: + if options[OptionKey('b_coverage')].value: + args += compiler.get_coverage_args() + except KeyError: + pass + try: + if (options[OptionKey('b_ndebug')].value == 'true' or + (options[OptionKey('b_ndebug')].value == 'if-release' and + options[OptionKey('buildtype')].value in {'release', 'plain'})): + args += compiler.get_disable_assert_args() + except KeyError: + pass + # This does not need a try...except + if option_enabled(compiler.base_options, options, OptionKey('b_bitcode')): + args.append('-fembed-bitcode') + try: + crt_val = options[OptionKey('b_vscrt')].value + buildtype = options[OptionKey('buildtype')].value + try: + args += compiler.get_crt_compile_args(crt_val, buildtype) + except AttributeError: + pass + except KeyError: + pass + return args + +def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler', + is_shared_module: bool) -> T.List[str]: + args = [] # type: T.List[str] + try: + if options[OptionKey('b_lto')].value: + args.extend(linker.get_lto_link_args( + threads=get_option_value(options, OptionKey('b_lto_threads'), 0), + mode=get_option_value(options, OptionKey('b_lto_mode'), 'default'))) + except KeyError: + pass + try: + args += linker.sanitizer_link_args(options[OptionKey('b_sanitize')].value) + except KeyError: + pass + try: + pgo_val = options[OptionKey('b_pgo')].value + if pgo_val == 'generate': + args.extend(linker.get_profile_generate_args()) + elif pgo_val == 'use': + args.extend(linker.get_profile_use_args()) + except KeyError: + pass + try: + if options[OptionKey('b_coverage')].value: + args += linker.get_coverage_link_args() + except KeyError: + pass + + as_needed = option_enabled(linker.base_options, options, OptionKey('b_asneeded')) + bitcode = option_enabled(linker.base_options, options, OptionKey('b_bitcode')) + # Shared modules cannot be built with bitcode_bundle because + # -bitcode_bundle is incompatible with -undefined and -bundle + if bitcode and not is_shared_module: + args.extend(linker.bitcode_args()) + elif as_needed: + # -Wl,-dead_strip_dylibs is incompatible with bitcode + args.extend(linker.get_asneeded_args()) + + # Apple's ld (the only one that supports bitcode) does not like -undefined + # arguments or -headerpad_max_install_names when bitcode is enabled + if not bitcode: + args.extend(linker.headerpad_args()) + if (not is_shared_module and + option_enabled(linker.base_options, options, OptionKey('b_lundef'))): + args.extend(linker.no_undefined_link_args()) + else: + args.extend(linker.get_allow_undefined_link_args()) + + try: + crt_val = options[OptionKey('b_vscrt')].value + buildtype = options[OptionKey('buildtype')].value + try: + args += linker.get_crt_link_args(crt_val, buildtype) + except AttributeError: + pass + except KeyError: + pass + return args + + +class CrossNoRunException(MesonException): + pass + +class RunResult(HoldableObject): + def __init__(self, compiled: bool, returncode: int = 999, + stdout: str = 'UNDEFINED', stderr: str = 'UNDEFINED'): + self.compiled = compiled + self.returncode = returncode + self.stdout = stdout + self.stderr = stderr + + +class CompileResult(HoldableObject): + + """The result of Compiler.compiles (and friends).""" + + def __init__(self, stdo: 
T.Optional[str] = None, stde: T.Optional[str] = None, + args: T.Optional[T.List[str]] = None, + returncode: int = 999, pid: int = -1, + text_mode: bool = True, + input_name: T.Optional[str] = None, + output_name: T.Optional[str] = None, + command: T.Optional[T.List[str]] = None, cached: bool = False): + self.stdout = stdo + self.stderr = stde + self.input_name = input_name + self.output_name = output_name + self.command = command or [] + self.args = args or [] + self.cached = cached + self.returncode = returncode + self.pid = pid + self.text_mode = text_mode + + +class Compiler(HoldableObject, metaclass=abc.ABCMeta): + # Libraries to ignore in find_library() since they are provided by the + # compiler or the C library. Currently only used for MSVC. + ignore_libs = [] # type: T.List[str] + # Libraries that are internal compiler implementations, and must not be + # manually searched. + internal_libs = [] # type: T.List[str] + + LINKER_PREFIX = None # type: T.Union[None, str, T.List[str]] + INVOKES_LINKER = True + + # TODO: these could be forward declarations once we drop 3.5 support + if T.TYPE_CHECKING: + language = 'unset' + id = '' + warn_args = {} # type: T.Dict[str, T.List[str]] + + def __init__(self, exelist: T.List[str], version: str, + for_machine: MachineChoice, info: 'MachineInfo', + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, is_cross: bool = False): + self.exelist = exelist + # In case it's been overridden by a child class already + if not hasattr(self, 'file_suffixes'): + self.file_suffixes = lang_suffixes[self.language] + if not hasattr(self, 'can_compile_suffixes'): + self.can_compile_suffixes = set(self.file_suffixes) + self.default_suffix = self.file_suffixes[0] + self.version = version + self.full_version = full_version + self.for_machine = for_machine + self.base_options: T.Set[OptionKey] = set() + self.linker = linker + self.info = info + self.is_cross = is_cross + + def __repr__(self) -> str: + repr_str = "<{0}: v{1} `{2}`>" + return repr_str.format(self.__class__.__name__, self.version, + ' '.join(self.exelist)) + + @lru_cache(maxsize=None) + def can_compile(self, src: 'mesonlib.FileOrString') -> bool: + if isinstance(src, mesonlib.File): + src = src.fname + suffix = os.path.splitext(src)[1] + if suffix != '.C': + suffix = suffix.lower() + return bool(suffix) and suffix[1:] in self.can_compile_suffixes + + def get_id(self) -> str: + return self.id + + def get_linker_id(self) -> str: + # There is not guarantee that we have a dynamic linker instance, as + # some languages don't have separate linkers and compilers. 
In those + # cases return the compiler id + try: + return self.linker.id + except AttributeError: + return self.id + + def get_version_string(self) -> str: + details = [self.id, self.version] + if self.full_version: + details += ['"%s"' % (self.full_version)] + return '(%s)' % (' '.join(details)) + + def get_language(self) -> str: + return self.language + + @classmethod + def get_display_language(cls) -> str: + return cls.language.capitalize() + + def get_default_suffix(self) -> str: + return self.default_suffix + + def get_define(self, dname: str, prefix: str, env: 'Environment', + extra_args: T.List[str], dependencies: T.List['Dependency'], + disable_cache: bool = False) -> T.Tuple[str, bool]: + raise EnvironmentException('%s does not support get_define ' % self.get_id()) + + def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int], + guess: T.Optional[int], prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]], dependencies: T.Optional[T.List['Dependency']]) -> int: + raise EnvironmentException('%s does not support compute_int ' % self.get_id()) + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + raise EnvironmentException('%s does not support compute_parameters_with_absolute_paths ' % self.get_id()) + + def has_members(self, typename: str, membernames: T.List[str], + prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + raise EnvironmentException('%s does not support has_member(s) ' % self.get_id()) + + def has_type(self, typename: str, prefix: str, env: 'Environment', + extra_args: T.List[str], *, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + raise EnvironmentException('%s does not support has_type ' % self.get_id()) + + def symbols_have_underscore_prefix(self, env: 'Environment') -> bool: + raise EnvironmentException('%s does not support symbols_have_underscore_prefix ' % self.get_id()) + + def get_exelist(self) -> T.List[str]: + return self.exelist.copy() + + def get_linker_exelist(self) -> T.List[str]: + return self.linker.get_exelist() + + @abc.abstractmethod + def get_output_args(self, outputname: str) -> T.List[str]: + pass + + def get_linker_output_args(self, outputname: str) -> T.List[str]: + return self.linker.get_output_args(outputname) + + def get_linker_search_args(self, dirname: str) -> T.List[str]: + return self.linker.get_search_args(dirname) + + def get_builtin_define(self, define: str) -> T.Optional[str]: + raise EnvironmentException('%s does not support get_builtin_define.' % self.id) + + def has_builtin_define(self, define: str) -> bool: + raise EnvironmentException('%s does not support has_builtin_define.' % self.id) + + def get_always_args(self) -> T.List[str]: + return [] + + def can_linker_accept_rsp(self) -> bool: + """ + Determines whether the linker can accept arguments using the @rsp syntax. + """ + return self.linker.get_accepts_rsp() + + def get_linker_always_args(self) -> T.List[str]: + return self.linker.get_always_args() + + def get_linker_lib_prefix(self) -> str: + return self.linker.get_lib_prefix() + + def gen_import_library_args(self, implibname: str) -> T.List[str]: + """ + Used only on Windows for libraries that need an import library. + This currently means C, C++, Fortran. 
+ """ + return [] + + def get_options(self) -> 'KeyedOptionDictType': + return {} + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return self.linker.get_option_args(options) + + def check_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + """Check that header is usable. + + Returns a two item tuple of bools. The first bool is whether the + check succeeded, the second is whether the result was cached (True) + or run fresh (False). + """ + raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language()) + + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + """Check that header is exists. + + This check will return true if the file exists, even if it contains: + + ```c + # error "You thought you could use this, LOLZ!" + ``` + + Use check_header if your header only works in some cases. + + Returns a two item tuple of bools. The first bool is whether the + check succeeded, the second is whether the result was cached (True) + or run fresh (False). + """ + raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language()) + + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + raise EnvironmentException('Language %s does not support header symbol checks.' % self.get_display_language()) + + def run(self, code: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> RunResult: + raise EnvironmentException('Language %s does not support run checks.' % self.get_display_language()) + + def sizeof(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: + raise EnvironmentException('Language %s does not support sizeof checks.' % self.get_display_language()) + + def alignment(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: + raise EnvironmentException('Language %s does not support alignment checks.' % self.get_display_language()) + + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + """See if a function exists. + + Returns a two item tuple of bools. The first bool is whether the + check succeeded, the second is whether the result was cached (True) + or run fresh (False). + """ + raise EnvironmentException('Language %s does not support function checks.' 
% self.get_display_language()) + + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: + "Always returns a copy that can be independently mutated" + return args.copy() + + @classmethod + def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]: + "Always returns a copy that can be independently mutated" + return args.copy() + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: + raise EnvironmentException(f'Language {self.get_display_language()} does not support library finding.') + + def get_library_naming(self, env: 'Environment', libtype: LibType, + strict: bool = False) -> T.Optional[T.Tuple[str, ...]]: + raise EnvironmentException( + 'Language {} does not support get_library_naming.'.format( + self.get_display_language())) + + def get_program_dirs(self, env: 'Environment') -> T.List[str]: + return [] + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + raise EnvironmentException( + 'Language {} does not support has_multi_arguments.'.format( + self.get_display_language())) + + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self.linker.has_multi_arguments(args, env) + + def _get_compile_output(self, dirname: str, mode: str) -> str: + # TODO: mode should really be an enum + # In pre-processor mode, the output is sent to stdout and discarded + if mode == 'preprocess': + return None + # Extension only matters if running results; '.exe' is + # guaranteed to be executable on every platform. + if mode == 'link': + suffix = 'exe' + else: + suffix = 'obj' + return os.path.join(dirname, 'output.' + suffix) + + def get_compiler_args_for_mode(self, mode: CompileCheckMode) -> T.List[str]: + # TODO: mode should really be an enum + args = [] # type: T.List[str] + args += self.get_always_args() + if mode is CompileCheckMode.COMPILE: + args += self.get_compile_only_args() + elif mode is CompileCheckMode.PREPROCESS: + args += self.get_preprocess_only_args() + else: + assert mode is CompileCheckMode.LINK + return args + + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs: + """Return an appropriate CompilerArgs instance for this class.""" + return CompilerArgs(self, args) + + @contextlib.contextmanager + def compile(self, code: 'mesonlib.FileOrString', + extra_args: T.Union[None, CompilerArgs, T.List[str]] = None, + *, mode: str = 'link', want_output: bool = False, + temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]: + # TODO: there isn't really any reason for this to be a contextmanager + if extra_args is None: + extra_args = [] + + with TemporaryDirectoryWinProof(dir=temp_dir) as tmpdirname: + no_ccache = False + if isinstance(code, str): + srcname = os.path.join(tmpdirname, + 'testfile.' 
+ self.default_suffix) + with open(srcname, 'w', encoding='utf-8') as ofile: + ofile.write(code) + # ccache would result in a cache miss + no_ccache = True + contents = code + elif isinstance(code, mesonlib.File): + srcname = code.fname + with open(code.fname, encoding='utf-8') as f: + contents = f.read() + + # Construct the compiler command-line + commands = self.compiler_args() + commands.append(srcname) + # Preprocess mode outputs to stdout, so no output args + if mode != 'preprocess': + output = self._get_compile_output(tmpdirname, mode) + commands += self.get_output_args(output) + commands.extend(self.get_compiler_args_for_mode(CompileCheckMode(mode))) + # extra_args must be last because it could contain '/link' to + # pass args to VisualStudio's linker. In that case everything + # in the command line after '/link' is given to the linker. + commands += extra_args + # Generate full command-line with the exelist + command_list = self.get_exelist() + commands.to_native() + mlog.debug('Running compile:') + mlog.debug('Working directory: ', tmpdirname) + mlog.debug('Command line: ', ' '.join(command_list), '\n') + mlog.debug('Code:\n', contents) + os_env = os.environ.copy() + os_env['LC_ALL'] = 'C' + if no_ccache: + os_env['CCACHE_DISABLE'] = '1' + p, stdo, stde = Popen_safe(command_list, cwd=tmpdirname, env=os_env) + mlog.debug('Compiler stdout:\n', stdo) + mlog.debug('Compiler stderr:\n', stde) + + result = CompileResult(stdo, stde, list(commands), p.returncode, p.pid, input_name=srcname) + if want_output: + result.output_name = output + yield result + + @contextlib.contextmanager + def cached_compile(self, code: str, cdata: coredata.CoreData, *, + extra_args: T.Union[None, T.List[str], CompilerArgs] = None, + mode: str = 'link', + temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]: + # TODO: There's isn't really any reason for this to be a context manager + + # Calculate the key + textra_args = tuple(extra_args) if extra_args is not None else tuple() # type: T.Tuple[str, ...] + key = (tuple(self.exelist), self.version, code, textra_args, mode) # type: coredata.CompilerCheckCacheKey + + # Check if not cached, and generate, otherwise get from the cache + if key in cdata.compiler_check_cache: + p = cdata.compiler_check_cache[key] # type: CompileResult + p.cached = True + mlog.debug('Using cached compile:') + mlog.debug('Cached command line: ', ' '.join(p.command), '\n') + mlog.debug('Code:\n', code) + mlog.debug('Cached compiler stdout:\n', p.stdout) + mlog.debug('Cached compiler stderr:\n', p.stderr) + yield p + else: + with self.compile(code, extra_args=extra_args, mode=mode, want_output=False, temp_dir=temp_dir) as p: + cdata.compiler_check_cache[key] = p + yield p + + def get_colorout_args(self, colortype: str) -> T.List[str]: + # TODO: colortype can probably be an emum + return [] + + # Some compilers (msvc) write debug info to a separate file. + # These args specify where it should be written. 
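[Editor's note: the cached_compile() helper above memoises whole CompileResult objects on a key derived from the compiler invocation and the source text. A rough, hypothetical sketch of the same idea in plain Python follows; the names check_cache, cached_check and run_check are illustrative only and not part of this patch:

    check_cache = {}  # (exelist, version, code, extra_args, mode) -> result

    def cached_check(exelist, version, code, extra_args, mode, run_check):
        # Build an immutable key out of everything that can change the outcome
        # of a compile check, mirroring coredata.CompilerCheckCacheKey above.
        key = (tuple(exelist), version, code,
               tuple(extra_args) if extra_args is not None else (), mode)
        if key not in check_cache:
            check_cache[key] = run_check()  # run the real compile only once
        return check_cache[key]

The real implementation additionally sets CompileResult.cached so callers can report whether a result came from the cache or from a fresh compile.]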
+ def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]: + return [] + + def get_link_debugfile_name(self, targetfile: str) -> str: + return self.linker.get_debugfile_name(targetfile) + + def get_link_debugfile_args(self, targetfile: str) -> T.List[str]: + return self.linker.get_debugfile_args(targetfile) + + def get_std_shared_lib_link_args(self) -> T.List[str]: + return self.linker.get_std_shared_lib_args() + + def get_std_shared_module_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return self.linker.get_std_shared_module_args(options) + + def get_link_whole_for(self, args: T.List[str]) -> T.List[str]: + return self.linker.get_link_whole_for(args) + + def get_allow_undefined_link_args(self) -> T.List[str]: + return self.linker.get_allow_undefined_args() + + def no_undefined_link_args(self) -> T.List[str]: + return self.linker.no_undefined_args() + + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + """Compiler arguments needed to enable the given instruction set. + + Return type ay be an empty list meaning nothing needed or None + meaning the given set is not supported. + """ + return None + + def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, + rpath_paths: str, build_rpath: str, + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + return self.linker.build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + + def thread_flags(self, env: 'Environment') -> T.List[str]: + return [] + + def thread_link_flags(self, env: 'Environment') -> T.List[str]: + return self.linker.thread_flags(env) + + def openmp_flags(self) -> T.List[str]: + raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language()) + + def openmp_link_flags(self) -> T.List[str]: + return self.openmp_flags() + + def language_stdlib_only_link_flags(self) -> T.List[str]: + return [] + + def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]: + return [] + + def get_gui_app_args(self, value: bool) -> T.List[str]: + # Only used on Windows + return self.linker.get_gui_app_args(value) + + def get_win_subsystem_args(self, value: str) -> T.List[str]: + # By default the dynamic linker is going to return an empty + # array in case it either doesn't support Windows subsystems + # or does not target Windows + return self.linker.get_win_subsystem_args(value) + + def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]: + raise EnvironmentException( + f'Language {self.get_display_language()} does not support function attributes.') + + def get_pic_args(self) -> T.List[str]: + m = 'Language {} does not support position-independent code' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_args(self) -> T.List[str]: + m = 'Language {} does not support position-independent executable' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_link_args(self) -> T.List[str]: + return self.linker.get_pie_args() + + def get_argument_syntax(self) -> str: + """Returns the argument family type. + + Compilers fall into families if they try to emulate the command line + interface of another compiler. For example, clang is in the GCC family + since it accepts most of the same arguments as GCC. ICL (ICC on + windows) is in the MSVC family since it accepts most of the same + arguments as MSVC. 
+ """ + return 'other' + + def get_profile_generate_args(self) -> T.List[str]: + raise EnvironmentException( + '%s does not support get_profile_generate_args ' % self.get_id()) + + def get_profile_use_args(self) -> T.List[str]: + raise EnvironmentException( + '%s does not support get_profile_use_args ' % self.get_id()) + + def remove_linkerlike_args(self, args: T.List[str]) -> T.List[str]: + rm_exact = ('-headerpad_max_install_names',) + rm_prefixes = ('-Wl,', '-L',) + rm_next = ('-L', '-framework',) + ret = [] # T.List[str] + iargs = iter(args) + for arg in iargs: + # Remove this argument + if arg in rm_exact: + continue + # If the argument starts with this, but is not *exactly* this + # f.ex., '-L' should match ['-Lfoo'] but not ['-L', 'foo'] + if arg.startswith(rm_prefixes) and arg not in rm_prefixes: + continue + # Ignore this argument and the one after it + if arg in rm_next: + next(iargs) + continue + ret.append(arg) + return ret + + def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]: + return [] + + def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]: + return self.linker.get_lto_args() + + def sanitizer_compile_args(self, value: str) -> T.List[str]: + return [] + + def sanitizer_link_args(self, value: str) -> T.List[str]: + return self.linker.sanitizer_args(value) + + def get_asneeded_args(self) -> T.List[str]: + return self.linker.get_asneeded_args() + + def headerpad_args(self) -> T.List[str]: + return self.linker.headerpad_args() + + def bitcode_args(self) -> T.List[str]: + return self.linker.bitcode_args() + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not implement get_buildtype_args') + + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: + return self.linker.get_buildtype_args(buildtype) + + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str], + is_shared_module: bool) -> T.List[str]: + return self.linker.get_soname_args( + env, prefix, shlib_name, suffix, soversion, + darwin_versions, is_shared_module) + + def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]: + return target.link_args + + def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: + return dep.get_compile_args() + + def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: + return dep.get_link_args() + + @classmethod + def use_linker_args(cls, linker: str) -> T.List[str]: + """Get a list of arguments to pass to the compiler to set the linker. 
+ """ + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_coverage_link_args(self) -> T.List[str]: + return self.linker.get_coverage_args() + + def get_disable_assert_args(self) -> T.List[str]: + return [] + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + raise EnvironmentException('This compiler does not support Windows CRT selection') + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + raise EnvironmentException('This compiler does not support Windows CRT selection') + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_preprocess_only_args(self) -> T.List[str]: + raise EnvironmentException('This compiler does not have a preprocessor') + + def get_default_include_dirs(self) -> T.List[str]: + # TODO: This is a candidate for returning an immutable list + return [] + + def get_largefile_args(self) -> T.List[str]: + '''Enable transparent large-file-support for 32-bit UNIX systems''' + if not (self.get_argument_syntax() == 'msvc' or self.info.is_darwin()): + # Enable large-file support unconditionally on all platforms other + # than macOS and MSVC. macOS is now 64-bit-only so it doesn't + # need anything special, and MSVC doesn't have automatic LFS. + # You must use the 64-bit counterparts explicitly. + # glibc, musl, and uclibc, and all BSD libcs support this. On Android, + # support for transparent LFS is available depending on the version of + # Bionic: https://github.com/android/platform_bionic#32-bit-abi-bugs + # https://code.google.com/p/android/issues/detail?id=64613 + # + # If this breaks your code, fix it! It's been 20+ years! + return ['-D_FILE_OFFSET_BITS=64'] + # We don't enable -D_LARGEFILE64_SOURCE since that enables + # transitionary features and must be enabled by programs that use + # those features explicitly. 
+ return [] + + def get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> T.List[str]: + return [] + + def get_return_value(self, + fname: str, + rtype: str, + prefix: str, + env: 'Environment', + extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]: + raise EnvironmentException(f'{self.id} does not support get_return_value') + + def find_framework(self, + name: str, + env: 'Environment', + extra_dirs: T.List[str], + allow_system: bool = True) -> T.Optional[T.List[str]]: + raise EnvironmentException(f'{self.id} does not support find_framework') + + def find_framework_paths(self, env: 'Environment') -> T.List[str]: + raise EnvironmentException(f'{self.id} does not support find_framework_paths') + + def attribute_check_func(self, name: str) -> str: + raise EnvironmentException(f'{self.id} does not support attribute checks') + + def get_pch_suffix(self) -> str: + raise EnvironmentException(f'{self.id} does not support pre compiled headers') + + def get_pch_name(self, name: str) -> str: + raise EnvironmentException(f'{self.id} does not support pre compiled headers') + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not support pre compiled headers') + + def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not support function attributes') + + def name_string(self) -> str: + return ' '.join(self.exelist) + + @abc.abstractmethod + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + """Check that this compiler actually works. + + This should provide a simple compile/link test. Somthing as simple as: + ```python + main(): return 0 + ``` + is good enough here. + """ + + def split_shlib_to_parts(self, fname: str) -> T.Tuple[T.Optional[str], str]: + return None, fname + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return [] + + def get_std_exe_link_args(self) -> T.List[str]: + # TODO: is this a linker property? + return [] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + return [] + + def depfile_for_object(self, objfile: str) -> str: + return objfile + '.' + self.get_depfile_suffix() + + def get_depfile_suffix(self) -> str: + raise EnvironmentException(f'{self.id} does not implement get_depfile_suffix') + + def get_no_stdinc_args(self) -> T.List[str]: + """Arguments to turn off default inclusion of standard libraries.""" + return [] + + def get_warn_args(self, level: str) -> T.List[str]: + return [] + + def get_werror_args(self) -> T.List[str]: + return [] + + @abc.abstractmethod + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + pass + + def get_module_incdir_args(self) -> T.Tuple[str, ...]: + raise EnvironmentException(f'{self.id} does not implement get_module_incdir_args') + + def get_module_outdir_args(self, path: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not implement get_module_outdir_args') + + def module_name_to_filename(self, module_name: str) -> str: + raise EnvironmentException(f'{self.id} does not implement module_name_to_filename') + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + """Arguments to pass the compiler and/or linker for checks. + + The default implementation turns off optimizations. 
+ + Examples of things that go here: + - extra arguments for error checking + - Arguments required to make the compiler exit with a non-zero status + when something is wrong. + """ + return self.get_no_optimization_args() + + def get_no_optimization_args(self) -> T.List[str]: + """Arguments to the compiler to turn off all optimizations.""" + return [] + + def build_wrapper_args(self, env: 'Environment', + extra_args: T.Union[None, CompilerArgs, T.List[str]], + dependencies: T.Optional[T.List['Dependency']], + mode: CompileCheckMode = CompileCheckMode.COMPILE) -> CompilerArgs: + """Arguments to pass the build_wrapper helper. + + This generally needs to be set on a per-language baises. It provides + a hook for languages to handle dependencies and extra args. The base + implementation handles the most common cases, namely adding the + check_arguments, unwrapping dependencies, and appending extra args. + """ + if callable(extra_args): + extra_args = extra_args(mode) + if extra_args is None: + extra_args = [] + if dependencies is None: + dependencies = [] + + # Collect compiler arguments + args = self.compiler_args(self.get_compiler_check_args(mode)) + for d in dependencies: + # Add compile flags needed by dependencies + args += d.get_compile_args() + if mode is CompileCheckMode.LINK: + # Add link flags needed to find dependencies + args += d.get_link_args() + + if mode is CompileCheckMode.COMPILE: + # Add DFLAGS from the env + args += env.coredata.get_external_args(self.for_machine, self.language) + elif mode is CompileCheckMode.LINK: + # Add LDFLAGS from the env + args += env.coredata.get_external_link_args(self.for_machine, self.language) + # extra_args must override all other arguments, so we add them last + args += extra_args + return args + + @contextlib.contextmanager + def _build_wrapper(self, code: str, env: 'Environment', + extra_args: T.Union[None, CompilerArgs, T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: str = 'compile', want_output: bool = False, + disable_cache: bool = False, + temp_dir: str = None) -> T.Iterator[T.Optional[CompileResult]]: + """Helper for getting a cacched value when possible. + + This method isn't meant to be called externally, it's mean to be + wrapped by other methods like compiles() and links(). 
+ """ + args = self.build_wrapper_args(env, extra_args, dependencies, CompileCheckMode(mode)) + if disable_cache or want_output: + with self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir) as r: + yield r + else: + with self.cached_compile(code, env.coredata, extra_args=args, mode=mode, temp_dir=env.scratch_dir) as r: + yield r + + def compiles(self, code: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], CompilerArgs] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: str = 'compile', + disable_cache: bool = False) -> T.Tuple[bool, bool]: + with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p: + return p.returncode == 0, p.cached + + + def links(self, code: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], CompilerArgs] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: str = 'compile', + disable_cache: bool = False) -> T.Tuple[bool, bool]: + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode='link', disable_cache=disable_cache) + + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + """Used by D for extra language features.""" + # TODO: using a TypeDict here would improve this + raise EnvironmentException(f'{self.id} does not implement get_feature_args') + + def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not know how to do prelinking.') + + def rsp_file_syntax(self) -> 'RSPFileSyntax': + """The format of the RSP file that this compiler supports. + + If `self.can_linker_accept_rsp()` returns True, then this needs to + be implemented + """ + return self.linker.rsp_file_syntax() + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + """Arguments required for a debug build.""" + return [] + + +def get_global_options(lang: str, + comp: T.Type[Compiler], + for_machine: MachineChoice, + env: 'Environment') -> 'KeyedOptionDictType': + """Retrieve options that apply to all compilers for a given language.""" + description = f'Extra arguments passed to the {lang}' + argkey = OptionKey('args', lang=lang, machine=for_machine) + largkey = argkey.evolve('link_args') + envkey = argkey.evolve('env_args') + + comp_key = argkey if argkey in env.options else envkey + + comp_options = env.options.get(comp_key, []) + link_options = env.options.get(largkey, []) + + cargs = coredata.UserArrayOption( + description + ' compiler', + comp_options, split_args=True, user_input=True, allow_dups=True) + + largs = coredata.UserArrayOption( + description + ' linker', + link_options, split_args=True, user_input=True, allow_dups=True) + + if comp.INVOKES_LINKER and comp_key == envkey: + # If the compiler acts as a linker driver, and we're using the + # environment variable flags for both the compiler and linker + # arguments, then put the compiler flags in the linker flags as well. + # This is how autotools works, and the env vars freature is for + # autotools compatibility. 
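[Editor's note: as a concrete illustration of the autotools-style behaviour described in the comment above (values invented for this note, not part of this patch), a linker-driving C compiler configured purely from the environment, say CFLAGS='-O2 -g' and LDFLAGS='-Wl,--as-needed', ends up with:

    comp_options = ['-O2', '-g']              # from CFLAGS
    link_options = ['-Wl,--as-needed']        # from LDFLAGS
    # The compiler also drives the link step, so the compile flags are
    # mirrored into the link flags, giving LDFLAGS followed by CFLAGS:
    link_args = link_options + comp_options   # ['-Wl,--as-needed', '-O2', '-g']

The extend_value() call that follows performs this duplication on the UserArrayOption holding the link arguments.]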
+ largs.extend_value(comp_options) + + opts: 'KeyedOptionDictType' = {argkey: cargs, largkey: largs} + + return opts diff --git a/meson/mesonbuild/compilers/cpp.py b/meson/mesonbuild/compilers/cpp.py new file mode 100644 index 000000000..44155d1bd --- /dev/null +++ b/meson/mesonbuild/compilers/cpp.py @@ -0,0 +1,823 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +import functools +import os.path +import typing as T + +from .. import coredata +from .. import mlog +from ..mesonlib import MesonException, MachineChoice, version_compare, OptionKey + +from .compilers import ( + gnu_winlibs, + msvc_winlibs, + Compiler, + CompileCheckMode, +) +from .c_function_attributes import CXX_FUNC_ATTRIBUTES, C_FUNC_ATTRIBUTES +from .mixins.clike import CLikeCompiler +from .mixins.ccrx import CcrxCompiler +from .mixins.c2000 import C2000Compiler +from .mixins.arm import ArmCompiler, ArmclangCompiler +from .mixins.visualstudio import MSVCCompiler, ClangClCompiler +from .mixins.gnu import GnuCompiler +from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler +from .mixins.clang import ClangCompiler +from .mixins.elbrus import ElbrusCompiler +from .mixins.pgi import PGICompiler +from .mixins.emscripten import EmscriptenMixin + +if T.TYPE_CHECKING: + from ..coredata import KeyedOptionDictType + from ..dependencies import Dependency + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker + from ..programs import ExternalProgram + from .mixins.clike import CLikeCompiler as CompilerMixinBase +else: + CompilerMixinBase = object + + +def non_msvc_eh_options(eh: str, args: T.List[str]) -> None: + if eh == 'none': + args.append('-fno-exceptions') + elif eh == 's' or eh == 'c': + mlog.warning('non-MSVC compilers do not support ' + eh + ' exception handling.' 
+ + 'You may want to set eh to \'default\'.') + +class CPPCompiler(CLikeCompiler, Compiler): + + @classmethod + def attribute_check_func(cls, name: str) -> str: + try: + return CXX_FUNC_ATTRIBUTES.get(name, C_FUNC_ATTRIBUTES[name]) + except KeyError: + raise MesonException(f'Unknown function attribute "{name}"') + + language = 'cpp' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + # If a child ObjCPP class has already set it, don't set it ourselves + Compiler.__init__(self, exelist, version, for_machine, info, + is_cross=is_cross, linker=linker, + full_version=full_version) + CLikeCompiler.__init__(self, exe_wrapper) + + @staticmethod + def get_display_language() -> str: + return 'C++' + + def get_no_stdinc_args(self) -> T.List[str]: + return ['-nostdinc++'] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'class breakCCompiler;int main(void) { return 0; }\n' + return self._sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code) + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # -fpermissive allows non-conforming code to compile which is necessary + # for many C++ checks. Particularly, the has_header_symbol check is + # too strict without this and always fails. + return super().get_compiler_check_args(mode) + ['-fpermissive'] + + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + # Check if it's a C-like symbol + found, cached = super().has_header_symbol(hname, symbol, prefix, env, + extra_args=extra_args, + dependencies=dependencies) + if found: + return True, cached + # Check if it's a class or a template + if extra_args is None: + extra_args = [] + t = f'''{prefix} + #include <{hname}> + using {symbol}; + int main(void) {{ return 0; }}''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies) + + def _test_cpp_std_arg(self, cpp_std_value: str) -> bool: + # Test whether the compiler understands a -std=XY argument + assert(cpp_std_value.startswith('-std=')) + + # This test does not use has_multi_arguments() for two reasons: + # 1. has_multi_arguments() requires an env argument, which the compiler + # object does not have at this point. + # 2. even if it did have an env object, that might contain another more + # recent -std= argument, which might lead to a cascaded failure. + CPP_TEST = 'int i = static_cast(0);' + with self.compile(CPP_TEST, extra_args=[cpp_std_value], mode='compile') as p: + if p.returncode == 0: + mlog.debug(f'Compiler accepts {cpp_std_value}:', 'YES') + return True + else: + mlog.debug(f'Compiler accepts {cpp_std_value}:', 'NO') + return False + + @functools.lru_cache() + def _find_best_cpp_std(self, cpp_std: str) -> str: + # The initial version mapping approach to make falling back + # from '-std=c++14' to '-std=c++1y' was too brittle. For instance, + # Apple's Clang uses a different versioning scheme to upstream LLVM, + # making the whole detection logic awfully brittle. Instead, let's + # just see if feeding GCC or Clang our '-std=' setting works, and + # if not, try the fallback argument. 
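[Editor's note: the probing strategy described in the comment above can be reproduced outside of Meson with a bare subprocess call. A minimal sketch, assuming a GCC- or Clang-like C++ compiler on PATH; the compiler name and the helper accepts_std are assumptions, not part of this patch:

    import os
    import subprocess
    import tempfile

    def accepts_std(compiler, std_flag):
        # Compile an empty translation unit with the candidate -std= flag and
        # report whether the compiler exited successfully.
        with tempfile.TemporaryDirectory() as tmpdir:
            src = os.path.join(tmpdir, 'probe.cc')
            with open(src, 'w', encoding='utf-8') as f:
                f.write('int main(void) { return 0; }\n')
            obj = os.path.join(tmpdir, 'probe.o')
            result = subprocess.run([compiler, std_flag, '-c', src, '-o', obj],
                                    capture_output=True)
            return result.returncode == 0

    # e.g. prefer the modern spelling and fall back to the pre-release one:
    # flag = next(f for f in ('-std=c++14', '-std=c++1y') if accepts_std('g++', f))

The CPP_FALLBACKS table that follows encodes those modern-to-prerelease pairs.]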
+ CPP_FALLBACKS = { + 'c++11': 'c++0x', + 'gnu++11': 'gnu++0x', + 'c++14': 'c++1y', + 'gnu++14': 'gnu++1y', + 'c++17': 'c++1z', + 'gnu++17': 'gnu++1z', + 'c++20': 'c++2a', + 'gnu++20': 'gnu++2a', + } + + # Currently, remapping is only supported for Clang, Elbrus and GCC + assert(self.id in frozenset(['clang', 'lcc', 'gcc', 'emscripten'])) + + if cpp_std not in CPP_FALLBACKS: + # 'c++03' and 'c++98' don't have fallback types + return '-std=' + cpp_std + + for i in (cpp_std, CPP_FALLBACKS[cpp_std]): + cpp_std_value = '-std=' + i + if self._test_cpp_std_arg(cpp_std_value): + return cpp_std_value + + raise MesonException(f'C++ Compiler does not support -std={cpp_std}') + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key: coredata.UserComboOption( + 'C++ language standard to use', + ['none'], + 'none', + ), + }) + return opts + + +class ClangCPPCompiler(ClangCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ClangCompiler.__init__(self, defines) + default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('key', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + }) + opts[key.evolve('std')].choices = [ + 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', + 'c++2a', 'c++20', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z', + 'gnu++2a', 'gnu++20', + ] + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + key.evolve('winlibs'): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + if not options[key.evolve('rtti')].value: + args.append('-fno-rtti') + + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. 
+ key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + libs = options[key].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lstdc++'] + + +class AppleClangCPPCompiler(ClangCPPCompiler): + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lc++'] + + +class EmscriptenCPPCompiler(EmscriptenMixin, ClangCPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + if not is_cross: + raise MesonException('Emscripten compiler can only be used for cross compilation.') + ClangCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper=exe_wrapper, linker=linker, + defines=defines, full_version=full_version) + self.id = 'emscripten' + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + return args + + +class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ArmclangCompiler.__init__(self) + default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + }) + opts[key].choices = [ + 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'gnu++98', + 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', + ] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('-std=' + std.value) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + +class GnuCPPCompiler(GnuCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + GnuCompiler.__init__(self, defines) + default_warn_args = 
['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self) -> 'KeyedOptionDictType': + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts = CPPCompiler.get_options(self) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + key.evolve('debugstl'): coredata.UserBooleanOption( + 'STL debug mode', + False, + ) + }) + opts[key].choices = [ + 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', + 'c++2a', 'c++20', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', + 'gnu++1z', 'gnu++2a', 'gnu++20', + ] + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + key.evolve('winlibs'): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + if not options[key.evolve('rtti')].value: + args.append('-fno-rtti') + + if options[key.evolve('debugstl')].value: + args.append('-D_GLIBCXX_DEBUG=1') + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. + key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + libs = options[key].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return ['-fpch-preprocess', '-include', os.path.basename(header)] + + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lstdc++'] + + +class PGICPPCompiler(PGICompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class NvidiaHPC_CPPCompiler(PGICompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + self.id = 'nvidia_hpc' + + +class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = 
None): + GnuCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version, defines=defines) + ElbrusCompiler.__init__(self) + + def get_options(self) -> 'KeyedOptionDictType': + opts = CPPCompiler.get_options(self) + + cpp_stds = [ + 'none', 'c++98', 'c++03', 'c++0x', 'c++11', 'c++14', 'c++1y', + 'gnu++98', 'gnu++03', 'gnu++0x', 'gnu++11', 'gnu++14', 'gnu++1y', + ] + + if version_compare(self.version, '>=1.24.00'): + cpp_stds += [ 'c++1z', 'c++17', 'gnu++1z', 'gnu++17' ] + + if version_compare(self.version, '>=1.25.00'): + cpp_stds += [ 'c++2a', 'gnu++2a' ] + + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('debugstl'): coredata.UserBooleanOption( + 'STL debug mode', + False, + ), + }) + opts[key].choices = cpp_stds + return opts + + # Elbrus C++ compiler does not have lchmod, but there is only linker warning, not compiler error. + # So we should explicitly fail at this case. + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + if funcname == 'lchmod': + return False, False + else: + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) + + # Elbrus C++ compiler does not support RTTI, so don't check for it. + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + if options[key.evolve('debugstl')].value: + args.append('-D_GLIBCXX_DEBUG=1') + return args + + +class IntelCPPCompiler(IntelGnuLikeCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + IntelGnuLikeCompiler.__init__(self) + self.lang_header = 'c++-header' + default_warn_args = ['-Wall', '-w3', '-diag-disable:remark', + '-Wpch-messages', '-Wnon-virtual-dtor'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = CPPCompiler.get_options(self) + # Every Unix compiler under the sun seems to accept -std=c++03, + # with the exception of ICC. 
Instead of preventing the user from + # globally requesting C++03, we transparently remap it to C++98 + c_stds = ['c++98', 'c++03'] + g_stds = ['gnu++98', 'gnu++03'] + if version_compare(self.version, '>=15.0.0'): + c_stds += ['c++11', 'c++14'] + g_stds += ['gnu++11'] + if version_compare(self.version, '>=16.0.0'): + c_stds += ['c++17'] + if version_compare(self.version, '>=17.0.0'): + g_stds += ['gnu++14'] + if version_compare(self.version, '>=19.1.0'): + c_stds += ['c++2a'] + g_stds += ['gnu++2a'] + + + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + key.evolve('debugstl'): coredata.UserBooleanOption('STL debug mode', False), + }) + opts[key].choices = ['none'] + c_stds + g_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + remap_cpp03 = { + 'c++03': 'c++98', + 'gnu++03': 'gnu++98' + } + args.append('-std=' + remap_cpp03.get(std.value, std.value)) + if options[key.evolve('eh')].value == 'none': + args.append('-fno-exceptions') + if not options[key.evolve('rtti')].value: + args.append('-fno-rtti') + if options[key.evolve('debugstl')].value: + args.append('-D_GLIBCXX_DEBUG=1') + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + +class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase): + + """Mixin for C++ specific method overrides in MSVC-like compilers.""" + + VC_VERSION_MAP = { + 'none': (True, None), + 'vc++11': (True, 11), + 'vc++14': (True, 14), + 'vc++17': (True, 17), + 'vc++latest': (True, "latest"), + 'c++11': (False, 11), + 'c++14': (False, 14), + 'c++17': (False, 17), + 'c++latest': (False, "latest"), + } + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + # need a typeddict for this + key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + return T.cast(T.List[str], options[key].value[:]) + + def _get_options_impl(self, opts: 'KeyedOptionDictType', cpp_stds: T.List[str]) -> 'KeyedOptionDictType': + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + key.evolve('winlibs'): coredata.UserArrayOption( + 'Windows libs to link against.', + msvc_winlibs, + ), + }) + opts[key.evolve('std')].choices = cpp_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + + eh = options[key.evolve('eh')] + if eh.value == 'default': + args.append('/EHsc') + elif eh.value == 'none': + args.append('/EHs-c-') + else: + args.append('/EH' + eh.value) + + if not options[key.evolve('rtti')].value: + args.append('/GR-') + + permissive, ver = self.VC_VERSION_MAP[options[key].value] + + if ver is not None: + args.append(f'/std:c++{ver}') + + if not permissive: + args.append('/permissive-') + + return args + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # XXX: this is a 
hack because so much GnuLike stuff is in the base CPPCompiler class. + return Compiler.get_compiler_check_args(self, mode) + + +class CPP11AsCPP14Mixin(CompilerMixinBase): + + """Mixin class for VisualStudio and ClangCl to replace C++11 std with C++14. + + This is a limitation of Clang and MSVC that ICL doesn't share. + """ + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + # Note: there is no explicit flag for supporting C++11; we attempt to do the best we can + # which means setting the C++ standard version to C++14, in compilers that support it + # (i.e., after VS2015U3) + # if one is using anything before that point, one cannot set the standard. + key = OptionKey('std', machine=self.for_machine, lang=self.language) + if options[key].value in {'vc++11', 'c++11'}: + mlog.warning(self.id, 'does not support C++11;', + 'attempting best effort; setting the standard to C++14', once=True) + # Don't mutate anything we're going to change, we need to use + # deepcopy since we're messing with members, and we can't simply + # copy the members because the option proxy doesn't support it. + options = copy.deepcopy(options) + if options[key].value == 'vc++11': + options[key].value = 'vc++14' + else: + options[key].value = 'c++14' + return super().get_option_compile_args(options) + + +class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, MSVCCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + MSVCCompiler.__init__(self, target) + self.id = 'msvc' + + def get_options(self) -> 'KeyedOptionDictType': + cpp_stds = ['none', 'c++11', 'vc++11'] + # Visual Studio 2015 and later + if version_compare(self.version, '>=19'): + cpp_stds.extend(['c++14', 'c++latest', 'vc++latest']) + # Visual Studio 2017 and later + if version_compare(self.version, '>=19.11'): + cpp_stds.extend(['vc++14', 'c++17', 'vc++17']) + return self._get_options_impl(super().get_options(), cpp_stds) + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + key = OptionKey('std', machine=self.for_machine, lang=self.language) + if options[key].value != 'none' and version_compare(self.version, '<19.00.24210'): + mlog.warning('This version of MSVC does not support cpp_std arguments') + options = copy.copy(options) + options[key].value = 'none' + + args = super().get_option_compile_args(options) + + if version_compare(self.version, '<19.11'): + try: + i = args.index('/permissive-') + except ValueError: + return args + del args[i] + return args + +class ClangClCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, ClangClCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ClangClCompiler.__init__(self, target) + self.id = 'clang-cl' + + def get_options(self) -> 'KeyedOptionDictType': + 
cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest'] + return self._get_options_impl(super().get_options(), cpp_stds) + + +class IntelClCPPCompiler(VisualStudioLikeCPPCompilerMixin, IntelVisualStudioLikeCompiler, CPPCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + IntelVisualStudioLikeCompiler.__init__(self, target) + + def get_options(self) -> 'KeyedOptionDictType': + # This has only been tested with version 19.0, + cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest'] + return self._get_options_impl(super().get_options(), cpp_stds) + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class. + return IntelVisualStudioLikeCompiler.get_compiler_check_args(self, mode) + + +class ArmCPPCompiler(ArmCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ArmCompiler.__init__(self) + + def get_options(self) -> 'KeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c++03', 'c++11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value == 'c++11': + args.append('--cpp11') + elif std.value == 'c++03': + args.append('--cpp') + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + return [] + + +class CcrxCPPCompiler(CcrxCompiler, CPPCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + CcrxCompiler.__init__(self) + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return ['-nologo', '-lang=cpp'] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-output=obj=%s' % target] + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + return [] + +class C2000CPPCompiler(C2000Compiler, CPPCompiler): + def __init__(self, exelist: 
T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + C2000Compiler.__init__(self) + + def get_options(self) -> 'KeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c++03'] + return opts + + def get_always_args(self) -> T.List[str]: + return ['-nologo', '-lang=cpp'] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-output=obj=%s' % target] + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + return [] diff --git a/meson/mesonbuild/compilers/cs.py b/meson/mesonbuild/compilers/cs.py new file mode 100644 index 000000000..7ebb66def --- /dev/null +++ b/meson/mesonbuild/compilers/cs.py @@ -0,0 +1,150 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
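# Illustrative sketch of the pattern the embedded-toolchain C++ classes above
# (ArmCPPCompiler, CcrxCPPCompiler, C2000CPPCompiler) follow: narrow the
# permitted 'std' values, then translate the chosen value into the toolchain's
# own flag. This is a standalone simplification; the helper name and the
# plain-string interface are assumptions, not Meson API.
import typing as T

_ARMCC_CPP_STD_FLAGS: T.Dict[str, T.List[str]] = {
    'none': [],            # leave the compiler on its default dialect
    'c++03': ['--cpp'],    # armcc selects C++03 mode with --cpp
    'c++11': ['--cpp11'],  # and C++11 mode with --cpp11
}

def armcc_cpp_std_args(std: str) -> T.List[str]:
    """Return armcc flags for a requested C++ standard, mirroring get_option_compile_args above."""
    if std not in _ARMCC_CPP_STD_FLAGS:
        raise ValueError(f'armcc does not support C++ standard {std!r}')
    return list(_ARMCC_CPP_STD_FLAGS[std])

if __name__ == '__main__':
    assert armcc_cpp_std_args('c++11') == ['--cpp11']
    assert armcc_cpp_std_args('none') == []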
+ +import os.path, subprocess +import textwrap +import typing as T + +from ..mesonlib import EnvironmentException +from ..linkers import RSPFileSyntax + +from .compilers import Compiler, MachineChoice, mono_buildtype_args +from .mixins.islinker import BasicLinkerIsCompilerMixin + +if T.TYPE_CHECKING: + from ..envconfig import MachineInfo + from ..environment import Environment + +cs_optimization_args = {'0': [], + 'g': [], + '1': ['-optimize+'], + '2': ['-optimize+'], + '3': ['-optimize+'], + 's': ['-optimize+'], + } # type: T.Dict[str, T.List[str]] + + +class CsCompiler(BasicLinkerIsCompilerMixin, Compiler): + + language = 'cs' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', comp_id: str, runner: T.Optional[str] = None): + super().__init__(exelist, version, for_machine, info) + self.id = comp_id + self.runner = runner + + @classmethod + def get_display_language(cls) -> str: + return 'C sharp' + + def get_always_args(self) -> T.List[str]: + return ['/nologo'] + + def get_linker_always_args(self) -> T.List[str]: + return ['/nologo'] + + def get_output_args(self, fname: str) -> T.List[str]: + return ['-out:' + fname] + + def get_link_args(self, fname: str) -> T.List[str]: + return ['-r:' + fname] + + def get_werror_args(self) -> T.List[str]: + return ['-warnaserror'] + + def get_pic_args(self) -> T.List[str]: + return [] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + if i[:5] == '-lib:': + parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:])) + + return parameter_list + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def get_pch_name(self, header_name: str) -> str: + return '' + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + src = 'sanity.cs' + obj = 'sanity.exe' + source_name = os.path.join(work_dir, src) + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write(textwrap.dedent(''' + public class Sanity { + static public void Main () { + } + } + ''')) + pc = subprocess.Popen(self.exelist + self.get_always_args() + [src], cwd=work_dir) + pc.wait() + if pc.returncode != 0: + raise EnvironmentException('C# compiler %s can not compile programs.' % self.name_string()) + if self.runner: + cmdlist = [self.runner, obj] + else: + cmdlist = [os.path.join(work_dir, obj)] + pe = subprocess.Popen(cmdlist, cwd=work_dir) + pe.wait() + if pe.returncode != 0: + raise EnvironmentException('Executables created by Mono compiler %s are not runnable.' 
% self.name_string()) + + def needs_static_linker(self) -> bool: + return False + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return mono_buildtype_args[buildtype] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return ['-debug'] if is_debug else [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return cs_optimization_args[optimization_level] + + +class MonoCompiler(CsCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo'): + super().__init__(exelist, version, for_machine, info, 'mono', + runner='mono') + + def rsp_file_syntax(self) -> 'RSPFileSyntax': + return RSPFileSyntax.GCC + + +class VisualStudioCsCompiler(CsCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo'): + super().__init__(exelist, version, for_machine, info, 'csc') + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + res = mono_buildtype_args[buildtype] + if not self.info.is_windows(): + tmp = [] + for flag in res: + if flag == '-debug': + flag = '-debug:portable' + tmp.append(flag) + res = tmp + return res + + def rsp_file_syntax(self) -> 'RSPFileSyntax': + return RSPFileSyntax.MSVC diff --git a/meson/mesonbuild/compilers/cuda.py b/meson/mesonbuild/compilers/cuda.py new file mode 100644 index 000000000..36da833be --- /dev/null +++ b/meson/mesonbuild/compilers/cuda.py @@ -0,0 +1,760 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import enum +import os.path +import string +import typing as T + +from .. import coredata +from .. import mlog +from ..mesonlib import ( + EnvironmentException, MachineChoice, Popen_safe, OptionOverrideProxy, + is_windows, LibType, OptionKey, +) +from .compilers import (Compiler, cuda_buildtype_args, cuda_optimization_args, + cuda_debug_args) + +if T.TYPE_CHECKING: + from ..build import BuildTarget + from ..coredata import KeyedOptionDictType + from ..dependencies import Dependency + from ..environment import Environment # noqa: F401 + from ..envconfig import MachineInfo + from ..linkers import DynamicLinker + from ..programs import ExternalProgram + + +class _Phase(enum.Enum): + + COMPILER = 'compiler' + LINKER = 'linker' + + +class CudaCompiler(Compiler): + + LINKER_PREFIX = '-Xlinker=' + language = 'cuda' + + # NVCC flags taking no arguments. 
+ _FLAG_PASSTHRU_NOARGS = { + # NVCC --long-option, NVCC -short-option CUDA Toolkit 11.2.1 Reference + '--objdir-as-tempdir', '-objtemp', # 4.2.1.2 + '--generate-dependency-targets', '-MP', # 4.2.1.12 + '--allow-unsupported-compiler', '-allow-unsupported-compiler', # 4.2.1.14 + '--link', # 4.2.2.1 + '--lib', '-lib', # 4.2.2.2 + '--device-link', '-dlink', # 4.2.2.3 + '--device-c', '-dc', # 4.2.2.4 + '--device-w', '-dw', # 4.2.2.5 + '--cuda', '-cuda', # 4.2.2.6 + '--compile', '-c', # 4.2.2.7 + '--fatbin', '-fatbin', # 4.2.2.8 + '--cubin', '-cubin', # 4.2.2.9 + '--ptx', '-ptx', # 4.2.2.10 + '--preprocess', '-E', # 4.2.2.11 + '--generate-dependencies', '-M', # 4.2.2.12 + '--generate-nonsystem-dependencies', '-MM', # 4.2.2.13 + '--generate-dependencies-with-compile', '-MD', # 4.2.2.14 + '--generate-nonsystem-dependencies-with-compile', '-MMD', # 4.2.2.15 + '--run', # 4.2.2.16 + '--profile', '-pg', # 4.2.3.1 + '--debug', '-g', # 4.2.3.2 + '--device-debug', '-G', # 4.2.3.3 + '--extensible-whole-program', '-ewp', # 4.2.3.4 + '--generate-line-info', '-lineinfo', # 4.2.3.5 + '--dlink-time-opt', '-dlto', # 4.2.3.8 + '--no-exceptions', '-noeh', # 4.2.3.11 + '--shared', '-shared', # 4.2.3.12 + '--no-host-device-initializer-list', '-nohdinitlist', # 4.2.3.15 + '--expt-relaxed-constexpr', '-expt-relaxed-constexpr', # 4.2.3.16 + '--extended-lambda', '-extended-lambda', # 4.2.3.17 + '--expt-extended-lambda', '-expt-extended-lambda', # 4.2.3.18 + '--m32', '-m32', # 4.2.3.20 + '--m64', '-m64', # 4.2.3.21 + '--forward-unknown-to-host-compiler', '-forward-unknown-to-host-compiler', # 4.2.5.1 + '--forward-unknown-to-host-linker', '-forward-unknown-to-host-linker', # 4.2.5.2 + '--dont-use-profile', '-noprof', # 4.2.5.3 + '--dryrun', '-dryrun', # 4.2.5.5 + '--verbose', '-v', # 4.2.5.6 + '--keep', '-keep', # 4.2.5.7 + '--save-temps', '-save-temps', # 4.2.5.9 + '--clean-targets', '-clean', # 4.2.5.10 + '--no-align-double', # 4.2.5.16 + '--no-device-link', '-nodlink', # 4.2.5.17 + '--allow-unsupported-compiler', '-allow-unsupported-compiler', # 4.2.5.18 + '--use_fast_math', '-use_fast_math', # 4.2.7.7 + '--extra-device-vectorization', '-extra-device-vectorization', # 4.2.7.12 + '--compile-as-tools-patch', '-astoolspatch', # 4.2.7.13 + '--keep-device-functions', '-keep-device-functions', # 4.2.7.14 + '--disable-warnings', '-w', # 4.2.8.1 + '--source-in-ptx', '-src-in-ptx', # 4.2.8.2 + '--restrict', '-restrict', # 4.2.8.3 + '--Wno-deprecated-gpu-targets', '-Wno-deprecated-gpu-targets', # 4.2.8.4 + '--Wno-deprecated-declarations', '-Wno-deprecated-declarations', # 4.2.8.5 + '--Wreorder', '-Wreorder', # 4.2.8.6 + '--Wdefault-stream-launch', '-Wdefault-stream-launch', # 4.2.8.7 + '--Wext-lambda-captures-this', '-Wext-lambda-captures-this', # 4.2.8.8 + '--display-error-number', '-err-no', # 4.2.8.10 + '--resource-usage', '-res-usage', # 4.2.8.14 + '--help', '-h', # 4.2.8.15 + '--version', '-V', # 4.2.8.16 + '--list-gpu-code', '-code-ls', # 4.2.8.20 + '--list-gpu-arch', '-arch-ls', # 4.2.8.21 + } + # Dictionary of NVCC flags taking either one argument or a comma-separated list. + # Maps --long to -short options, because the short options are more GCC-like. 
+ _FLAG_LONG2SHORT_WITHARGS = { + '--output-file': '-o', # 4.2.1.1 + '--pre-include': '-include', # 4.2.1.3 + '--library': '-l', # 4.2.1.4 + '--define-macro': '-D', # 4.2.1.5 + '--undefine-macro': '-U', # 4.2.1.6 + '--include-path': '-I', # 4.2.1.7 + '--system-include': '-isystem', # 4.2.1.8 + '--library-path': '-L', # 4.2.1.9 + '--output-directory': '-odir', # 4.2.1.10 + '--dependency-output': '-MF', # 4.2.1.11 + '--compiler-bindir': '-ccbin', # 4.2.1.13 + '--archiver-binary': '-arbin', # 4.2.1.15 + '--cudart': '-cudart', # 4.2.1.16 + '--cudadevrt': '-cudadevrt', # 4.2.1.17 + '--libdevice-directory': '-ldir', # 4.2.1.18 + '--target-directory': '-target-dir', # 4.2.1.19 + '--optimization-info': '-opt-info', # 4.2.3.6 + '--optimize': '-O', # 4.2.3.7 + '--ftemplate-backtrace-limit': '-ftemplate-backtrace-limit', # 4.2.3.9 + '--ftemplate-depth': '-ftemplate-depth', # 4.2.3.10 + '--x': '-x', # 4.2.3.13 + '--std': '-std', # 4.2.3.14 + '--machine': '-m', # 4.2.3.19 + '--compiler-options': '-Xcompiler', # 4.2.4.1 + '--linker-options': '-Xlinker', # 4.2.4.2 + '--archive-options': '-Xarchive', # 4.2.4.3 + '--ptxas-options': '-Xptxas', # 4.2.4.4 + '--nvlink-options': '-Xnvlink', # 4.2.4.5 + '--threads': '-t', # 4.2.5.4 + '--keep-dir': '-keep-dir', # 4.2.5.8 + '--run-args': '-run-args', # 4.2.5.11 + '--input-drive-prefix': '-idp', # 4.2.5.12 + '--dependency-drive-prefix': '-ddp', # 4.2.5.13 + '--drive-prefix': '-dp', # 4.2.5.14 + '--dependency-target-name': '-MT', # 4.2.5.15 + '--default-stream': '-default-stream', # 4.2.6.1 + '--gpu-architecture': '-arch', # 4.2.7.1 + '--gpu-code': '-code', # 4.2.7.2 + '--generate-code': '-gencode', # 4.2.7.3 + '--relocatable-device-code': '-rdc', # 4.2.7.4 + '--entries': '-e', # 4.2.7.5 + '--maxrregcount': '-maxrregcount', # 4.2.7.6 + '--ftz': '-ftz', # 4.2.7.8 + '--prec-div': '-prec-div', # 4.2.7.9 + '--prec-sqrt': '-prec-sqrt', # 4.2.7.10 + '--fmad': '-fmad', # 4.2.7.11 + '--Werror': '-Werror', # 4.2.8.9 + '--diag-error': '-diag-error', # 4.2.8.11 + '--diag-suppress': '-diag-suppress', # 4.2.8.12 + '--diag-warn': '-diag-warn', # 4.2.8.13 + '--options-file': '-optf', # 4.2.8.17 + '--time': '-time', # 4.2.8.18 + '--qpp-config': '-qpp-config', # 4.2.8.19 + } + # Reverse map -short to --long options. + _FLAG_SHORT2LONG_WITHARGS = {v:k for k,v in _FLAG_LONG2SHORT_WITHARGS.items()} + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, exe_wrapper: T.Optional['ExternalProgram'], + host_compiler: Compiler, info: 'MachineInfo', + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + super().__init__(exelist, version, for_machine, info, linker=linker, full_version=full_version, is_cross=is_cross) + self.exe_wrapper = exe_wrapper + self.host_compiler = host_compiler + self.base_options = host_compiler.base_options + self.id = 'nvcc' + self.warn_args = {level: self._to_host_flags(flags) for level, flags in host_compiler.warn_args.items()} + + @classmethod + def _shield_nvcc_list_arg(cls, arg: str, listmode: bool=True) -> str: + r""" + Shield an argument against both splitting by NVCC's list-argument + parse logic, and interpretation by any shell. + + NVCC seems to consider every comma , that is neither escaped by \ nor inside + a double-quoted string a split-point. Single-quotes do not provide protection + against splitting; In fact, after splitting they are \-escaped. Unfortunately, + double-quotes don't protect against shell expansion. 
What follows is a + complex dance to accomodate everybody. + """ + + SQ = "'" + DQ = '"' + CM = "," + BS = "\\" + DQSQ = DQ+SQ+DQ + quotable = set(string.whitespace+'"$`\\') + + if CM not in arg or not listmode: + if SQ not in arg: + # If any of the special characters "$`\ or whitespace are present, single-quote. + # Otherwise return bare. + if set(arg).intersection(quotable): + return SQ+arg+SQ + else: + return arg # Easy case: no splits, no quoting. + else: + # There are single quotes. Double-quote them, and single-quote the + # strings between them. + l = [cls._shield_nvcc_list_arg(s) for s in arg.split(SQ)] + l = sum([[s, DQSQ] for s in l][:-1], []) # Interleave l with DQSQs + return ''.join(l) + else: + # A comma is present, and list mode was active. + # We apply (what we guess is) the (primitive) NVCC splitting rule: + l = [''] + instring = False + argit = iter(arg) + for c in argit: + if c == CM and not instring: + l.append('') + elif c == DQ: + l[-1] += c + instring = not instring + elif c == BS: + try: + l[-1] += next(argit) + except StopIteration: + break + else: + l[-1] += c + + # Shield individual strings, without listmode, then return them with + # escaped commas between them. + l = [cls._shield_nvcc_list_arg(s, listmode=False) for s in l] + return r'\,'.join(l) + + @classmethod + def _merge_flags(cls, flags: T.List[str]) -> T.List[str]: + r""" + The flags to NVCC gets exceedingly verbose and unreadable when too many of them + are shielded with -Xcompiler. Merge consecutive -Xcompiler-wrapped arguments + into one. + """ + if len(flags) <= 1: + return flags + flagit = iter(flags) + xflags = [] + + def is_xcompiler_flag_isolated(flag: str) -> bool: + return flag == '-Xcompiler' + def is_xcompiler_flag_glued(flag: str) -> bool: + return flag.startswith('-Xcompiler=') + def is_xcompiler_flag(flag: str) -> bool: + return is_xcompiler_flag_isolated(flag) or is_xcompiler_flag_glued(flag) + def get_xcompiler_val(flag: str, flagit: T.Iterator[str]) -> str: + if is_xcompiler_flag_glued(flag): + return flag[len('-Xcompiler='):] + else: + try: + return next(flagit) + except StopIteration: + return "" + + ingroup = False + for flag in flagit: + if not is_xcompiler_flag(flag): + ingroup = False + xflags.append(flag) + elif ingroup: + xflags[-1] += ',' + xflags[-1] += get_xcompiler_val(flag, flagit) + elif is_xcompiler_flag_isolated(flag): + ingroup = True + xflags.append(flag) + xflags.append(get_xcompiler_val(flag, flagit)) + elif is_xcompiler_flag_glued(flag): + ingroup = True + xflags.append(flag) + else: + raise ValueError("-Xcompiler flag merging failed, unknown argument form!") + return xflags + + @classmethod + def _to_host_flags(cls, flags: T.List[str], phase: _Phase = _Phase.COMPILER) -> T.List[str]: + """ + Translate generic "GCC-speak" plus particular "NVCC-speak" flags to NVCC flags. + + NVCC's "short" flags have broad similarities to the GCC standard, but have + gratuitous, irritating differences. + """ + + xflags = [] + flagit = iter(flags) + + for flag in flagit: + # The CUDA Toolkit Documentation, in 4.1. Command Option Types and Notation, + # specifies that NVCC does not parse the standard flags as GCC does. It has + # its own strategy, to wit: + # + # nvcc recognizes three types of command options: boolean options, single + # value options, and list options. + # + # Boolean options do not have an argument; they are either specified on a + # command line or not. Single value options must be specified at most once, + # and list options may be repeated. 
Examples of each of these option types + # are, respectively: --verbose (switch to verbose mode), --output-file + # (specify output file), and --include-path (specify include path). + # + # Single value options and list options must have arguments, which must + # follow the name of the option itself by either one of more spaces or an + # equals character. When a one-character short name such as -I, -l, and -L + # is used, the value of the option may also immediately follow the option + # itself without being seperated by spaces or an equal character. The + # individual values of list options may be separated by commas in a single + # instance of the option, or the option may be repeated, or any + # combination of these two cases. + # + # One strange consequence of this choice is that directory and filenames that + # contain commas (',') cannot be passed to NVCC (at least, not as easily as + # in GCC). Another strange consequence is that it is legal to supply flags + # such as + # + # -lpthread,rt,dl,util + # -l pthread,rt,dl,util + # -l=pthread,rt,dl,util + # + # and each of the above alternatives is equivalent to GCC-speak + # + # -lpthread -lrt -ldl -lutil + # -l pthread -l rt -l dl -l util + # -l=pthread -l=rt -l=dl -l=util + # + # *With the exception of commas in the name*, GCC-speak for these list flags + # is a strict subset of NVCC-speak, so we passthrough those flags. + # + # The -D macro-define flag is documented as somehow shielding commas from + # splitting a definition. Balanced parentheses, braces and single-quotes + # around the comma are not sufficient, but balanced double-quotes are. The + # shielding appears to work with -l, -I, -L flags as well, for instance. + # + # Since our goal is to replicate GCC-speak as much as possible, we check for + # commas in all list-arguments and shield them with double-quotes. We make + # an exception for -D (where this would be value-changing) and -U (because + # it isn't possible to define a macro with a comma in the name). + + if flag in cls._FLAG_PASSTHRU_NOARGS: + xflags.append(flag) + continue + + + # Handle breakup of flag-values into a flag-part and value-part. + if flag[:1] not in '-/': + # This is not a flag. It's probably a file input. Pass it through. + xflags.append(flag) + continue + elif flag[:1] == '/': + # This is ambiguously either an MVSC-style /switch or an absolute path + # to a file. For some magical reason the following works acceptably in + # both cases. + wrap = '"' if ',' in flag else '' + xflags.append(f'-X{phase.value}={wrap}{flag}{wrap}') + continue + elif len(flag) >= 2 and flag[0] == '-' and flag[1] in 'IDULlmOxmte': + # This is a single-letter short option. These options (with the + # exception of -o) are allowed to receive their argument with neither + # space nor = sign before them. Detect and separate them in that event. + if flag[2:3] == '': # -I something + try: + val = next(flagit) + except StopIteration: + pass + elif flag[2:3] == '=': # -I=something + val = flag[3:] + else: # -Isomething + val = flag[2:] + flag = flag[:2] # -I + elif flag in cls._FLAG_LONG2SHORT_WITHARGS or \ + flag in cls._FLAG_SHORT2LONG_WITHARGS: + # This is either -o or a multi-letter flag, and it is receiving its + # value isolated. + try: + val = next(flagit) # -o something + except StopIteration: + pass + elif flag.split('=',1)[0] in cls._FLAG_LONG2SHORT_WITHARGS or \ + flag.split('=',1)[0] in cls._FLAG_SHORT2LONG_WITHARGS: + # This is either -o or a multi-letter flag, and it is receiving its + # value after an = sign. 
+ flag, val = flag.split('=',1) # -o=something + else: + # This is a flag, and it's foreign to NVCC. + # + # We do not know whether this GCC-speak flag takes an isolated + # argument. Assuming it does not (the vast majority indeed don't), + # wrap this argument in an -Xcompiler flag and send it down to NVCC. + if flag == '-ffast-math': + xflags.append('-use_fast_math') + xflags.append('-Xcompiler='+flag) + elif flag == '-fno-fast-math': + xflags.append('-ftz=false') + xflags.append('-prec-div=true') + xflags.append('-prec-sqrt=true') + xflags.append('-Xcompiler='+flag) + elif flag == '-freciprocal-math': + xflags.append('-prec-div=false') + xflags.append('-Xcompiler='+flag) + elif flag == '-fno-reciprocal-math': + xflags.append('-prec-div=true') + xflags.append('-Xcompiler='+flag) + else: + xflags.append('-Xcompiler='+cls._shield_nvcc_list_arg(flag)) + # The above should securely handle GCC's -Wl, -Wa, -Wp, arguments. + continue + + + assert val is not None # Should only trip if there is a missing argument. + + + # Take care of the various NVCC-supported flags that need special handling. + flag = cls._FLAG_LONG2SHORT_WITHARGS.get(flag,flag) + + if flag in {'-include','-isystem','-I','-L','-l'}: + # These flags are known to GCC, but list-valued in NVCC. They potentially + # require double-quoting to prevent NVCC interpreting the flags as lists + # when GCC would not have done so. + # + # We avoid doing this quoting for -D to avoid redefining macros and for + # -U because it isn't possible to define a macro with a comma in the name. + # -U with comma arguments is impossible in GCC-speak (and thus unambiguous + #in NVCC-speak, albeit unportable). + if len(flag) == 2: + xflags.append(flag+cls._shield_nvcc_list_arg(val)) + else: + xflags.append(flag) + xflags.append(cls._shield_nvcc_list_arg(val)) + elif flag == '-O': + # Handle optimization levels GCC knows about that NVCC does not. + if val == 'fast': + xflags.append('-O3') + xflags.append('-use_fast_math') + xflags.append('-Xcompiler') + xflags.append(flag+val) + elif val in {'s', 'g', 'z'}: + xflags.append('-Xcompiler') + xflags.append(flag+val) + else: + xflags.append(flag+val) + elif flag in {'-D', '-U', '-m', '-t'}: + xflags.append(flag+val) # For style, keep glued. + elif flag in {'-std'}: + xflags.append(flag+'='+val) # For style, keep glued. + else: + xflags.append(flag) + xflags.append(val) + + return cls._merge_flags(xflags) + + def needs_static_linker(self) -> bool: + return False + + def thread_link_flags(self, environment: 'Environment') -> T.List[str]: + return self._to_host_flags(self.host_compiler.thread_link_flags(environment), _Phase.LINKER) + + def sanity_check(self, work_dir: str, env: 'Environment') -> None: + mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist)) + mlog.debug('Is cross compiler: %s.' 
% str(self.is_cross)) + + sname = 'sanitycheckcuda.cu' + code = r''' + #include + #include + + __global__ void kernel (void) {} + + int main(void){ + struct cudaDeviceProp prop; + int count, i; + cudaError_t ret = cudaGetDeviceCount(&count); + if(ret != cudaSuccess){ + fprintf(stderr, "%d\n", (int)ret); + }else{ + for(i=0;i T.Tuple[bool, bool]: + if extra_args is None: + extra_args = [] + fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} + # Check if it's a C-like symbol + t = '''{prefix} + #include <{header}> + int main(void) {{ + /* If it's not defined as a macro, try to use as a symbol */ + #ifndef {symbol} + {symbol}; + #endif + return 0; + }}''' + found, cached = self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies) + if found: + return True, cached + # Check if it's a class or a template + t = '''{prefix} + #include <{header}> + using {symbol}; + int main(void) {{ + return 0; + }}''' + return self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies) + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + std_key = OptionKey('std', machine=self.for_machine, lang=self.language) + ccbindir_key = OptionKey('ccbindir', machine=self.for_machine, lang=self.language) + opts.update({ + std_key: coredata.UserComboOption('C++ language standard to use with CUDA', + ['none', 'c++03', 'c++11', 'c++14', 'c++17'], 'none'), + ccbindir_key: coredata.UserStringOption('CUDA non-default toolchain directory to use (-ccbin)', + ''), + }) + return opts + + def _to_host_compiler_options(self, options: 'KeyedOptionDictType') -> 'KeyedOptionDictType': + """ + Convert an NVCC Option set to a host compiler's option set. + """ + + # We must strip the -std option from the host compiler option set, as NVCC has + # its own -std flag that may not agree with the host compiler's. 
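# A minimal sketch, using plain dicts rather than Meson's KeyedOptionDictType /
# OptionOverrideProxy, of the idea stated in the comment above: when the host
# compiler's options are reused for NVCC's host pass, the host 'std' entry is
# forced to 'none' so it cannot conflict with NVCC's own --std= flag.
def strip_host_std(host_options: dict) -> dict:
    """Return a copy of the host options with the C++ standard overridden to 'none'."""
    overridden = dict(host_options)
    overridden['std'] = 'none'
    return overridden

assert strip_host_std({'std': 'c++17', 'werror': True}) == {'std': 'none', 'werror': True}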
+ host_options = {key: options.get(key, opt) for key, opt in self.host_compiler.get_options().items()} + std_key = OptionKey('std', machine=self.for_machine, lang=self.host_compiler.language) + overrides = {std_key: 'none'} + return OptionOverrideProxy(overrides, host_options) + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = self.get_ccbin_args(options) + # On Windows, the version of the C++ standard used by nvcc is dictated by + # the combination of CUDA version and MSVC version; the --std= is thus ignored + # and attempting to use it will result in a warning: https://stackoverflow.com/a/51272091/741027 + if not is_windows(): + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('--std=' + std.value) + + return args + self._to_host_flags(self.host_compiler.get_option_compile_args(self._to_host_compiler_options(options))) + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = self.get_ccbin_args(options) + return args + self._to_host_flags(self.host_compiler.get_option_link_args(self._to_host_compiler_options(options)), _Phase.LINKER) + + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str], + is_shared_module: bool) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_soname_args( + env, prefix, shlib_name, suffix, soversion, darwin_versions, + is_shared_module), _Phase.LINKER) + + def get_compile_only_args(self) -> T.List[str]: + return ['-c'] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + # alternatively, consider simply redirecting this to the host compiler, which would + # give us more control over options like "optimize for space" (which nvcc doesn't support): + # return self._to_host_flags(self.host_compiler.get_optimization_args(optimization_level)) + return cuda_optimization_args[optimization_level] + + def sanitizer_compile_args(self, value: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.sanitizer_compile_args(value)) + + def sanitizer_link_args(self, value: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.sanitizer_link_args(value)) + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return cuda_debug_args[is_debug] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror=cross-execution-space-call,deprecated-declarations,reorder'] + + def get_warn_args(self, level: str) -> T.List[str]: + return self.warn_args[level] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + # nvcc doesn't support msvc's "Edit and Continue" PDB format; "downgrade" to + # a regular PDB to avoid cl's warning to that effect (D9025 : overriding '/ZI' with '/Zi') + host_args = ['/Zi' if arg == '/ZI' else arg for arg in self.host_compiler.get_buildtype_args(buildtype)] + return cuda_buildtype_args[buildtype] + self._to_host_flags(host_args) + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' 
+ return ['-isystem=' + path] if is_system else ['-I' + path] + + def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_compile_debugfile_args(rel_obj, pch)) + + def get_link_debugfile_args(self, targetfile: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_link_debugfile_args(targetfile), _Phase.LINKER) + + def get_depfile_suffix(self) -> str: + return 'd' + + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_buildtype_linker_args(buildtype), _Phase.LINKER) + + def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, + rpath_paths: str, build_rpath: str, + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + (rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + return (self._to_host_flags(rpath_args, _Phase.LINKER), rpath_dirs_to_remove) + + def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]: + return args + + def get_pic_args(self) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_pic_args()) + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + return [] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-o', target] + + def get_std_exe_link_args(self) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_std_exe_link_args(), _Phase.LINKER) + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: + return ['-l' + libname] # FIXME + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_crt_compile_args(crt_val, buildtype)) + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # nvcc defaults to static, release version of msvc runtime and provides no + # native option to override it; override it with /NODEFAULTLIB + host_link_arg_overrides = [] + host_crt_compile_args = self.host_compiler.get_crt_compile_args(crt_val, buildtype) + if any(arg in ['/MDd', '/MD', '/MTd'] for arg in host_crt_compile_args): + host_link_arg_overrides += ['/NODEFAULTLIB:LIBCMT.lib'] + return self._to_host_flags(host_link_arg_overrides + self.host_compiler.get_crt_link_args(crt_val, buildtype), _Phase.LINKER) + + def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]: + return self._to_host_flags(super().get_target_link_args(target), _Phase.LINKER) + + def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: + return self._to_host_flags(super().get_dependency_compile_args(dep)) + + def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: + return self._to_host_flags(super().get_dependency_link_args(dep), _Phase.LINKER) + + def get_ccbin_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + key = OptionKey('ccbindir', machine=self.for_machine, lang=self.language) + ccbindir = options[key].value + if isinstance(ccbindir, str) and ccbindir != '': + return [self._shield_nvcc_list_arg('-ccbin='+ccbindir, False)] + else: + return [] diff --git a/meson/mesonbuild/compilers/cython.py b/meson/mesonbuild/compilers/cython.py new file mode 100644 index 000000000..513f07995 --- /dev/null +++ b/meson/mesonbuild/compilers/cython.py @@ -0,0 +1,79 @@ +# 
SPDX-License-Identifier: Apache-2.0 +# Copyright © 2021 Intel Corporation + +"""Abstraction for Cython language compilers.""" + +import typing as T + +from .. import coredata +from ..mesonlib import EnvironmentException, OptionKey +from .compilers import Compiler + +if T.TYPE_CHECKING: + from ..coredata import KeyedOptionDictType + from ..environment import Environment + + +class CythonCompiler(Compiler): + + """Cython Compiler.""" + + language = 'cython' + id = 'cython' + + def needs_static_linker(self) -> bool: + # We transpile into C, so we don't need any linker + return False + + def get_always_args(self) -> T.List[str]: + return ['--fast-fail'] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-o', outputname] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + # Cython doesn't have optimization levels itself, the underlying + # compiler might though + return [] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'print("hello world")' + with self.cached_compile(code, environment.coredata) as p: + if p.returncode != 0: + raise EnvironmentException(f'Cython compiler {self.id!r} cannot compile programs') + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + # Cython doesn't implement this, but Meson requires an implementation + return [] + + def get_pic_args(self) -> T.List[str]: + # We can lie here, it's fine + return [] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + new: T.List[str] = [] + for i in parameter_list: + new.append(i) + + return new + + def get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + opts.update({ + OptionKey('version', machine=self.for_machine, lang=self.language): coredata.UserComboOption( + 'Python version to target', + ['2', '3'], + '3', + ) + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = options[OptionKey('version', machine=self.for_machine, lang=self.language)] + args.append(f'-{key.value}') + return args diff --git a/meson/mesonbuild/compilers/d.py b/meson/mesonbuild/compilers/d.py new file mode 100644 index 000000000..b5ec905c2 --- /dev/null +++ b/meson/mesonbuild/compilers/d.py @@ -0,0 +1,906 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
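# A small standalone sketch (the helper below is hypothetical, not part of the
# CythonCompiler class above) of how its argument hooks compose: '--fast-fail'
# is always passed, '-Werror' only for warning-as-error builds, and the
# 'version' user option ('2' or '3') becomes a bare '-2' / '-3' flag.
import typing as T

def cython_compile_args(python_version: str = '3', werror: bool = False) -> T.List[str]:
    args = ['--fast-fail']             # get_always_args()
    if werror:
        args.append('-Werror')         # get_werror_args()
    args.append(f'-{python_version}')  # get_option_compile_args() for the 'version' option
    return args

assert cython_compile_args('3', werror=True) == ['--fast-fail', '-Werror', '-3']
assert cython_compile_args('2') == ['--fast-fail', '-2']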
+ +import os.path +import re +import subprocess +import typing as T + +from ..mesonlib import ( + EnvironmentException, MachineChoice, version_compare, OptionKey, is_windows +) + +from ..arglist import CompilerArgs +from ..linkers import RSPFileSyntax +from .compilers import ( + d_dmd_buildtype_args, + d_gdc_buildtype_args, + d_ldc_buildtype_args, + clike_debug_args, + Compiler, +) +from .mixins.gnu import GnuCompiler + +if T.TYPE_CHECKING: + from .compilers import Compiler as CompilerMixinBase + from ..programs import ExternalProgram + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker +else: + CompilerMixinBase = object + +d_feature_args = {'gcc': {'unittest': '-funittest', + 'debug': '-fdebug', + 'version': '-fversion', + 'import_dir': '-J' + }, + 'llvm': {'unittest': '-unittest', + 'debug': '-d-debug', + 'version': '-d-version', + 'import_dir': '-J' + }, + 'dmd': {'unittest': '-unittest', + 'debug': '-debug', + 'version': '-version', + 'import_dir': '-J' + } + } # type: T.Dict[str, T.Dict[str, str]] + +ldc_optimization_args = {'0': [], + 'g': [], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Os'], + } # type: T.Dict[str, T.List[str]] + +dmd_optimization_args = {'0': [], + 'g': [], + '1': ['-O'], + '2': ['-O'], + '3': ['-O'], + 's': ['-O'], + } # type: T.Dict[str, T.List[str]] + + +class DmdLikeCompilerMixin(CompilerMixinBase): + + """Mixin class for DMD and LDC. + + LDC has a number of DMD like arguments, and this class allows for code + sharing between them as makes sense. + """ + + def __init__(self, dmd_frontend_version: T.Optional[str]): + if dmd_frontend_version is None: + self._dmd_has_depfile = False + else: + # -makedeps switch introduced in 2.095 frontend + self._dmd_has_depfile = version_compare(dmd_frontend_version, ">=2.095.0") + + if T.TYPE_CHECKING: + mscrt_args = {} # type: T.Dict[str, T.List[str]] + + def _get_target_arch_args(self) -> T.List[str]: ... 
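# Illustrative sketch (standalone; the function name is hypothetical) of how
# the d_feature_args table above is consumed: pick the row for the compiler id
# ('gcc' for gdc, 'llvm' for ldc, 'dmd' for dmd) and glue the identifier onto
# that front end's spelling of the flag, shown here for 'version' identifiers.
D_VERSION_FLAG = {
    'gcc':  '-fversion',   # gdc
    'llvm': '-d-version',  # ldc
    'dmd':  '-version',    # dmd
}

def d_version_arg(compiler_id: str, identifier: str) -> str:
    """Build e.g. '-fversion=Have_foo' for gdc or '-d-version=Have_foo' for ldc."""
    return f'{D_VERSION_FLAG[compiler_id]}={identifier}'

assert d_version_arg('gcc', 'Have_foo') == '-fversion=Have_foo'
assert d_version_arg('dmd', 'Have_foo') == '-version=Have_foo'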
+ + LINKER_PREFIX = '-L=' + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-of=' + outputname] + + def get_linker_output_args(self, outputname: str) -> T.List[str]: + return ['-of=' + outputname] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + return ['-I=' + path] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:3] == '-I=': + parameter_list[idx] = i[:3] + os.path.normpath(os.path.join(build_dir, i[3:])) + if i[:4] == '-L-L': + parameter_list[idx] = i[:4] + os.path.normpath(os.path.join(build_dir, i[4:])) + if i[:5] == '-L=-L': + parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:])) + if i[:6] == '-Wl,-L': + parameter_list[idx] = i[:6] + os.path.normpath(os.path.join(build_dir, i[6:])) + + return parameter_list + + def get_warn_args(self, level: str) -> T.List[str]: + return ['-wi'] + + def get_werror_args(self) -> T.List[str]: + return ['-w'] + + def get_coverage_args(self) -> T.List[str]: + return ['-cov'] + + def get_coverage_link_args(self) -> T.List[str]: + return [] + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-E'] + + def get_compile_only_args(self) -> T.List[str]: + return ['-c'] + + def get_depfile_suffix(self) -> str: + return 'deps' + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + if self._dmd_has_depfile: + return [f'-makedeps={outfile}'] + return [] + + def get_pic_args(self) -> T.List[str]: + if self.info.is_windows(): + return [] + return ['-fPIC'] + + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + # TODO: using a TypeDict here would improve this + res = [] + # get_feature_args can be called multiple times for the same target when there is generated source + # so we have to copy the kwargs (target.d_features) dict before popping from it + kwargs = kwargs.copy() + if 'unittest' in kwargs: + unittest = kwargs.pop('unittest') + unittest_arg = d_feature_args[self.id]['unittest'] + if not unittest_arg: + raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string()) + if unittest: + res.append(unittest_arg) + + if 'debug' in kwargs: + debug_level = -1 + debugs = kwargs.pop('debug') + if not isinstance(debugs, list): + debugs = [debugs] + + debug_arg = d_feature_args[self.id]['debug'] + if not debug_arg: + raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' % self.name_string()) + + # Parse all debug identifiers and the largest debug level identifier + for d in debugs: + if isinstance(d, int): + if d > debug_level: + debug_level = d + elif isinstance(d, str) and d.isdigit(): + if int(d) > debug_level: + debug_level = int(d) + else: + res.append(f'{debug_arg}={d}') + + if debug_level >= 0: + res.append(f'{debug_arg}={debug_level}') + + if 'versions' in kwargs: + version_level = -1 + versions = kwargs.pop('versions') + if not isinstance(versions, list): + versions = [versions] + + version_arg = d_feature_args[self.id]['version'] + if not version_arg: + raise EnvironmentException('D compiler %s does not support conditional version identifiers.' 
% self.name_string()) + + # Parse all version identifiers and the largest version level identifier + for v in versions: + if isinstance(v, int): + if v > version_level: + version_level = v + elif isinstance(v, str) and v.isdigit(): + if int(v) > version_level: + version_level = int(v) + else: + res.append(f'{version_arg}={v}') + + if version_level >= 0: + res.append(f'{version_arg}={version_level}') + + if 'import_dirs' in kwargs: + import_dirs = kwargs.pop('import_dirs') + if not isinstance(import_dirs, list): + import_dirs = [import_dirs] + + import_dir_arg = d_feature_args[self.id]['import_dir'] + if not import_dir_arg: + raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string()) + for idir_obj in import_dirs: + basedir = idir_obj.get_curdir() + for idir in idir_obj.get_incdirs(): + bldtreedir = os.path.join(basedir, idir) + # Avoid superfluous '/.' at the end of paths when d is '.' + if idir not in ('', '.'): + expdir = bldtreedir + else: + expdir = basedir + srctreedir = os.path.join(build_to_src, expdir) + res.append(f'{import_dir_arg}{srctreedir}') + res.append(f'{import_dir_arg}{bldtreedir}') + + if kwargs: + raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys())) + + return res + + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: + if buildtype != 'plain': + return self._get_target_arch_args() + return [] + + def gen_import_library_args(self, implibname: str) -> T.List[str]: + return self.linker.import_library_args(implibname) + + def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, + rpath_paths: str, build_rpath: str, + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + if self.info.is_windows(): + return ([], set()) + + # GNU ld, solaris ld, and lld acting like GNU ld + if self.linker.id.startswith('ld'): + # The way that dmd and ldc pass rpath to gcc is different than we would + # do directly, each argument -rpath and the value to rpath, need to be + # split into two separate arguments both prefaced with the -L=. + args = [] + (rpath_args, rpath_dirs_to_remove) = super().build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + for r in rpath_args: + if ',' in r: + a, b = r.split(',', maxsplit=1) + args.append(a) + args.append(self.LINKER_PREFIX + b) + else: + args.append(r) + return (args, rpath_dirs_to_remove) + + return super().build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + + def _translate_args_to_nongnu(self, args: T.List[str]) -> T.List[str]: + # Translate common arguments to flags the LDC/DMD compilers + # can understand. + # The flags might have been added by pkg-config files, + # and are therefore out of the user's control. + dcargs = [] + # whether we hit a linker argument that expect another arg + # see the comment in the "-L" section + link_expect_arg = False + link_flags_with_arg = [ + '-rpath', '-soname', '-compatibility_version', '-current_version', + ] + for arg in args: + # Translate OS specific arguments first. + osargs = [] # type: T.List[str] + if self.info.is_windows(): + osargs = self.translate_arg_to_windows(arg) + elif self.info.is_darwin(): + osargs = self._translate_arg_to_osx(arg) + if osargs: + dcargs.extend(osargs) + continue + + # Translate common D arguments here. 
+ if arg == '-pthread': + continue + if arg.startswith('-fstack-protector'): + continue + if arg.startswith('-D'): + continue + if arg.startswith('-Wl,'): + # Translate linker arguments here. + linkargs = arg[arg.index(',') + 1:].split(',') + for la in linkargs: + dcargs.append('-L=' + la.strip()) + continue + elif arg.startswith(('-link-defaultlib', '-linker', '-link-internally', '-linkonce-templates', '-lib')): + # these are special arguments to the LDC linker call, + # arguments like "-link-defaultlib-shared" do *not* + # denote a library to be linked, but change the default + # Phobos/DRuntime linking behavior, while "-linker" sets the + # default linker. + dcargs.append(arg) + continue + elif arg.startswith('-l'): + # translate library link flag + dcargs.append('-L=' + arg) + continue + elif arg.startswith('-isystem'): + # translate -isystem system include path + # this flag might sometimes be added by C library Cflags via + # pkg-config. + # NOTE: -isystem and -I are not 100% equivalent, so this is just + # a workaround for the most common cases. + if arg.startswith('-isystem='): + dcargs.append('-I=' + arg[9:]) + else: + dcargs.append('-I' + arg[8:]) + continue + elif arg.startswith('-idirafter'): + # same as -isystem, but appends the path instead + if arg.startswith('-idirafter='): + dcargs.append('-I=' + arg[11:]) + else: + dcargs.append('-I' + arg[10:]) + continue + elif arg.startswith('-L'): + # The D linker expect library search paths in the form of -L=-L/path (the '=' is optional). + # + # This function receives a mix of arguments already prepended + # with -L for the D linker driver and other linker arguments. + # The arguments starting with -L can be: + # - library search path (with or without a second -L) + # - it can come from pkg-config (a single -L) + # - or from the user passing linker flags (-L-L would be expected) + # - arguments like "-L=-rpath" that expect a second argument (also prepended with -L) + # - arguments like "-L=@rpath/xxx" without a second argument (on Apple platform) + # - arguments like "-L=/SUBSYSTEM:CONSOLE (for Windows linker) + # + # The logic that follows trys to detect all these cases (some may be missing) + # in order to prepend a -L only for the library search paths with a single -L + + if arg.startswith('-L='): + suffix = arg[3:] + else: + suffix = arg[2:] + + if link_expect_arg: + # flags like rpath and soname expect a path or filename respectively, + # we must not alter it (i.e. prefixing with -L for a lib search path) + dcargs.append(arg) + link_expect_arg = False + continue + + if suffix in link_flags_with_arg: + link_expect_arg = True + + if suffix.startswith('-') or suffix.startswith('@'): + # this is not search path + dcargs.append(arg) + continue + + # linker flag such as -L=/DEBUG must pass through + if self.linker.id == 'link' and self.info.is_windows() and suffix.startswith('/'): + dcargs.append(arg) + continue + + # Make sure static library files are passed properly to the linker. + if arg.endswith('.a') or arg.endswith('.lib'): + if len(suffix) > 0 and not suffix.startswith('-'): + dcargs.append('-L=' + suffix) + continue + + dcargs.append('-L=' + arg) + continue + elif not arg.startswith('-') and arg.endswith(('.a', '.lib')): + # ensure static libraries are passed through to the linker + dcargs.append('-L=' + arg) + continue + else: + dcargs.append(arg) + + return dcargs + + @classmethod + def translate_arg_to_windows(cls, arg: str) -> T.List[str]: + args = [] + if arg.startswith('-Wl,'): + # Translate linker arguments here. 
+ linkargs = arg[arg.index(',') + 1:].split(',') + for la in linkargs: + if la.startswith('--out-implib='): + # Import library name + args.append('-L=/IMPLIB:' + la[13:].strip()) + elif arg.startswith('-mscrtlib='): + args.append(arg) + mscrtlib = arg[10:].lower() + if cls is LLVMDCompiler: + # Default crt libraries for LDC2 must be excluded for other + # selected crt options. + if mscrtlib != 'libcmt': + args.append('-L=/NODEFAULTLIB:libcmt') + args.append('-L=/NODEFAULTLIB:libvcruntime') + + # Fixes missing definitions for printf-functions in VS2017 + if mscrtlib.startswith('msvcrt'): + args.append('-L=/DEFAULTLIB:legacy_stdio_definitions.lib') + + return args + + @classmethod + def _translate_arg_to_osx(cls, arg: str) -> T.List[str]: + args = [] + if arg.startswith('-install_name'): + args.append('-L=' + arg) + return args + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + ddebug_args = [] + if is_debug: + ddebug_args = [d_feature_args[self.id]['debug']] + + return clike_debug_args[is_debug] + ddebug_args + + def _get_crt_args(self, crt_val: str, buildtype: str) -> T.List[str]: + if not self.info.is_windows(): + return [] + + if crt_val in self.mscrt_args: + return self.mscrt_args[crt_val] + assert(crt_val in ['from_buildtype', 'static_from_buildtype']) + + dbg = 'mdd' + rel = 'md' + if crt_val == 'static_from_buildtype': + dbg = 'mtd' + rel = 'mt' + + # Match what build type flags used to do. + if buildtype == 'plain': + return [] + elif buildtype == 'debug': + return self.mscrt_args[dbg] + elif buildtype == 'debugoptimized': + return self.mscrt_args[rel] + elif buildtype == 'release': + return self.mscrt_args[rel] + elif buildtype == 'minsize': + return self.mscrt_args[rel] + else: + assert(buildtype == 'custom') + raise EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".') + + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str], + is_shared_module: bool) -> T.List[str]: + sargs = super().get_soname_args(env, prefix, shlib_name, suffix, + soversion, darwin_versions, is_shared_module) + + # LDC and DMD actually do use a linker, but they proxy all of that with + # their own arguments + if self.linker.id.startswith('ld.'): + soargs = [] + for arg in sargs: + a, b = arg.split(',', maxsplit=1) + soargs.append(a) + soargs.append(self.LINKER_PREFIX + b) + return soargs + elif self.linker.id.startswith('ld64'): + soargs = [] + for arg in sargs: + if not arg.startswith(self.LINKER_PREFIX): + soargs.append(self.LINKER_PREFIX + arg) + else: + soargs.append(arg) + return soargs + else: + return sargs + + def get_allow_undefined_link_args(self) -> T.List[str]: + args = self.linker.get_allow_undefined_args() + if self.info.is_darwin(): + # On macOS we're passing these options to the C compiler, but + # they're linker options and need -Wl, so clang/gcc knows what to + # do with them. I'm assuming, but don't know for certain, that + # ldc/dmd do some kind of mapping internally for arguments they + # understand, but pass arguments they don't understand directly. 
+ args = [a.replace('-L=', '-Xcc=-Wl,') for a in args] + return args + + +class DCompilerArgs(CompilerArgs): + prepend_prefixes = ('-I', '-L') + dedup2_prefixes = ('-I', ) + + +class DCompiler(Compiler): + mscrt_args = { + 'none': ['-mscrtlib='], + 'md': ['-mscrtlib=msvcrt'], + 'mdd': ['-mscrtlib=msvcrtd'], + 'mt': ['-mscrtlib=libcmt'], + 'mtd': ['-mscrtlib=libcmtd'], + } + + language = 'd' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + super().__init__(exelist, version, for_machine, info, linker=linker, + full_version=full_version, is_cross=is_cross) + self.arch = arch + self.exe_wrapper = exe_wrapper + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + source_name = os.path.join(work_dir, 'sanity.d') + output_name = os.path.join(work_dir, 'dtest') + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write('''void main() { }''') + pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + self._get_target_arch_args() + [source_name], cwd=work_dir) + pc.wait() + if pc.returncode != 0: + raise EnvironmentException('D compiler %s can not compile programs.' % self.name_string()) + if self.is_cross: + if self.exe_wrapper is None: + # Can't check if the binaries run so we have to assume they do + return + cmdlist = self.exe_wrapper.get_command() + [output_name] + else: + cmdlist = [output_name] + if subprocess.call(cmdlist) != 0: + raise EnvironmentException('Executables created by D compiler %s are not runnable.' % self.name_string()) + + def needs_static_linker(self) -> bool: + return True + + def get_depfile_suffix(self) -> str: + return 'deps' + + def get_pic_args(self) -> T.List[str]: + if self.info.is_windows(): + return [] + return ['-fPIC'] + + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + # TODO: using a TypeDict here would improve this + res = [] + # get_feature_args can be called multiple times for the same target when there is generated source + # so we have to copy the kwargs (target.d_features) dict before popping from it + kwargs = kwargs.copy() + if 'unittest' in kwargs: + unittest = kwargs.pop('unittest') + unittest_arg = d_feature_args[self.id]['unittest'] + if not unittest_arg: + raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string()) + if unittest: + res.append(unittest_arg) + + if 'debug' in kwargs: + debug_level = -1 + debugs = kwargs.pop('debug') + if not isinstance(debugs, list): + debugs = [debugs] + + debug_arg = d_feature_args[self.id]['debug'] + if not debug_arg: + raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' 
% self.name_string()) + + # Parse all debug identifiers and the largest debug level identifier + for d in debugs: + if isinstance(d, int): + if d > debug_level: + debug_level = d + elif isinstance(d, str) and d.isdigit(): + if int(d) > debug_level: + debug_level = int(d) + else: + res.append(f'{debug_arg}={d}') + + if debug_level >= 0: + res.append(f'{debug_arg}={debug_level}') + + if 'versions' in kwargs: + version_level = -1 + versions = kwargs.pop('versions') + if not isinstance(versions, list): + versions = [versions] + + version_arg = d_feature_args[self.id]['version'] + if not version_arg: + raise EnvironmentException('D compiler %s does not support conditional version identifiers.' % self.name_string()) + + # Parse all version identifiers and the largest version level identifier + for v in versions: + if isinstance(v, int): + if v > version_level: + version_level = v + elif isinstance(v, str) and v.isdigit(): + if int(v) > version_level: + version_level = int(v) + else: + res.append(f'{version_arg}={v}') + + if version_level >= 0: + res.append(f'{version_arg}={version_level}') + + if 'import_dirs' in kwargs: + import_dirs = kwargs.pop('import_dirs') + if not isinstance(import_dirs, list): + import_dirs = [import_dirs] + + import_dir_arg = d_feature_args[self.id]['import_dir'] + if not import_dir_arg: + raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string()) + for idir_obj in import_dirs: + basedir = idir_obj.get_curdir() + for idir in idir_obj.get_incdirs(): + bldtreedir = os.path.join(basedir, idir) + # Avoid superfluous '/.' at the end of paths when d is '.' + if idir not in ('', '.'): + expdir = bldtreedir + else: + expdir = basedir + srctreedir = os.path.join(build_to_src, expdir) + res.append(f'{import_dir_arg}{srctreedir}') + res.append(f'{import_dir_arg}{bldtreedir}') + + if kwargs: + raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys())) + + return res + + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: + if buildtype != 'plain': + return self._get_target_arch_args() + return [] + + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> DCompilerArgs: + return DCompilerArgs(self, args) + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self.compiles('int i;\n', env, extra_args=args) + + def _get_target_arch_args(self) -> T.List[str]: + # LDC2 on Windows targets to current OS architecture, but + # it should follow the target specified by the MSVC toolchain. 
+ if self.info.is_windows(): + if self.arch == 'x86_64': + return ['-m64'] + return ['-m32'] + return [] + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return [] + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return [] + + +class GnuDCompiler(GnuCompiler, DCompiler): + + # we mostly want DCompiler, but that gives us the Compiler.LINKER_PREFIX instead + LINKER_PREFIX = GnuCompiler.LINKER_PREFIX + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + DCompiler.__init__(self, exelist, version, for_machine, info, arch, + exe_wrapper=exe_wrapper, linker=linker, + full_version=full_version, is_cross=is_cross) + GnuCompiler.__init__(self, {}) + self.id = 'gcc' + default_warn_args = ['-Wall', '-Wdeprecated'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + self.base_options = { + OptionKey(o) for o in [ + 'b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt', + 'b_coverage', 'b_pgo', 'b_ndebug']} + + self._has_color_support = version_compare(self.version, '>=4.9') + # dependencies were implemented before, but broken - support was fixed in GCC 7.1+ + # (and some backported versions) + self._has_deps_support = version_compare(self.version, '>=7.1') + + def get_colorout_args(self, colortype: str) -> T.List[str]: + if self._has_color_support: + super().get_colorout_args(colortype) + return [] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + if self._has_deps_support: + return super().get_dependency_gen_args(outtarget, outfile) + return [] + + def get_warn_args(self, level: str) -> T.List[str]: + return self.warn_args[level] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return d_gdc_buildtype_args[buildtype] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + + return parameter_list + + def get_allow_undefined_link_args(self) -> T.List[str]: + return self.linker.get_allow_undefined_args() + + def get_linker_always_args(self) -> T.List[str]: + args = super().get_linker_always_args() + if self.info.is_windows(): + return args + return args + ['-shared-libphobos'] + + def get_disable_assert_args(self) -> T.List[str]: + return ['-frelease'] + +# LDC uses the DMD frontend code to parse and analyse the code. +# It then uses LLVM for the binary code generation and optimizations. +# This function retrieves the dmd frontend version, which determines +# the common features between LDC and DMD. 
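# A worked example (the sample output string is illustrative) of the regex used
# by find_ldc_dmd_frontend_version() defined just below: ldc2 reports its DMD
# frontend on a later line of its version output, so the whole text is searched
# for 'DMD v<major>.<minor>.<patch>' rather than only the first line.
import re

_sample = 'LDC - the LLVM D compiler (1.28.0):\n  based on DMD v2.098.1 and LLVM 13.0.0'
_m = re.search(r'DMD v(\d+\.\d+\.\d+)', _sample)
assert _m is not None and _m.group(1) == '2.098.1'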
+# We need the complete version text because the match is not on the first
+# line of version_output
+def find_ldc_dmd_frontend_version(version_output: T.Optional[str]) -> T.Optional[str]:
+    if version_output is None:
+        return None
+    version_regex = re.search(r'DMD v(\d+\.\d+\.\d+)', version_output)
+    if version_regex:
+        return version_regex.group(1)
+    return None
+
+class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 info: 'MachineInfo', arch: str, *,
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None,
+                 is_cross: bool = False, version_output: T.Optional[str] = None):
+        DCompiler.__init__(self, exelist, version, for_machine, info, arch,
+                           exe_wrapper=exe_wrapper, linker=linker,
+                           full_version=full_version, is_cross=is_cross)
+        DmdLikeCompilerMixin.__init__(self, dmd_frontend_version=find_ldc_dmd_frontend_version(version_output))
+        self.id = 'llvm'
+        self.base_options = {OptionKey(o) for o in ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']}
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if colortype == 'always':
+            return ['-enable-color']
+        return []
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        if level in {'2', '3'}:
+            return ['-wi', '-dw']
+        elif level == '1':
+            return ['-wi']
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        if buildtype != 'plain':
+            return self._get_target_arch_args() + d_ldc_buildtype_args[buildtype]
+        return d_ldc_buildtype_args[buildtype]
+
+    def get_pic_args(self) -> T.List[str]:
+        return ['-relocation-model=pic']
+
+    def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        return self._get_crt_args(crt_val, buildtype)
+
+    def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+        return self._translate_args_to_nongnu(args)
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return ldc_optimization_args[optimization_level]
+
+    @classmethod
+    def use_linker_args(cls, linker: str) -> T.List[str]:
+        return [f'-linker={linker}']
+
+    def get_linker_always_args(self) -> T.List[str]:
+        args = super().get_linker_always_args()
+        if self.info.is_windows():
+            return args
+        return args + ['-link-defaultlib-shared']
+
+    def get_disable_assert_args(self) -> T.List[str]:
+        return ['--release']
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        # We use `mesonlib.is_windows` here because we want to know what the
+        # build machine is, not the host machine. This really means we should
+        # have the Environment, not the MachineInfo, in the compiler.
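+        # i.e. quote response-file entries MSVC-style when the build machine
+        # is Windows and GCC-style everywhere else.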
+ return RSPFileSyntax.MSVC if is_windows() else RSPFileSyntax.GCC + + +class DmdDCompiler(DmdLikeCompilerMixin, DCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + DCompiler.__init__(self, exelist, version, for_machine, info, arch, + exe_wrapper=exe_wrapper, linker=linker, + full_version=full_version, is_cross=is_cross) + DmdLikeCompilerMixin.__init__(self, version) + self.id = 'dmd' + self.base_options = {OptionKey(o) for o in ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']} + + def get_colorout_args(self, colortype: str) -> T.List[str]: + if colortype == 'always': + return ['-color=on'] + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + if buildtype != 'plain': + return self._get_target_arch_args() + d_dmd_buildtype_args[buildtype] + return d_dmd_buildtype_args[buildtype] + + def get_std_exe_link_args(self) -> T.List[str]: + if self.info.is_windows(): + # DMD links against D runtime only when main symbol is found, + # so these needs to be inserted when linking static D libraries. + if self.arch == 'x86_64': + return ['phobos64.lib'] + elif self.arch == 'x86_mscoff': + return ['phobos32mscoff.lib'] + return ['phobos.lib'] + return [] + + def get_std_shared_lib_link_args(self) -> T.List[str]: + libname = 'libphobos2.so' + if self.info.is_windows(): + if self.arch == 'x86_64': + libname = 'phobos64.lib' + elif self.arch == 'x86_mscoff': + libname = 'phobos32mscoff.lib' + else: + libname = 'phobos.lib' + return ['-shared', '-defaultlib=' + libname] + + def _get_target_arch_args(self) -> T.List[str]: + # DMD32 and DMD64 on 64-bit Windows defaults to 32-bit (OMF). + # Force the target to 64-bit in order to stay consistent + # across the different platforms. + if self.info.is_windows(): + if self.arch == 'x86_64': + return ['-m64'] + elif self.arch == 'x86_mscoff': + return ['-m32mscoff'] + return ['-m32'] + return [] + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return self._get_crt_args(crt_val, buildtype) + + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: + return self._translate_args_to_nongnu(args) + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return dmd_optimization_args[optimization_level] + + def can_linker_accept_rsp(self) -> bool: + return False + + def get_linker_always_args(self) -> T.List[str]: + args = super().get_linker_always_args() + if self.info.is_windows(): + return args + return args + ['-defaultlib=phobos2', '-debuglib=phobos2'] + + def get_disable_assert_args(self) -> T.List[str]: + return ['-release'] + + def rsp_file_syntax(self) -> RSPFileSyntax: + return RSPFileSyntax.MSVC diff --git a/meson/mesonbuild/compilers/detect.py b/meson/mesonbuild/compilers/detect.py new file mode 100644 index 000000000..22cf43b6d --- /dev/null +++ b/meson/mesonbuild/compilers/detect.py @@ -0,0 +1,1219 @@ +# Copyright 2012-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ..mesonlib import ( + MachineChoice, MesonException, EnvironmentException, + search_version, is_windows, Popen_safe, windows_proof_rm, +) +from ..envconfig import BinaryTable +from .. import mlog + +from ..linkers import ( + guess_win_linker, + guess_nix_linker, + AIXArLinker, + ArLinker, + ArmarLinker, + ArmClangDynamicLinker, + ArmDynamicLinker, + CcrxLinker, + CcrxDynamicLinker, + CompCertLinker, + CompCertDynamicLinker, + C2000Linker, + C2000DynamicLinker, + DLinker, + NvidiaHPC_DynamicLinker, + PGIDynamicLinker, + PGIStaticLinker, + StaticLinker, + Xc16Linker, + Xc16DynamicLinker, + XilinkDynamicLinker, + CudaLinker, + IntelVisualStudioLinker, + VisualStudioLinker, + VisualStudioLikeLinkerMixin, + WASMDynamicLinker, +) +from .compilers import Compiler +from .c import ( + CCompiler, + AppleClangCCompiler, + ArmCCompiler, + ArmclangCCompiler, + ClangCCompiler, + ClangClCCompiler, + GnuCCompiler, + ElbrusCCompiler, + EmscriptenCCompiler, + IntelCCompiler, + IntelClCCompiler, + NvidiaHPC_CCompiler, + PGICCompiler, + CcrxCCompiler, + Xc16CCompiler, + CompCertCCompiler, + C2000CCompiler, + VisualStudioCCompiler, +) +from .cpp import ( + CPPCompiler, + AppleClangCPPCompiler, + ArmCPPCompiler, + ArmclangCPPCompiler, + ClangCPPCompiler, + ClangClCPPCompiler, + GnuCPPCompiler, + ElbrusCPPCompiler, + EmscriptenCPPCompiler, + IntelCPPCompiler, + IntelClCPPCompiler, + NvidiaHPC_CPPCompiler, + PGICPPCompiler, + CcrxCPPCompiler, + C2000CPPCompiler, + VisualStudioCPPCompiler, +) +from .cs import MonoCompiler, VisualStudioCsCompiler +from .d import ( + DCompiler, + DmdDCompiler, + GnuDCompiler, + LLVMDCompiler, +) +from .cuda import CudaCompiler +from .fortran import ( + FortranCompiler, + G95FortranCompiler, + GnuFortranCompiler, + ElbrusFortranCompiler, + FlangFortranCompiler, + IntelFortranCompiler, + IntelClFortranCompiler, + NAGFortranCompiler, + Open64FortranCompiler, + PathScaleFortranCompiler, + NvidiaHPC_FortranCompiler, + PGIFortranCompiler, + SunFortranCompiler, +) +from .java import JavaCompiler +from .objc import ( + ObjCCompiler, + AppleClangObjCCompiler, + ClangObjCCompiler, + GnuObjCCompiler, +) +from .objcpp import ( + ObjCPPCompiler, + AppleClangObjCPPCompiler, + ClangObjCPPCompiler, + GnuObjCPPCompiler, +) +from .cython import CythonCompiler +from .rust import RustCompiler +from .swift import SwiftCompiler +from .vala import ValaCompiler +from .mixins.visualstudio import VisualStudioLikeCompiler +from .mixins.gnu import GnuCompiler +from .mixins.clang import ClangCompiler + +import subprocess +import platform +import re +import shutil +import tempfile +import os +import typing as T + +if T.TYPE_CHECKING: + from ..environment import Environment + from ..programs import ExternalProgram + from .compilers import CompilerType + + + +# Default compilers and linkers +# ============================= + +defaults: T.Dict[str, T.List[str]] = {} + +# List of potential compilers. +if is_windows(): + # Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere. 
+ # Search for icl before cl, since Intel "helpfully" provides a + # cl.exe that returns *exactly the same thing* that microsofts + # cl.exe does, and if icl is present, it's almost certainly what + # you want. + defaults['c'] = ['icl', 'cl', 'cc', 'gcc', 'clang', 'clang-cl', 'pgcc'] + # There is currently no pgc++ for Windows, only for Mac and Linux. + defaults['cpp'] = ['icl', 'cl', 'c++', 'g++', 'clang++', 'clang-cl'] + defaults['fortran'] = ['ifort', 'gfortran', 'flang', 'pgfortran', 'g95'] + # Clang and clang++ are valid, but currently unsupported. + defaults['objc'] = ['cc', 'gcc'] + defaults['objcpp'] = ['c++', 'g++'] + defaults['cs'] = ['csc', 'mcs'] +else: + if platform.machine().lower() == 'e2k': + # There are no objc or objc++ compilers for Elbrus, + # and there's no clang which can build binaries for host. + defaults['c'] = ['cc', 'gcc', 'lcc'] + defaults['cpp'] = ['c++', 'g++', 'l++'] + defaults['objc'] = [] + defaults['objcpp'] = [] + else: + defaults['c'] = ['cc', 'gcc', 'clang', 'nvc', 'pgcc', 'icc'] + defaults['cpp'] = ['c++', 'g++', 'clang++', 'nvc++', 'pgc++', 'icpc'] + defaults['objc'] = ['cc', 'gcc', 'clang'] + defaults['objcpp'] = ['c++', 'g++', 'clang++'] + defaults['fortran'] = ['gfortran', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'g95'] + defaults['cs'] = ['mcs', 'csc'] +defaults['d'] = ['ldc2', 'ldc', 'gdc', 'dmd'] +defaults['java'] = ['javac'] +defaults['cuda'] = ['nvcc'] +defaults['rust'] = ['rustc'] +defaults['swift'] = ['swiftc'] +defaults['vala'] = ['valac'] +defaults['cython'] = ['cython'] +defaults['static_linker'] = ['ar', 'gar'] +defaults['strip'] = ['strip'] +defaults['vs_static_linker'] = ['lib'] +defaults['clang_cl_static_linker'] = ['llvm-lib'] +defaults['cuda_static_linker'] = ['nvlink'] +defaults['gcc_static_linker'] = ['gcc-ar'] +defaults['clang_static_linker'] = ['llvm-ar'] + + +def compiler_from_language(env: 'Environment', lang: str, for_machine: MachineChoice) -> T.Optional[Compiler]: + lang_map: T.Dict[str, T.Callable[['Environment', MachineChoice], Compiler]] = { + 'c': detect_c_compiler, + 'cpp': detect_cpp_compiler, + 'objc': detect_objc_compiler, + 'cuda': detect_cuda_compiler, + 'objcpp': detect_objcpp_compiler, + 'java': detect_java_compiler, + 'cs': detect_cs_compiler, + 'vala': detect_vala_compiler, + 'd': detect_d_compiler, + 'rust': detect_rust_compiler, + 'fortran': detect_fortran_compiler, + 'swift': detect_swift_compiler, + 'cython': detect_cython_compiler, + } + return lang_map[lang](env, for_machine) if lang in lang_map else None + +def detect_compiler_for(env: 'Environment', lang: str, for_machine: MachineChoice)-> T.Optional[Compiler]: + comp = compiler_from_language(env, lang, for_machine) + if comp is not None: + assert comp.for_machine == for_machine + env.coredata.process_new_compiler(lang, comp, env) + return comp + + +# Helpers +# ======= + +def _get_compilers(env: 'Environment', lang: str, for_machine: MachineChoice) -> T.Tuple[T.List[T.List[str]], T.List[str], T.Optional['ExternalProgram']]: + ''' + The list of compilers is detected in the exact same way for + C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here. 
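+
+    Returns the candidate compiler command lines, the ccache command (if any)
+    and, for cross builds, the exe wrapper.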
+ ''' + value = env.lookup_binary_entry(for_machine, lang) + if value is not None: + comp, ccache = BinaryTable.parse_entry(value) + # Return value has to be a list of compiler 'choices' + compilers = [comp] + else: + if not env.machines.matches_build_machine(for_machine): + raise EnvironmentException(f'{lang!r} compiler binary not defined in cross or native file') + compilers = [[x] for x in defaults[lang]] + ccache = BinaryTable.detect_ccache() + + if env.machines.matches_build_machine(for_machine): + exe_wrap: T.Optional[ExternalProgram] = None + else: + exe_wrap = env.get_exe_wrapper() + + return compilers, ccache, exe_wrap + +def _handle_exceptions( + exceptions: T.Mapping[str, T.Union[Exception, str]], + binaries: T.List[T.List[str]], + bintype: str = 'compiler' + ) -> T.NoReturn: + errmsg = f'Unknown {bintype}(s): {binaries}' + if exceptions: + errmsg += '\nThe following exception(s) were encountered:' + for c, e in exceptions.items(): + errmsg += f'\nRunning "{c}" gave "{e}"' + raise EnvironmentException(errmsg) + + +# Linker specific +# =============== + +def detect_static_linker(env: 'Environment', compiler: Compiler) -> StaticLinker: + linker = env.lookup_binary_entry(compiler.for_machine, 'ar') + if linker is not None: + linkers = [linker] + else: + default_linkers = [[l] for l in defaults['static_linker']] + if isinstance(compiler, CudaCompiler): + linkers = [defaults['cuda_static_linker']] + default_linkers + elif isinstance(compiler, VisualStudioLikeCompiler): + linkers = [defaults['vs_static_linker'], defaults['clang_cl_static_linker']] + elif isinstance(compiler, GnuCompiler): + # Use gcc-ar if available; needed for LTO + linkers = [defaults['gcc_static_linker']] + default_linkers + elif isinstance(compiler, ClangCompiler): + # Use llvm-ar if available; needed for LTO + linkers = [defaults['clang_static_linker']] + default_linkers + elif isinstance(compiler, DCompiler): + # Prefer static linkers over linkers used by D compilers + if is_windows(): + linkers = [defaults['vs_static_linker'], defaults['clang_cl_static_linker'], compiler.get_linker_exelist()] + else: + linkers = default_linkers + elif isinstance(compiler, IntelClCCompiler): + # Intel has it's own linker that acts like microsoft's lib + linkers = [['xilib']] + elif isinstance(compiler, (PGICCompiler, PGIFortranCompiler)) and is_windows(): + linkers = [['ar']] # For PGI on Windows, "ar" is just a wrapper calling link/lib. + else: + linkers = default_linkers + popen_exceptions = {} + for linker in linkers: + if not {'lib', 'lib.exe', 'llvm-lib', 'llvm-lib.exe', 'xilib', 'xilib.exe'}.isdisjoint(linker): + arg = '/?' + elif not {'ar2000', 'ar2000.exe'}.isdisjoint(linker): + arg = '?' 
+ else: + arg = '--version' + try: + p, out, err = Popen_safe(linker + [arg]) + except OSError as e: + popen_exceptions[' '.join(linker + [arg])] = e + continue + if "xilib: executing 'lib'" in err: + return IntelVisualStudioLinker(linker, getattr(compiler, 'machine', None)) + if '/OUT:' in out.upper() or '/OUT:' in err.upper(): + return VisualStudioLinker(linker, getattr(compiler, 'machine', None)) + if 'ar-Error-Unknown switch: --version' in err: + return PGIStaticLinker(linker) + if p.returncode == 0 and ('armar' in linker or 'armar.exe' in linker): + return ArmarLinker(linker) + if 'DMD32 D Compiler' in out or 'DMD64 D Compiler' in out: + assert isinstance(compiler, DCompiler) + return DLinker(linker, compiler.arch) + if 'LDC - the LLVM D compiler' in out: + assert isinstance(compiler, DCompiler) + return DLinker(linker, compiler.arch, rsp_syntax=compiler.rsp_file_syntax()) + if 'GDC' in out and ' based on D ' in out: + assert isinstance(compiler, DCompiler) + return DLinker(linker, compiler.arch) + if err.startswith('Renesas') and ('rlink' in linker or 'rlink.exe' in linker): + return CcrxLinker(linker) + if out.startswith('GNU ar') and ('xc16-ar' in linker or 'xc16-ar.exe' in linker): + return Xc16Linker(linker) + if out.startswith('TMS320C2000') and ('ar2000' in linker or 'ar2000.exe' in linker): + return C2000Linker(linker) + if out.startswith('The CompCert'): + return CompCertLinker(linker) + if p.returncode == 0: + return ArLinker(linker) + if p.returncode == 1 and err.startswith('usage'): # OSX + return ArLinker(linker) + if p.returncode == 1 and err.startswith('Usage'): # AIX + return AIXArLinker(linker) + if p.returncode == 1 and err.startswith('ar: bad option: --'): # Solaris + return ArLinker(linker) + _handle_exceptions(popen_exceptions, linkers, 'linker') + + + +# Compilers +# ========= + + +def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: MachineChoice, *, override_compiler: T.Optional[T.List[str]] = None) -> Compiler: + """Shared implementation for finding the C or C++ compiler to use. + + the override_compiler option is provided to allow compilers which use + the compiler (GCC or Clang usually) as their shared linker, to find + the linker they need. + """ + popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {} + compilers, ccache, exe_wrap = _get_compilers(env, lang, for_machine) + if override_compiler is not None: + compilers = [override_compiler] + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + cls: T.Union[T.Type[CCompiler], T.Type[CPPCompiler]] + + for compiler in compilers: + if isinstance(compiler, str): + compiler = [compiler] + compiler_name = os.path.basename(compiler[0]) + + if any(os.path.basename(x) in {'cl', 'cl.exe', 'clang-cl', 'clang-cl.exe'} for x in compiler): + # Watcom C provides it's own cl.exe clone that mimics an older + # version of Microsoft's compiler. Since Watcom's cl.exe is + # just a wrapper, we skip using it if we detect its presence + # so as not to confuse Meson when configuring for MSVC. + # + # Additionally the help text of Watcom's cl.exe is paged, and + # the binary will not exit without human intervention. In + # practice, Meson will block waiting for Watcom's cl.exe to + # exit, which requires user input and thus will never exit. 
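+            # If a Watcom installation is advertised via the WATCOM environment
+            # variable, compare the 'cl' found on PATH against the one shipped
+            # in its BINNT directory and skip this candidate when they match.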
+ if 'WATCOM' in os.environ: + def sanitize(p: str) -> str: + return os.path.normcase(os.path.abspath(p)) + + watcom_cls = [sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl')), + sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl.exe'))] + found_cl = sanitize(shutil.which('cl')) + if found_cl in watcom_cls: + continue + arg = '/?' + elif 'armcc' in compiler_name: + arg = '--vsn' + elif 'ccrx' in compiler_name: + arg = '-v' + elif 'xc16' in compiler_name: + arg = '--version' + elif 'ccomp' in compiler_name: + arg = '-version' + elif 'cl2000' in compiler_name: + arg = '-version' + elif compiler_name in {'icl', 'icl.exe'}: + # if you pass anything to icl you get stuck in a pager + arg = '' + else: + arg = '--version' + + try: + p, out, err = Popen_safe(compiler + [arg]) + except OSError as e: + popen_exceptions[' '.join(compiler + [arg])] = e + continue + + if 'ccrx' in compiler_name: + out = err + + full_version = out.split('\n', 1)[0] + version = search_version(out) + + guess_gcc_or_lcc: T.Optional[str] = None + if 'Free Software Foundation' in out or 'xt-' in out: + guess_gcc_or_lcc = 'gcc' + if 'e2k' in out and 'lcc' in out: + guess_gcc_or_lcc = 'lcc' + if 'Microchip Technology' in out: + # this output has "Free Software Foundation" in its version + guess_gcc_or_lcc = None + + if guess_gcc_or_lcc: + defines = _get_gnu_compiler_defines(compiler) + if not defines: + popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' + continue + + if guess_gcc_or_lcc == 'lcc': + version = _get_lcc_version_from_defines(defines) + cls = ElbrusCCompiler if lang == 'c' else ElbrusCPPCompiler + else: + version = _get_gnu_version_from_defines(defines) + cls = GnuCCompiler if lang == 'c' else GnuCPPCompiler + + linker = guess_nix_linker(env, compiler, cls, for_machine) + + return cls( + ccache + compiler, version, for_machine, is_cross, + info, exe_wrap, defines=defines, full_version=full_version, + linker=linker) + + if 'Emscripten' in out: + cls = EmscriptenCCompiler if lang == 'c' else EmscriptenCPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + + # emcc requires a file input in order to pass arguments to the + # linker. It'll exit with an error code, but still print the + # linker version. Old emcc versions ignore -Wl,--version completely, + # however. We'll report "unknown version" in that case. + with tempfile.NamedTemporaryFile(suffix='.c') as f: + cmd = compiler + [cls.LINKER_PREFIX + "--version", f.name] + _, o, _ = Popen_safe(cmd) + + linker = WASMDynamicLinker( + compiler, for_machine, cls.LINKER_PREFIX, + [], version=search_version(o)) + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, linker=linker, full_version=full_version) + + if 'armclang' in out: + # The compiler version is not present in the first line of output, + # instead it is present in second line, startswith 'Component:'. 
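+            # A deliberately broad '.*Component.*' match keeps this working
+            # even if the surrounding text moves around in future releases.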
+ # So, searching for the 'Component' in out although we know it is + # present in second line, as we are not sure about the + # output format in future versions + arm_ver_match = re.search('.*Component.*', out) + if arm_ver_match is None: + popen_exceptions[' '.join(compiler)] = 'version string not found' + continue + arm_ver_str = arm_ver_match.group(0) + # Override previous values + version = search_version(arm_ver_str) + full_version = arm_ver_str + cls = ArmclangCCompiler if lang == 'c' else ArmclangCPPCompiler + linker = ArmClangDynamicLinker(for_machine, version=version) + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + if 'CL.EXE COMPATIBILITY' in out: + # if this is clang-cl masquerading as cl, detect it as cl, not + # clang + arg = '--version' + try: + p, out, err = Popen_safe(compiler + [arg]) + except OSError as e: + popen_exceptions[' '.join(compiler + [arg])] = e + version = search_version(out) + match = re.search('^Target: (.*?)-', out, re.MULTILINE) + if match: + target = match.group(1) + else: + target = 'unknown target' + cls = ClangClCCompiler if lang == 'c' else ClangClCPPCompiler + linker = guess_win_linker(env, ['lld-link'], cls, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, target, + exe_wrap, linker=linker) + if 'clang' in out or 'Clang' in out: + linker = None + + defines = _get_clang_compiler_defines(compiler) + + # Even if the for_machine is darwin, we could be using vanilla + # clang. + if 'Apple' in out: + cls = AppleClangCCompiler if lang == 'c' else AppleClangCPPCompiler + else: + cls = ClangCCompiler if lang == 'c' else ClangCPPCompiler + + if 'windows' in out or env.machines[for_machine].is_windows(): + # If we're in a MINGW context this actually will use a gnu + # style ld, but for clang on "real" windows we'll use + # either link.exe or lld-link.exe + try: + linker = guess_win_linker(env, compiler, cls, for_machine, invoked_directly=False) + except MesonException: + pass + if linker is None: + linker = guess_nix_linker(env, compiler, cls, for_machine) + + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, defines=defines, full_version=full_version, linker=linker) + + if 'Intel(R) C++ Intel(R)' in err: + version = search_version(err) + target = 'x86' if 'IA-32' in err else 'x86_64' + cls = IntelClCCompiler if lang == 'c' else IntelClCPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = XilinkDynamicLinker(for_machine, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, target, + exe_wrap, linker=linker) + if 'Microsoft' in out or 'Microsoft' in err: + # Latest versions of Visual Studio print version + # number to stderr but earlier ones print version + # on stdout. Why? Lord only knows. + # Check both outputs to figure out version. 
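+            # The for/else below keeps the first output that yields a usable
+            # version string and raises if neither does.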
+ for lookat in [err, out]: + version = search_version(lookat) + if version != 'unknown version': + break + else: + raise EnvironmentException(f'Failed to detect MSVC compiler version: stderr was\n{err!r}') + cl_signature = lookat.split('\n')[0] + match = re.search(r'.*(x86|x64|ARM|ARM64)([^_A-Za-z0-9]|$)', cl_signature) + if match: + target = match.group(1) + else: + m = f'Failed to detect MSVC compiler target architecture: \'cl /?\' output is\n{cl_signature}' + raise EnvironmentException(m) + cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler + linker = guess_win_linker(env, ['link'], cls, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, target, + exe_wrap, full_version=cl_signature, linker=linker) + if 'PGI Compilers' in out: + cls = PGICCompiler if lang == 'c' else PGICPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = PGIDynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version) + return cls( + ccache + compiler, version, for_machine, is_cross, + info, exe_wrap, linker=linker) + if 'NVIDIA Compilers and Tools' in out: + cls = NvidiaHPC_CCompiler if lang == 'c' else NvidiaHPC_CPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = NvidiaHPC_DynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version) + return cls( + ccache + compiler, version, for_machine, is_cross, + info, exe_wrap, linker=linker) + if '(ICC)' in out: + cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler + l = guess_nix_linker(env, compiler, cls, for_machine) + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=l) + if 'ARM' in out: + cls = ArmCCompiler if lang == 'c' else ArmCPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = ArmDynamicLinker(for_machine, version=version) + return cls( + ccache + compiler, version, for_machine, is_cross, + info, exe_wrap, full_version=full_version, linker=linker) + if 'RX Family' in out: + cls = CcrxCCompiler if lang == 'c' else CcrxCPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = CcrxDynamicLinker(for_machine, version=version) + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'Microchip Technology' in out: + cls = Xc16CCompiler if lang == 'c' else Xc16CCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = Xc16DynamicLinker(for_machine, version=version) + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'CompCert' in out: + cls = CompCertCCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = CompCertDynamicLinker(for_machine, version=version) + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'TMS320C2000 C/C++' in out: + cls = C2000CCompiler if lang == 'c' else C2000CPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = C2000DynamicLinker(compiler, for_machine, version=version) + return cls( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException(f'Unknown compiler {compilers}') + +def detect_c_compiler(env: 
'Environment', for_machine: MachineChoice) -> Compiler: + return _detect_c_or_cpp_compiler(env, 'c', for_machine) + +def detect_cpp_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + return _detect_c_or_cpp_compiler(env, 'cpp', for_machine) + +def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + popen_exceptions = {} + is_cross = env.is_cross_build(for_machine) + compilers, ccache, exe_wrap = _get_compilers(env, 'cuda', for_machine) + info = env.machines[for_machine] + for compiler in compilers: + arg = '--version' + try: + p, out, err = Popen_safe(compiler + [arg]) + except OSError as e: + popen_exceptions[' '.join(compiler + [arg])] = e + continue + # Example nvcc printout: + # + # nvcc: NVIDIA (R) Cuda compiler driver + # Copyright (c) 2005-2018 NVIDIA Corporation + # Built on Sat_Aug_25_21:08:01_CDT_2018 + # Cuda compilation tools, release 10.0, V10.0.130 + # + # search_version() first finds the "10.0" after "release", + # rather than the more precise "10.0.130" after "V". + # The patch version number is occasionally important; For + # instance, on Linux, + # - CUDA Toolkit 8.0.44 requires NVIDIA Driver 367.48 + # - CUDA Toolkit 8.0.61 requires NVIDIA Driver 375.26 + # Luckily, the "V" also makes it very simple to extract + # the full version: + version = out.strip().split('V')[-1] + cpp_compiler = detect_cpp_compiler(env, for_machine) + cls = CudaCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = CudaLinker(compiler, for_machine, CudaCompiler.LINKER_PREFIX, [], version=CudaLinker.parse_version()) + return cls(ccache + compiler, version, for_machine, is_cross, exe_wrap, host_compiler=cpp_compiler, info=info, linker=linker) + raise EnvironmentException(f'Could not find suitable CUDA compiler: "{"; ".join([" ".join(c) for c in compilers])}"') + +def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {} + compilers, ccache, exe_wrap = _get_compilers(env, 'fortran', for_machine) + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + cls: T.Type[FortranCompiler] + for compiler in compilers: + for arg in ['--version', '-V']: + try: + p, out, err = Popen_safe(compiler + [arg]) + except OSError as e: + popen_exceptions[' '.join(compiler + [arg])] = e + continue + + version = search_version(out) + full_version = out.split('\n', 1)[0] + + guess_gcc_or_lcc: T.Optional[str] = None + if 'GNU Fortran' in out: + guess_gcc_or_lcc = 'gcc' + if 'e2k' in out and 'lcc' in out: + guess_gcc_or_lcc = 'lcc' + + if guess_gcc_or_lcc: + defines = _get_gnu_compiler_defines(compiler) + if not defines: + popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' + continue + if guess_gcc_or_lcc == 'lcc': + version = _get_lcc_version_from_defines(defines) + cls = ElbrusFortranCompiler + else: + version = _get_gnu_version_from_defines(defines) + cls = GnuFortranCompiler + linker = guess_nix_linker(env, compiler, cls, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, defines, full_version=full_version, + linker=linker) + + if 'G95' in out: + cls = G95FortranCompiler + linker = guess_nix_linker(env, compiler, cls, for_machine) + return G95FortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'Sun Fortran' in err: + version = search_version(err) + cls = SunFortranCompiler + linker = 
guess_nix_linker(env, compiler, cls, for_machine) + return SunFortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'Intel(R) Visual Fortran' in err or 'Intel(R) Fortran' in err: + version = search_version(err) + target = 'x86' if 'IA-32' in err else 'x86_64' + cls = IntelClFortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = XilinkDynamicLinker(for_machine, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, + target, exe_wrap, linker=linker) + + if 'ifort (IFORT)' in out: + linker = guess_nix_linker(env, compiler, IntelFortranCompiler, for_machine) + return IntelFortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'PathScale EKOPath(tm)' in err: + return PathScaleFortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version) + + if 'PGI Compilers' in out: + cls = PGIFortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = PGIDynamicLinker(compiler, for_machine, + cls.LINKER_PREFIX, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, exe_wrap, + full_version=full_version, linker=linker) + + if 'NVIDIA Compilers and Tools' in out: + cls = NvidiaHPC_FortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = PGIDynamicLinker(compiler, for_machine, + cls.LINKER_PREFIX, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, exe_wrap, + full_version=full_version, linker=linker) + + if 'flang' in out or 'clang' in out: + linker = guess_nix_linker(env, + compiler, FlangFortranCompiler, for_machine) + return FlangFortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'Open64 Compiler Suite' in err: + linker = guess_nix_linker(env, + compiler, Open64FortranCompiler, for_machine) + return Open64FortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'NAG Fortran' in err: + linker = guess_nix_linker(env, + compiler, NAGFortranCompiler, for_machine) + return NAGFortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_objc_compiler(env: 'Environment', for_machine: MachineChoice) -> 'Compiler': + return _detect_objc_or_objcpp_compiler(env, for_machine, True) + +def detect_objcpp_compiler(env: 'Environment', for_machine: MachineChoice) -> 'Compiler': + return _detect_objc_or_objcpp_compiler(env, for_machine, False) + +def _detect_objc_or_objcpp_compiler(env: 'Environment', for_machine: MachineChoice, objc: bool) -> 'Compiler': + popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {} + compilers, ccache, exe_wrap = _get_compilers(env, 'objc' if objc else 'objcpp', for_machine) + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + comp: T.Union[T.Type[ObjCCompiler], T.Type[ObjCPPCompiler]] + + for compiler in compilers: + arg = ['--version'] + try: + p, out, err = Popen_safe(compiler + arg) + except OSError as e: + popen_exceptions[' '.join(compiler + arg)] = e + continue + version = search_version(out) + if 'Free Software Foundation' 
in out: + defines = _get_gnu_compiler_defines(compiler) + if not defines: + popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' + continue + version = _get_gnu_version_from_defines(defines) + comp = GnuObjCCompiler if objc else GnuObjCPPCompiler + linker = guess_nix_linker(env, compiler, comp, for_machine) + return comp( + ccache + compiler, version, for_machine, is_cross, info, + exe_wrap, defines, linker=linker) + if 'clang' in out: + linker = None + defines = _get_clang_compiler_defines(compiler) + if not defines: + popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' + continue + if 'Apple' in out: + comp = AppleClangObjCCompiler if objc else AppleClangObjCPPCompiler + else: + comp = ClangObjCCompiler if objc else ClangObjCPPCompiler + if 'windows' in out or env.machines[for_machine].is_windows(): + # If we're in a MINGW context this actually will use a gnu style ld + try: + linker = guess_win_linker(env, compiler, comp, for_machine) + except MesonException: + pass + + if not linker: + linker = guess_nix_linker(env, compiler, comp, for_machine) + return comp( + ccache + compiler, version, for_machine, + is_cross, info, exe_wrap, linker=linker, defines=defines) + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_java_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + exelist = env.lookup_binary_entry(for_machine, 'java') + info = env.machines[for_machine] + if exelist is None: + # TODO support fallback + exelist = [defaults['java'][0]] + + try: + p, out, err = Popen_safe(exelist + ['-version']) + except OSError: + raise EnvironmentException('Could not execute Java compiler "{}"'.format(' '.join(exelist))) + if 'javac' in out or 'javac' in err: + version = search_version(err if 'javac' in err else out) + if not version or version == 'unknown version': + parts = (err if 'javac' in err else out).split() + if len(parts) > 1: + version = parts[1] + comp_class = JavaCompiler + env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) + return comp_class(exelist, version, for_machine, info) + raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') + +def detect_cs_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + compilers, ccache, exe_wrap = _get_compilers(env, 'cs', for_machine) + popen_exceptions = {} + info = env.machines[for_machine] + for comp in compilers: + try: + p, out, err = Popen_safe(comp + ['--version']) + except OSError as e: + popen_exceptions[' '.join(comp + ['--version'])] = e + continue + + version = search_version(out) + cls: T.Union[T.Type[MonoCompiler], T.Type[VisualStudioCsCompiler]] + if 'Mono' in out: + cls = MonoCompiler + elif "Visual C#" in out: + cls = VisualStudioCsCompiler + else: + continue + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + return cls(comp, version, for_machine, info) + + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_cython_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + """Search for a cython compiler.""" + compilers, _, _ = _get_compilers(env, 'cython', for_machine) + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + + popen_exceptions: T.Dict[str, Exception] = {} + for comp in compilers: + try: + err = Popen_safe(comp + ['-V'])[2] + except OSError as e: + popen_exceptions[' 
'.join(comp + ['-V'])] = e + continue + + version = search_version(err) + if 'Cython' in err: + comp_class = CythonCompiler + env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) + return comp_class(comp, version, for_machine, info, is_cross=is_cross) + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + exelist = env.lookup_binary_entry(for_machine, 'vala') + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + if exelist is None: + # TODO support fallback + exelist = [defaults['vala'][0]] + + try: + p, out = Popen_safe(exelist + ['--version'])[0:2] + except OSError: + raise EnvironmentException('Could not execute Vala compiler "{}"'.format(' '.join(exelist))) + version = search_version(out) + if 'Vala' in out: + comp_class = ValaCompiler + env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) + return comp_class(exelist, version, for_machine, is_cross, info) + raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') + +def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> RustCompiler: + popen_exceptions = {} # type: T.Dict[str, Exception] + compilers, _, exe_wrap = _get_compilers(env, 'rust', for_machine) + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + + cc = detect_c_compiler(env, for_machine) + is_link_exe = isinstance(cc.linker, VisualStudioLikeLinkerMixin) + override = env.lookup_binary_entry(for_machine, 'rust_ld') + + for compiler in compilers: + arg = ['--version'] + try: + out = Popen_safe(compiler + arg)[1] + except OSError as e: + popen_exceptions[' '.join(compiler + arg)] = e + continue + + version = search_version(out) + + if 'rustc' in out: + # On Linux and mac rustc will invoke gcc (clang for mac + # presumably) and it can do this windows, for dynamic linking. + # this means the easiest way to C compiler for dynamic linking. + # figure out what linker to use is to just get the value of the + # C compiler and use that as the basis of the rust linker. 
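+            # (In other words: reuse the C compiler's linker command and
+            # arguments when constructing the rustc invocation below.)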
+ # However, there are two things we need to change, if CC is not + # the default use that, and second add the necessary arguments + # to rust to use -fuse-ld + + if any(a.startswith('linker=') for a in compiler): + mlog.warning( + 'Please do not put -C linker= in your compiler ' + 'command, set rust_ld=command in your cross file ' + 'or use the RUST_LD environment variable, otherwise meson ' + 'will override your selection.') + + if override is None: + extra_args: T.Dict[str, T.Union[str, bool]] = {} + always_args: T.List[str] = [] + if is_link_exe: + compiler.extend(RustCompiler.use_linker_args(cc.linker.exelist[0])) + extra_args['direct'] = True + extra_args['machine'] = cc.linker.machine + else: + exelist = cc.linker.exelist + cc.linker.get_always_args() + if 'ccache' in exelist[0]: + del exelist[0] + c = exelist.pop(0) + compiler.extend(RustCompiler.use_linker_args(c)) + + # Also ensure that we pass any extra arguments to the linker + for l in exelist: + compiler.extend(['-C', f'link-arg={l}']) + + # This trickery with type() gets us the class of the linker + # so we can initialize a new copy for the Rust Compiler + # TODO rewrite this without type: ignore + if is_link_exe: + linker = type(cc.linker)(for_machine, always_args, exelist=cc.linker.exelist, # type: ignore + version=cc.linker.version, **extra_args) # type: ignore + else: + linker = type(cc.linker)(compiler, for_machine, cc.LINKER_PREFIX, + always_args=always_args, version=cc.linker.version, + **extra_args) # type: ignore + elif 'link' in override[0]: + linker = guess_win_linker(env, + override, RustCompiler, for_machine, use_linker_prefix=False) + # rustc takes linker arguments without a prefix, and + # inserts the correct prefix itself. + assert isinstance(linker, VisualStudioLikeLinkerMixin) + linker.direct = True + compiler.extend(RustCompiler.use_linker_args(linker.exelist[0])) + else: + # On linux and macos rust will invoke the c compiler for + # linking, on windows it will use lld-link or link.exe. + # we will simply ask for the C compiler that corresponds to + # it, and use that. + cc = _detect_c_or_cpp_compiler(env, 'c', for_machine, override_compiler=override) + linker = cc.linker + + # Of course, we're not going to use any of that, we just + # need it to get the proper arguments to pass to rustc + c = linker.exelist[1] if linker.exelist[0].endswith('ccache') else linker.exelist[0] + compiler.extend(RustCompiler.use_linker_args(c)) + + env.coredata.add_lang_args(RustCompiler.language, RustCompiler, for_machine, env) + return RustCompiler( + compiler, version, for_machine, is_cross, info, exe_wrap, + linker=linker) + + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + info = env.machines[for_machine] + + # Detect the target architecture, required for proper architecture handling on Windows. + # MSVC compiler is required for correct platform detection. 
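+    # On MSVC, a 32-bit target is renamed to 'x86_mscoff' below so that the D
+    # compilers emit COFF objects and link against the matching Phobos variant.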
+ c_compiler = {'c': detect_c_compiler(env, for_machine)} + is_msvc = isinstance(c_compiler['c'], VisualStudioCCompiler) + if not is_msvc: + c_compiler = {} + + # Import here to avoid circular imports + from ..environment import detect_cpu_family + arch = detect_cpu_family(c_compiler) + if is_msvc and arch == 'x86': + arch = 'x86_mscoff' + + popen_exceptions = {} + is_cross = env.is_cross_build(for_machine) + compilers, ccache, exe_wrap = _get_compilers(env, 'd', for_machine) + for exelist in compilers: + # Search for a D compiler. + # We prefer LDC over GDC unless overridden with the DC + # environment variable because LDC has a much more + # up to date language version at time (2016). + if os.path.basename(exelist[-1]).startswith(('ldmd', 'gdmd')): + raise EnvironmentException( + f'Meson does not support {exelist[-1]} as it is only a DMD frontend for another compiler.' + 'Please provide a valid value for DC or unset it so that Meson can resolve the compiler by itself.') + try: + p, out = Popen_safe(exelist + ['--version'])[0:2] + except OSError as e: + popen_exceptions[' '.join(exelist + ['--version'])] = e + continue + version = search_version(out) + full_version = out.split('\n', 1)[0] + + if 'LLVM D compiler' in out: + # LDC seems to require a file + # We cannot use NamedTemproraryFile on windows, its documented + # to not work for our uses. So, just use mkstemp and only have + # one path for simplicity. + o, f = tempfile.mkstemp('.d') + os.close(o) + + try: + if info.is_windows() or info.is_cygwin(): + objfile = os.path.basename(f)[:-1] + 'obj' + linker = guess_win_linker(env, + exelist, + LLVMDCompiler, for_machine, + use_linker_prefix=True, invoked_directly=False, + extra_args=[f]) + else: + # LDC writes an object file to the current working directory. + # Clean it up. + objfile = os.path.basename(f)[:-1] + 'o' + linker = guess_nix_linker(env, + exelist, LLVMDCompiler, for_machine, + extra_args=[f]) + finally: + windows_proof_rm(f) + windows_proof_rm(objfile) + + return LLVMDCompiler( + exelist, version, for_machine, info, arch, + full_version=full_version, linker=linker, version_output=out) + elif 'gdc' in out: + linker = guess_nix_linker(env, exelist, GnuDCompiler, for_machine) + return GnuDCompiler( + exelist, version, for_machine, info, arch, + exe_wrapper=exe_wrap, is_cross=is_cross, + full_version=full_version, linker=linker) + elif 'The D Language Foundation' in out or 'Digital Mars' in out: + # DMD seems to require a file + # We cannot use NamedTemproraryFile on windows, its documented + # to not work for our uses. So, just use mkstemp and only have + # one path for simplicity. 
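+            # mkstemp() hands back an already-open file descriptor; close it
+            # right away so the compiler can reopen the file (this matters on
+            # Windows).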
+ o, f = tempfile.mkstemp('.d') + os.close(o) + + # DMD as different detection logic for x86 and x86_64 + arch_arg = '-m64' if arch == 'x86_64' else '-m32' + + try: + if info.is_windows() or info.is_cygwin(): + objfile = os.path.basename(f)[:-1] + 'obj' + linker = guess_win_linker(env, + exelist, DmdDCompiler, for_machine, + invoked_directly=False, extra_args=[f, arch_arg]) + else: + objfile = os.path.basename(f)[:-1] + 'o' + linker = guess_nix_linker(env, + exelist, DmdDCompiler, for_machine, + extra_args=[f, arch_arg]) + finally: + windows_proof_rm(f) + windows_proof_rm(objfile) + + return DmdDCompiler( + exelist, version, for_machine, info, arch, + full_version=full_version, linker=linker) + raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') + + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_swift_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + exelist = env.lookup_binary_entry(for_machine, 'swift') + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + if exelist is None: + # TODO support fallback + exelist = [defaults['swift'][0]] + + try: + p, _, err = Popen_safe(exelist + ['-v']) + except OSError: + raise EnvironmentException('Could not execute Swift compiler "{}"'.format(' '.join(exelist))) + version = search_version(err) + if 'Swift' in err: + # As for 5.0.1 swiftc *requires* a file to check the linker: + with tempfile.NamedTemporaryFile(suffix='.swift') as f: + linker = guess_nix_linker(env, + exelist, SwiftCompiler, for_machine, + extra_args=[f.name]) + return SwiftCompiler( + exelist, version, for_machine, is_cross, info, linker=linker) + + raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') + + +# GNU/Clang defines and version +# ============================= + +def _get_gnu_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]: + """ + Detect GNU compiler platform type (Apple, MinGW, Unix) + """ + # Arguments to output compiler pre-processor defines to stdout + # gcc, g++, and gfortran all support these arguments + args = compiler + ['-E', '-dM', '-'] + p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE) + if p.returncode != 0: + raise EnvironmentException('Unable to detect GNU compiler type:\n' + output + error) + # Parse several lines of the type: + # `#define ___SOME_DEF some_value` + # and extract `___SOME_DEF` + defines: T.Dict[str, str] = {} + for line in output.split('\n'): + if not line: + continue + d, *rest = line.split(' ', 2) + if d != '#define': + continue + if len(rest) == 1: + defines[rest[0]] = '' + if len(rest) == 2: + defines[rest[0]] = rest[1] + return defines + +def _get_clang_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]: + """ + Get the list of Clang pre-processor defines + """ + args = compiler + ['-E', '-dM', '-'] + p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE) + if p.returncode != 0: + raise EnvironmentException('Unable to get clang pre-processor defines:\n' + output + error) + defines: T.Dict[str, str] = {} + for line in output.split('\n'): + if not line: + continue + d, *rest = line.split(' ', 2) + if d != '#define': + continue + if len(rest) == 1: + defines[rest[0]] = '' + if len(rest) == 2: + defines[rest[0]] = rest[1] + return defines + +def _get_gnu_version_from_defines(defines: T.Dict[str, str]) -> str: + dot = '.' 
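+    # e.g. __GNUC__=10, __GNUC_MINOR__=2, __GNUC_PATCHLEVEL__=1 -> '10.2.1'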
+ major = defines.get('__GNUC__', '0') + minor = defines.get('__GNUC_MINOR__', '0') + patch = defines.get('__GNUC_PATCHLEVEL__', '0') + return dot.join((major, minor, patch)) + +def _get_lcc_version_from_defines(defines: T.Dict[str, str]) -> str: + dot = '.' + generation_and_major = defines.get('__LCC__', '100') + generation = generation_and_major[:1] + major = generation_and_major[1:] + minor = defines.get('__LCC_MINOR__', '0') + return dot.join((generation, major, minor)) diff --git a/meson/mesonbuild/compilers/fortran.py b/meson/mesonbuild/compilers/fortran.py new file mode 100644 index 000000000..e15ac569d --- /dev/null +++ b/meson/mesonbuild/compilers/fortran.py @@ -0,0 +1,504 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pathlib import Path +import typing as T +import subprocess, os + +from .. import coredata +from .compilers import ( + clike_debug_args, + Compiler, +) +from .mixins.clike import CLikeCompiler +from .mixins.gnu import ( + GnuCompiler, gnulike_buildtype_args, gnu_optimization_args, +) +from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler +from .mixins.clang import ClangCompiler +from .mixins.elbrus import ElbrusCompiler +from .mixins.pgi import PGICompiler + +from mesonbuild.mesonlib import ( + version_compare, EnvironmentException, MesonException, MachineChoice, + LibType, OptionKey, +) + +if T.TYPE_CHECKING: + from ..coredata import KeyedOptionDictType + from ..dependencies import Dependency + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import DynamicLinker + from ..programs import ExternalProgram + + +class FortranCompiler(CLikeCompiler, Compiler): + + language = 'fortran' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + Compiler.__init__(self, exelist, version, for_machine, info, + is_cross=is_cross, full_version=full_version, linker=linker) + CLikeCompiler.__init__(self, exe_wrapper) + + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + raise MesonException('Fortran does not have "has_function" capability.\n' + 'It is better to test if a Fortran capability is working like:\n\n' + "meson.get_compiler('fortran').links('block; end block; end program')\n\n" + 'that example is to see if the compiler has Fortran 2008 Block element.') + + def sanity_check(self, work_dir_: str, environment: 'Environment') -> None: + work_dir = Path(work_dir_) + source_name = work_dir / 'sanitycheckf.f90' + binary_name = work_dir / 'sanitycheckf' + if binary_name.is_file(): + binary_name.unlink() + + source_name.write_text('program main; print *, "Fortran compilation is working."; end program', 
encoding='utf-8') + + extra_flags: T.List[str] = [] + extra_flags += environment.coredata.get_external_args(self.for_machine, self.language) + extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language) + extra_flags += self.get_always_args() + # %% build the test executable "sanitycheckf" + # cwd=work_dir is necessary on Windows especially for Intel compilers to avoid error: cannot write on sanitycheckf.obj + # this is a defect with how Windows handles files and ifort's object file-writing behavior vis concurrent ProcessPoolExecutor. + # This simple workaround solves the issue. + # FIXME: cwd=str(work_dir) is for Python 3.5 on Windows, when 3.5 is deprcated, this can become cwd=work_dir + returncode = subprocess.run(self.exelist + extra_flags + [str(source_name), '-o', str(binary_name)], + cwd=str(work_dir)).returncode + if returncode != 0: + raise EnvironmentException('Compiler %s can not compile programs.' % self.name_string()) + if self.is_cross: + if self.exe_wrapper is None: + # Can't check if the binaries run so we have to assume they do + return + cmdlist = self.exe_wrapper.get_command() + [str(binary_name)] + else: + cmdlist = [str(binary_name)] + # %% Run the test executable + try: + returncode = subprocess.run(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL).returncode + if returncode != 0: + raise EnvironmentException('Executables created by Fortran compiler %s are not runnable.' % self.name_string()) + except OSError: + raise EnvironmentException('Executables created by Fortran compiler %s are not runnable.' % self.name_string()) + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return gnulike_buildtype_args[buildtype] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return gnu_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return clike_debug_args[is_debug] + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-cpp'] + super().get_preprocess_only_args() + + def get_module_incdir_args(self) -> T.Tuple[str, ...]: + return ('-I', ) + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-module', path] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + + return parameter_list + + def module_name_to_filename(self, module_name: str) -> str: + if '_' in module_name: # submodule + s = module_name.lower() + if self.id in ('gcc', 'intel', 'intel-cl'): + filename = s.replace('_', '@') + '.smod' + elif self.id in ('pgi', 'flang'): + filename = s.replace('_', '-') + '.mod' + else: + filename = s + '.mod' + else: # module + filename = module_name.lower() + '.mod' + + return filename + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: + code = 'stop; end program' + return self._find_library_impl(libname, env, extra_dirs, code, libtype) + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_arguments(args, env, 'stop; end program') + + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_link_arguments(args, env, 'stop; end program') + + def 
get_options(self) -> 'KeyedOptionDictType': + opts = super().get_options() + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key: coredata.UserComboOption( + 'Fortran language standard to use', + ['none'], + 'none', + ), + }) + return opts + + +class GnuFortranCompiler(GnuCompiler, FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + GnuCompiler.__init__(self, defines) + default_warn_args = ['-Wall'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = FortranCompiler.get_options(self) + fortran_stds = ['legacy', 'f95', 'f2003'] + if version_compare(self.version, '>=4.4.0'): + fortran_stds += ['f2008'] + if version_compare(self.version, '>=8.0.0'): + fortran_stds += ['f2018'] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + fortran_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + # Disabled until this is fixed: + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=62162 + # return ['-cpp', '-MD', '-MQ', outtarget] + return [] + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-J' + path] + + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lgfortran', '-lm'] + + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + ''' + Derived from mixins/clike.py:has_header, but without C-style usage of + __has_include which breaks with GCC-Fortran 10: + https://github.com/mesonbuild/meson/issues/7017 + ''' + code = f'{prefix}\n#include <{hname}>' + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode='preprocess', disable_cache=disable_cache) + + +class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + GnuFortranCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, defines=defines, + linker=linker, full_version=full_version) + ElbrusCompiler.__init__(self) + +class G95FortranCompiler(FortranCompiler): + + LINKER_PREFIX = '-Wl,' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: 
T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + self.id = 'g95' + default_warn_args = ['-Wall'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-pedantic']} + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-fmod=' + path] + + def get_no_warn_args(self) -> T.List[str]: + # FIXME: Confirm that there's no compiler option to disable all warnings + return [] + + +class SunFortranCompiler(FortranCompiler): + + LINKER_PREFIX = '-Wl,' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + self.id = 'sun' + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['-fpp'] + + def get_always_args(self) -> T.List[str]: + return [] + + def get_warn_args(self, level: str) -> T.List[str]: + return [] + + def get_module_incdir_args(self) -> T.Tuple[str, ...]: + return ('-M', ) + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-moddir=' + path] + + def openmp_flags(self) -> T.List[str]: + return ['-xopenmp'] + + +class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler): + + file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', ) + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + # FIXME: Add support for OS X and Windows in detect_fortran_compiler so + # we are sent the type of compiler + IntelGnuLikeCompiler.__init__(self) + self.id = 'intel' + default_warn_args = ['-warn', 'general', '-warn', 'truncated_source'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-warn', 'unused'], + '3': ['-warn', 'all']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = FortranCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} + if std.value != 'none': + args.append('-stand=' + stds[std.value]) + return args + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-cpp', '-EP'] + + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lifcore', '-limf'] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['-gen-dep=' + outtarget, '-gen-depformat=make'] + + +class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): + + file_suffixes = ('f90', 'f', 'for', 
'ftn', 'fpp', ) + always_args = ['/nologo'] + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + IntelVisualStudioLikeCompiler.__init__(self, target) + + default_warn_args = ['/warn:general', '/warn:truncated_source'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['/warn:unused'], + '3': ['/warn:all']} + + def get_options(self) -> 'KeyedOptionDictType': + opts = FortranCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} + if std.value != 'none': + args.append('/stand:' + stds[std.value]) + return args + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['/module:' + path] + + +class PathScaleFortranCompiler(FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + self.id = 'pathscale' + default_warn_args = ['-fullwarn'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args} + + def openmp_flags(self) -> T.List[str]: + return ['-mp'] + + +class PGIFortranCompiler(PGICompiler, FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + PGICompiler.__init__(self) + + default_warn_args = ['-Minform=inform'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args + ['-Mdclchk']} + + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lpgf90rtl', '-lpgf90', '-lpgf90_rpm1', '-lpgf902', + '-lpgf90rtl', '-lpgftnrtl', '-lrt'] + + +class NvidiaHPC_FortranCompiler(PGICompiler, FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + PGICompiler.__init__(self) + + self.id = 'nvidia_hpc' + default_warn_args = ['-Minform=inform'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': 
default_warn_args, + '3': default_warn_args + ['-Mdclchk']} + + +class FlangFortranCompiler(ClangCompiler, FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + ClangCompiler.__init__(self, {}) + self.id = 'flang' + default_warn_args = ['-Minform=inform'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args} + + def language_stdlib_only_link_flags(self) -> T.List[str]: + return ['-lflang', '-lpgmath'] + +class Open64FortranCompiler(FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + self.id = 'open64' + default_warn_args = ['-fullwarn'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args} + + def openmp_flags(self) -> T.List[str]: + return ['-mp'] + + +class NAGFortranCompiler(FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + self.id = 'nagfor' + + def get_warn_args(self, level: str) -> T.List[str]: + return [] + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-mdir', path] + + def openmp_flags(self) -> T.List[str]: + return ['-openmp'] diff --git a/meson/mesonbuild/compilers/java.py b/meson/mesonbuild/compilers/java.py new file mode 100644 index 000000000..ab8245057 --- /dev/null +++ b/meson/mesonbuild/compilers/java.py @@ -0,0 +1,104 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
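# A minimal standalone sketch of FortranCompiler.module_name_to_filename()
# from fortran.py above: Fortran submodules (names containing '_') map to
# compiler-specific on-disk names, which is why the method branches on the
# compiler id, while plain modules always become lower-cased '.mod' files.
# The free-function form and the sample names here are illustrative only.
def module_name_to_filename(module_name: str, compiler_id: str) -> str:
    s = module_name.lower()
    if '_' in module_name:  # submodule
        if compiler_id in ('gcc', 'intel', 'intel-cl'):
            return s.replace('_', '@') + '.smod'
        if compiler_id in ('pgi', 'flang'):
            return s.replace('_', '-') + '.mod'
        return s + '.mod'
    return s + '.mod'  # regular module

assert module_name_to_filename('parent_child', 'gcc') == 'parent@child.smod'
assert module_name_to_filename('parent_child', 'flang') == 'parent-child.mod'
assert module_name_to_filename('utils', 'sun') == 'utils.mod'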
+ +import os.path +import shutil +import subprocess +import textwrap +import typing as T + +from ..mesonlib import EnvironmentException, MachineChoice +from .compilers import Compiler, java_buildtype_args +from .mixins.islinker import BasicLinkerIsCompilerMixin + +if T.TYPE_CHECKING: + from ..envconfig import MachineInfo + from ..environment import Environment + +class JavaCompiler(BasicLinkerIsCompilerMixin, Compiler): + + language = 'java' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', full_version: T.Optional[str] = None): + super().__init__(exelist, version, for_machine, info, full_version=full_version) + self.id = 'unknown' + self.javarunner = 'java' + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_output_args(self, subdir: str) -> T.List[str]: + if subdir == '': + subdir = './' + return ['-d', subdir, '-s', subdir] + + def get_pic_args(self) -> T.List[str]: + return [] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def get_pch_name(self, name: str) -> str: + return '' + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return java_buildtype_args[buildtype] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i in ['-cp', '-classpath', '-sourcepath'] and idx + 1 < len(parameter_list): + path_list = parameter_list[idx + 1].split(os.pathsep) + path_list = [os.path.normpath(os.path.join(build_dir, x)) for x in path_list] + parameter_list[idx + 1] = os.pathsep.join(path_list) + + return parameter_list + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + src = 'SanityCheck.java' + obj = 'SanityCheck' + source_name = os.path.join(work_dir, src) + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write(textwrap.dedent( + '''class SanityCheck { + public static void main(String[] args) { + int i; + } + } + ''')) + pc = subprocess.Popen(self.exelist + [src], cwd=work_dir) + pc.wait() + if pc.returncode != 0: + raise EnvironmentException('Java compiler %s can not compile programs.' % self.name_string()) + runner = shutil.which(self.javarunner) + if runner: + cmdlist = [runner, obj] + pe = subprocess.Popen(cmdlist, cwd=work_dir) + pe.wait() + if pe.returncode != 0: + raise EnvironmentException('Executables created by Java compiler %s are not runnable.' % self.name_string()) + else: + m = "Java Virtual Machine wasn't found, but it's needed by Meson. " \ + "Please install a JRE.\nIf you have specific needs where this " \ + "requirement doesn't make sense, please open a bug at " \ + "https://github.com/mesonbuild/meson/issues/new and tell us " \ + "all about it." + raise EnvironmentException(m) + + def needs_static_linker(self) -> bool: + return False + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return [] diff --git a/meson/mesonbuild/compilers/mixins/__init__.py b/meson/mesonbuild/compilers/mixins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/meson/mesonbuild/compilers/mixins/arm.py b/meson/mesonbuild/compilers/mixins/arm.py new file mode 100644 index 000000000..4e1898ae7 --- /dev/null +++ b/meson/mesonbuild/compilers/mixins/arm.py @@ -0,0 +1,190 @@ +# Copyright 2012-2020 Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Representations specific to the arm family of compilers.""" + +import os +import typing as T + +from ... import mesonlib +from ...linkers import ArmClangDynamicLinker +from ...mesonlib import OptionKey +from ..compilers import clike_debug_args +from .clang import clang_color_args + +if T.TYPE_CHECKING: + from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + +arm_buildtype_args = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} # type: T.Dict[str, T.List[str]] + +arm_optimization_args = { + '0': ['-O0'], + 'g': ['-g'], + '1': ['-O1'], + '2': [], # Compiler defaults to -O2 + '3': ['-O3', '-Otime'], + 's': ['-O3'], # Compiler defaults to -Ospace +} # type: T.Dict[str, T.List[str]] + +armclang_buildtype_args = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} # type: T.Dict[str, T.List[str]] + +armclang_optimization_args = { + '0': [], # Compiler defaults to -O0 + 'g': ['-g'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Oz'] +} # type: T.Dict[str, T.List[str]] + + +class ArmCompiler(Compiler): + + """Functionality that is common to all ARM family compilers.""" + + def __init__(self) -> None: + if not self.is_cross: + raise mesonlib.EnvironmentException('armcc supports only cross-compilation.') + self.id = 'arm' + default_warn_args = [] # type: T.List[str] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + []} # type: T.Dict[str, T.List[str]] + # Assembly + self.can_compile_suffixes.add('s') + + def get_pic_args(self) -> T.List[str]: + # FIXME: Add /ropi, /rwpi, /fpic etc. qualifiers to --apcs + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return arm_buildtype_args[buildtype] + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return [] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['--depend_target', outtarget, '--depend', outfile, '--depend_single_line'] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + # FIXME: Add required arguments + # NOTE from armcc user guide: + # "Support for Precompiled Header (PCH) files is deprecated from ARM Compiler 5.05 + # onwards on all platforms. Note that ARM Compiler on Windows 8 never supported + # PCH files." + return [] + + def get_pch_suffix(self) -> str: + # NOTE from armcc user guide: + # "Support for Precompiled Header (PCH) files is deprecated from ARM Compiler 5.05 + # onwards on all platforms. Note that ARM Compiler on Windows 8 never supported + # PCH files." 
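# A small self-contained sketch of the T.TYPE_CHECKING trick documented at
# the top of this mixin (and repeated in c2000.py, ccrx.py and clike.py
# below): mypy type-checks the mixin against a Compiler interface, while at
# runtime the mixin derives from plain object, so the concrete compiler
# class controls the MRO. The stub interface and class names here are
# invented for illustration and are not the real mesonbuild classes.
import typing as T

if T.TYPE_CHECKING:
    class Compiler:  # stand-in for the real Compiler interface mypy sees
        is_cross: bool
        def name_string(self) -> str: ...
else:
    Compiler = object  # what actually executes: no extra base class

class CrossOnlyMixin(Compiler):
    def describe(self) -> str:
        # Resolves against the stub for mypy; supplied at runtime by the
        # concrete class that also inherits the real compiler base.
        return ('cross ' if self.is_cross else 'native ') + self.name_string()

class FakeArmCompiler(CrossOnlyMixin):
    is_cross = True
    def name_string(self) -> str:
        return 'armcc'

assert FakeArmCompiler().describe() == 'cross armcc'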
+ return 'pch' + + def thread_flags(self, env: 'Environment') -> T.List[str]: + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return arm_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return clike_debug_args[is_debug] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + + return parameter_list + + +class ArmclangCompiler(Compiler): + + def __init__(self) -> None: + if not self.is_cross: + raise mesonlib.EnvironmentException('armclang supports only cross-compilation.') + # Check whether 'armlink' is available in path + if not isinstance(self.linker, ArmClangDynamicLinker): + raise mesonlib.EnvironmentException(f'Unsupported Linker {self.linker.exelist}, must be armlink') + if not mesonlib.version_compare(self.version, '==' + self.linker.version): + raise mesonlib.EnvironmentException('armlink version does not match with compiler version') + self.id = 'armclang' + self.base_options = { + OptionKey(o) for o in + ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', + 'b_ndebug', 'b_staticpic', 'b_colorout']} + # Assembly + self.can_compile_suffixes.add('s') + + def get_pic_args(self) -> T.List[str]: + # PIC support is not enabled by default for ARM, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_colorout_args(self, colortype: str) -> T.List[str]: + return clang_color_args[colortype][:] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return armclang_buildtype_args[buildtype] + + def get_pch_suffix(self) -> str: + return 'gch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136 + # This flag is internal to Clang (or at least not documented on the man page) + # so it might change semantics at any time. + return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['-MD', '-MT', outtarget, '-MF', outfile] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return armclang_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return clike_debug_args[is_debug] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + + return parameter_list diff --git a/meson/mesonbuild/compilers/mixins/c2000.py b/meson/mesonbuild/compilers/mixins/c2000.py new file mode 100644 index 000000000..287aaa89e --- /dev/null +++ b/meson/mesonbuild/compilers/mixins/c2000.py @@ -0,0 +1,124 @@ +# Copyright 2012-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Representations specific to the Texas Instruments C2000 compiler family.""" + +import os +import typing as T + +from ...mesonlib import EnvironmentException + +if T.TYPE_CHECKING: + from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + +c2000_buildtype_args = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} # type: T.Dict[str, T.List[str]] + +c2000_optimization_args = { + '0': ['-O0'], + 'g': ['-Ooff'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-04'] +} # type: T.Dict[str, T.List[str]] + +c2000_debug_args = { + False: [], + True: [] +} # type: T.Dict[bool, T.List[str]] + + +class C2000Compiler(Compiler): + + def __init__(self) -> None: + if not self.is_cross: + raise EnvironmentException('c2000 supports only cross-compilation.') + self.id = 'c2000' + # Assembly + self.can_compile_suffixes.add('asm') + default_warn_args = [] # type: T.List[str] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + []} # type: T.Dict[str, T.List[str]] + + def get_pic_args(self) -> T.List[str]: + # PIC support is not enabled by default for c2000, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return c2000_buildtype_args[buildtype] + + def get_pch_suffix(self) -> str: + return 'pch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def thread_flags(self, env: 'Environment') -> T.List[str]: + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_no_stdlib_link_args(self) -> T.List[str]: + return [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return c2000_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return c2000_debug_args[is_debug] + + @classmethod + def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]: + result = [] + for i in args: + if i.startswith('-D'): + i = '-define=' + i[2:] + if i.startswith('-I'): + i = '-include=' + i[2:] + if i.startswith('-Wl,-rpath='): + continue + elif i == '--print-search-dirs': + continue + elif i.startswith('-L'): + continue + result.append(i) + return result + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:9] == '-include=': + parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:])) + + return parameter_list diff --git a/meson/mesonbuild/compilers/mixins/ccrx.py b/meson/mesonbuild/compilers/mixins/ccrx.py new 
file mode 100644 index 000000000..eba4c455f --- /dev/null +++ b/meson/mesonbuild/compilers/mixins/ccrx.py @@ -0,0 +1,130 @@ +# Copyright 2012-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Representations specific to the Renesas CC-RX compiler family.""" + +import os +import typing as T + +from ...mesonlib import EnvironmentException + +if T.TYPE_CHECKING: + from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + +ccrx_buildtype_args = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} # type: T.Dict[str, T.List[str]] + +ccrx_optimization_args = { + '0': ['-optimize=0'], + 'g': ['-optimize=0'], + '1': ['-optimize=1'], + '2': ['-optimize=2'], + '3': ['-optimize=max'], + 's': ['-optimize=2', '-size'] +} # type: T.Dict[str, T.List[str]] + +ccrx_debug_args = { + False: [], + True: ['-debug'] +} # type: T.Dict[bool, T.List[str]] + + +class CcrxCompiler(Compiler): + + if T.TYPE_CHECKING: + is_cross = True + can_compile_suffixes = set() # type: T.Set[str] + + def __init__(self) -> None: + if not self.is_cross: + raise EnvironmentException('ccrx supports only cross-compilation.') + self.id = 'ccrx' + # Assembly + self.can_compile_suffixes.add('src') + default_warn_args = [] # type: T.List[str] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + []} # type: T.Dict[str, T.List[str]] + + def get_pic_args(self) -> T.List[str]: + # PIC support is not enabled by default for CCRX, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return ccrx_buildtype_args[buildtype] + + def get_pch_suffix(self) -> str: + return 'pch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def thread_flags(self, env: 'Environment') -> T.List[str]: + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_no_stdlib_link_args(self) -> T.List[str]: + return [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return ccrx_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return ccrx_debug_args[is_debug] + + @classmethod + def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]: + result = [] + for i in args: + if i.startswith('-D'): + i = '-define=' + i[2:] + if i.startswith('-I'): + i = '-include=' + i[2:] + if i.startswith('-Wl,-rpath='): + continue + elif i == '--print-search-dirs': + continue + elif 
i.startswith('-L'): + continue + elif not i.startswith('-lib=') and i.endswith(('.a', '.lib')): + i = '-lib=' + i + result.append(i) + return result + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:9] == '-include=': + parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:])) + + return parameter_list diff --git a/meson/mesonbuild/compilers/mixins/clang.py b/meson/mesonbuild/compilers/mixins/clang.py new file mode 100644 index 000000000..f7e94928c --- /dev/null +++ b/meson/mesonbuild/compilers/mixins/clang.py @@ -0,0 +1,162 @@ +# Copyright 2019 The meson development team +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abstractions for the LLVM/Clang compiler family.""" + +import os +import shutil +import typing as T + +from ... import mesonlib +from ...linkers import AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker +from ...mesonlib import OptionKey +from ..compilers import CompileCheckMode +from .gnu import GnuLikeCompiler + +if T.TYPE_CHECKING: + from ...environment import Environment + from ...dependencies import Dependency # noqa: F401 + +clang_color_args = { + 'auto': ['-fcolor-diagnostics'], + 'always': ['-fcolor-diagnostics'], + 'never': ['-fno-color-diagnostics'], +} # type: T.Dict[str, T.List[str]] + +clang_optimization_args = { + '0': ['-O0'], + 'g': ['-Og'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Os'], +} # type: T.Dict[str, T.List[str]] + +class ClangCompiler(GnuLikeCompiler): + + def __init__(self, defines: T.Optional[T.Dict[str, str]]): + super().__init__() + self.id = 'clang' + self.defines = defines or {} + self.base_options.update( + {OptionKey('b_colorout'), OptionKey('b_lto_threads'), OptionKey('b_lto_mode')}) + + # TODO: this really should be part of the linker base_options, but + # linkers don't have base_options. + if isinstance(self.linker, AppleDynamicLinker): + self.base_options.add(OptionKey('b_bitcode')) + # All Clang backends can also do LLVM IR + self.can_compile_suffixes.add('ll') + + def get_colorout_args(self, colortype: str) -> T.List[str]: + return clang_color_args[colortype][:] + + def has_builtin_define(self, define: str) -> bool: + return define in self.defines + + def get_builtin_define(self, define: str) -> T.Optional[str]: + return self.defines.get(define) + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return clang_optimization_args[optimization_level] + + def get_pch_suffix(self) -> str: + return 'pch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136 + # This flag is internal to Clang (or at least not documented on the man page) + # so it might change semantics at any time. 
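# A rough sketch of the unix_args_to_native() translation performed by the
# C2000 and CC-RX mixins above: GCC-style flags are rewritten into the
# vendor spelling (-D -> -define=, -I -> -include=) and flags that only
# make sense for a Unix linker (-L, -Wl,-rpath=, --print-search-dirs) are
# dropped. The helper name is illustrative; it is not a real Meson API.
import typing as T

def ti_renesas_args_to_native(args: T.List[str]) -> T.List[str]:
    result: T.List[str] = []
    for arg in args:
        if arg.startswith('-D'):
            result.append('-define=' + arg[2:])
        elif arg.startswith('-I'):
            result.append('-include=' + arg[2:])
        elif arg.startswith(('-L', '-Wl,-rpath=')) or arg == '--print-search-dirs':
            continue  # meaningless to these bare-metal toolchains
        else:
            result.append(arg)
    return result

assert ti_renesas_args_to_native(['-DFOO=1', '-Iinc', '-Llib', '-O2']) == \
    ['-define=FOO=1', '-include=inc', '-O2']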
+ return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # Clang is different than GCC, it will return True when a symbol isn't + # defined in a header. Specifically this seems ot have something to do + # with functions that may be in a header on some systems, but not all of + # them. `strlcat` specifically with can trigger this. + myargs: T.List[str] = ['-Werror=implicit-function-declaration'] + if mode is CompileCheckMode.COMPILE: + myargs.extend(['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument']) + if mesonlib.version_compare(self.version, '>=3.6.0'): + myargs.append('-Werror=ignored-optimization-argument') + return super().get_compiler_check_args(mode) + myargs + + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + if extra_args is None: + extra_args = [] + # Starting with XCode 8, we need to pass this to force linker + # visibility to obey OS X/iOS/tvOS minimum version targets with + # -mmacosx-version-min, -miphoneos-version-min, -mtvos-version-min etc. + # https://github.com/Homebrew/homebrew-core/issues/3727 + # TODO: this really should be communicated by the linker + if isinstance(self.linker, AppleDynamicLinker) and mesonlib.version_compare(self.version, '>=8.0'): + extra_args.append('-Wl,-no_weak_imports') + return super().has_function(funcname, prefix, env, extra_args=extra_args, + dependencies=dependencies) + + def openmp_flags(self) -> T.List[str]: + if mesonlib.version_compare(self.version, '>=3.8.0'): + return ['-fopenmp'] + elif mesonlib.version_compare(self.version, '>=3.7.0'): + return ['-fopenmp=libomp'] + else: + # Shouldn't work, but it'll be checked explicitly in the OpenMP dependency. + return [] + + @classmethod + def use_linker_args(cls, linker: str) -> T.List[str]: + # Clang additionally can use a linker specified as a path, which GCC + # (and other gcc-like compilers) cannot. This is becuse clang (being + # llvm based) is retargetable, while GCC is not. + # + + # qcld: Qualcomm Snapdragon linker, based on LLVM + if linker == 'qcld': + return ['-fuse-ld=qcld'] + + if shutil.which(linker): + if not shutil.which(linker): + raise mesonlib.MesonException( + f'Cannot find linker {linker}.') + return [f'-fuse-ld={linker}'] + return super().use_linker_args(linker) + + def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]: + # Clang only warns about unknown or ignored attributes, so force an + # error. 
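# A self-contained sketch of the version gating that openmp_flags() above
# performs with mesonlib.version_compare(): clang >= 3.8 takes plain
# -fopenmp, 3.7 needs -fopenmp=libomp, and anything older gets no flag (the
# OpenMP dependency check then reports failure). _parse() is a simplified
# stand-in for Meson's real version comparison, used here only for the demo.
import typing as T

def _parse(version: str) -> T.Tuple[int, ...]:
    return tuple(int(p) for p in version.split('.') if p.isdigit())

def clang_openmp_flags(version: str) -> T.List[str]:
    if _parse(version) >= (3, 8, 0):
        return ['-fopenmp']
    if _parse(version) >= (3, 7, 0):
        return ['-fopenmp=libomp']
    return []

assert clang_openmp_flags('12.0.1') == ['-fopenmp']
assert clang_openmp_flags('3.7.1') == ['-fopenmp=libomp']
assert clang_openmp_flags('3.5.0') == []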
+ return ['-Werror=attributes'] + + def get_coverage_link_args(self) -> T.List[str]: + return ['--coverage'] + + def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]: + args: T.List[str] = [] + if mode == 'thin': + # Thin LTO requires the use of gold, lld, ld64, or lld-link + if not isinstance(self.linker, (AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker)): + raise mesonlib.MesonException(f"LLVM's thinLTO only works with gnu gold, lld, lld-link, and ld64, not {self.linker.id}") + args.append(f'-flto={mode}') + else: + assert mode == 'default', 'someone forgot to wire something up' + args.extend(super().get_lto_compile_args(threads=threads)) + return args + + def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]: + args = self.get_lto_compile_args(threads=threads, mode=mode) + # In clang -flto=0 means auto + if threads >= 0: + args.append(f'-flto-jobs={threads}') + return args diff --git a/meson/mesonbuild/compilers/mixins/clike.py b/meson/mesonbuild/compilers/mixins/clike.py new file mode 100644 index 000000000..09ad837b1 --- /dev/null +++ b/meson/mesonbuild/compilers/mixins/clike.py @@ -0,0 +1,1267 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +"""Mixin classes to be shared between C and C++ compilers. + +Without this we'll end up with awful diamond inherintance problems. The goal +of this is to have mixin's, which are classes that are designed *not* to be +standalone, they only work through inheritance. +""" + +import collections +import functools +import glob +import itertools +import os +import re +import subprocess +import typing as T +from pathlib import Path + +from ... import arglist +from ... import mesonlib +from ... import mlog +from ...linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker +from ...mesonlib import LibType +from ...coredata import OptionKey +from .. import compilers +from ..compilers import CompileCheckMode +from .visualstudio import VisualStudioLikeCompiler + +if T.TYPE_CHECKING: + from ...dependencies import Dependency + from ..._typing import ImmutableListProtocol + from ...environment import Environment + from ...compilers.compilers import Compiler + from ...programs import ExternalProgram +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + +GROUP_FLAGS = re.compile(r'''\.so (?:\.[0-9]+)? (?:\.[0-9]+)? (?:\.[0-9]+)?$ | + ^(?:-Wl,)?-l | + \.a$''', re.X) + +class CLikeCompilerArgs(arglist.CompilerArgs): + prepend_prefixes = ('-I', '-L') + dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') + + # NOTE: not thorough. 
A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') + dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') + + def to_native(self, copy: bool = False) -> T.List[str]: + # This seems to be allowed, but could never work? + assert isinstance(self.compiler, compilers.Compiler), 'How did you get here' + + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. + self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which + # all act like (or are) gnu ld + # TODO: this could probably be added to the DynamicLinker instead + if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker)): + group_start = -1 + group_end = -1 + for i, each in enumerate(new): + if not GROUP_FLAGS.search(each): + continue + group_end = i + if group_start < 0: + # First occurrence of a library + group_start = i + if group_start >= 0: + # Last occurrence of a library + new.insert(group_end + 1, '-Wl,--end-group') + new.insert(group_start, '-Wl,--start-group') + # Remove system/default include paths added with -isystem + default_dirs = self.compiler.get_default_include_dirs() + if default_dirs: + bad_idx_list = [] # type: T.List[int] + for i, each in enumerate(new): + if not each.startswith('-isystem'): + continue + + # Remove the -isystem and the path if the path is a default path + if (each == '-isystem' and + i < (len(new) - 1) and + new[i + 1] in default_dirs): + bad_idx_list += [i, i + 1] + elif each.startswith('-isystem=') and each[9:] in default_dirs: + bad_idx_list += [i] + elif each[8:] in default_dirs: + bad_idx_list += [i] + for i in reversed(bad_idx_list): + new.pop(i) + return self.compiler.unix_args_to_native(new._container) + + def __repr__(self) -> str: + self.flush_pre_post() + return f'CLikeCompilerArgs({self.compiler!r}, {self._container!r})' + + +class CLikeCompiler(Compiler): + + """Shared bits for the C and CPP Compilers.""" + + if T.TYPE_CHECKING: + warn_args = {} # type: T.Dict[str, T.List[str]] + + # TODO: Replace this manual cache with functools.lru_cache + find_library_cache = {} # type: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType], T.Optional[T.List[str]]] + find_framework_cache = {} # type: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], bool], T.Optional[T.List[str]]] + internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS + + def __init__(self, exe_wrapper: T.Optional['ExternalProgram'] = None): + # If a child ObjC or CPP class has already set it, don't set it ourselves + self.can_compile_suffixes.add('h') + # If the exe wrapper was not found, pretend it wasn't set so that the + # sanity check is skipped and compiler checks use fallbacks. 
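# A compact sketch of the -Wl,--start-group / -Wl,--end-group wrapping done
# in CLikeCompilerArgs.to_native() above: with a GNU-style dynamic linker,
# the span from the first library-like argument to the last one is wrapped
# so the linker rescans the group and circular references between static
# libraries resolve. GROUP_FLAGS is copied from the mixin; the wrapper
# function itself is a simplified illustration.
import re
import typing as T

GROUP_FLAGS = re.compile(r'''\.so (?:\.[0-9]+)? (?:\.[0-9]+)? (?:\.[0-9]+)?$ |
                             ^(?:-Wl,)?-l |
                             \.a$''', re.X)

def wrap_library_group(args: T.List[str]) -> T.List[str]:
    new = list(args)
    group_start = group_end = -1
    for i, each in enumerate(new):
        if not GROUP_FLAGS.search(each):
            continue
        group_end = i
        if group_start < 0:
            group_start = i  # first library-like argument
    if group_start >= 0:
        new.insert(group_end + 1, '-Wl,--end-group')
        new.insert(group_start, '-Wl,--start-group')
    return new

assert wrap_library_group(['-o', 'prog', 'liba.a', '-lm', 'libb.a']) == \
    ['-o', 'prog', '-Wl,--start-group', 'liba.a', '-lm', 'libb.a', '-Wl,--end-group']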
+ if not exe_wrapper or not exe_wrapper.found() or not exe_wrapper.get_command(): + self.exe_wrapper = None + else: + self.exe_wrapper = exe_wrapper + + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs: + # This is correct, mypy just doesn't understand co-operative inheritance + return CLikeCompilerArgs(self, args) + + def needs_static_linker(self) -> bool: + return True # When compiling static libraries, so yes. + + def get_always_args(self) -> T.List[str]: + ''' + Args that are always-on for all C compilers other than MSVC + ''' + return self.get_largefile_args() + + def get_no_stdinc_args(self) -> T.List[str]: + return ['-nostdinc'] + + def get_no_stdlib_link_args(self) -> T.List[str]: + return ['-nostdlib'] + + def get_warn_args(self, level: str) -> T.List[str]: + # TODO: this should be an enum + return self.warn_args[level] + + def get_no_warn_args(self) -> T.List[str]: + # Almost every compiler uses this for disabling warnings + return ['-w'] + + def get_depfile_suffix(self) -> str: + return 'd' + + def get_exelist(self) -> T.List[str]: + return self.exelist.copy() + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-E', '-P'] + + def get_compile_only_args(self) -> T.List[str]: + return ['-c'] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-o', target] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' + if is_system: + return ['-isystem', path] + return ['-I' + path] + + def get_compiler_dirs(self, env: 'Environment', name: str) -> T.List[str]: + ''' + Get dirs from the compiler, either `libraries:` or `programs:` + ''' + return [] + + @functools.lru_cache() + def _get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> 'ImmutableListProtocol[str]': + # TODO: replace elf_class with enum + dirs = self.get_compiler_dirs(env, 'libraries') + if elf_class is None or elf_class == 0: + return dirs + + # if we do have an elf class for 32-bit or 64-bit, we want to check that + # the directory in question contains libraries of the appropriate class. Since + # system directories aren't mixed, we only need to check one file for each + # directory and go by that. If we can't check the file for some reason, assume + # the compiler knows what it's doing, and accept the directory anyway. + retval = [] + for d in dirs: + files = [f for f in os.listdir(d) if f.endswith('.so') and os.path.isfile(os.path.join(d, f))] + # if no files, accept directory and move on + if not files: + retval.append(d) + continue + + for f in files: + file_to_check = os.path.join(d, f) + try: + with open(file_to_check, 'rb') as fd: + header = fd.read(5) + # if file is not an ELF file, it's weird, but accept dir + # if it is elf, and the class matches, accept dir + if header[1:4] != b'ELF' or int(header[4]) == elf_class: + retval.append(d) + # at this point, it's an ELF file which doesn't match the + # appropriate elf_class, so skip this one + # stop scanning after the first successful read + break + except OSError: + # Skip the file if we can't read it + pass + + return retval + + def get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> T.List[str]: + """Wrap the lru_cache so that we return a new copy and don't allow + mutation of the cached value. 
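# A sketch of the ELF-class probe that _get_library_dirs() above relies on:
# the first five bytes of a candidate shared library are enough to tell
# whether it is an ELF object at all and, via the EI_CLASS byte, whether it
# is 32-bit (1) or 64-bit (2); directories whose libraries do not match the
# requested class are filtered out. The helper name is illustrative only.
def elf_class_of(path: str) -> int:
    # Returns 0 for unreadable or non-ELF files, otherwise EI_CLASS (1 or 2).
    try:
        with open(path, 'rb') as fd:
            header = fd.read(5)
    except OSError:
        return 0
    if len(header) < 5 or header[1:4] != b'ELF':
        return 0
    return header[4]

# e.g. elf_class_of() on a 64-bit system library would typically return 2.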
+ """ + return self._get_library_dirs(env, elf_class).copy() + + @functools.lru_cache() + def _get_program_dirs(self, env: 'Environment') -> 'ImmutableListProtocol[str]': + ''' + Programs used by the compiler. Also where toolchain DLLs such as + libstdc++-6.dll are found with MinGW. + ''' + return self.get_compiler_dirs(env, 'programs') + + def get_program_dirs(self, env: 'Environment') -> T.List[str]: + return self._get_program_dirs(env).copy() + + def get_pic_args(self) -> T.List[str]: + return ['-fPIC'] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return ['-include', os.path.basename(header)] + + def get_pch_name(self, header_name: str) -> str: + return os.path.basename(header_name) + '.' + self.get_pch_suffix() + + def get_default_include_dirs(self) -> T.List[str]: + return [] + + def gen_export_dynamic_link_args(self, env: 'Environment') -> T.List[str]: + return self.linker.export_dynamic_args(env) + + def gen_import_library_args(self, implibname: str) -> T.List[str]: + return self.linker.import_library_args(implibname) + + def _sanity_check_impl(self, work_dir: str, environment: 'Environment', + sname: str, code: str) -> None: + mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist)) + mlog.debug(f'Is cross compiler: {self.is_cross!s}.') + + source_name = os.path.join(work_dir, sname) + binname = sname.rsplit('.', 1)[0] + mode = CompileCheckMode.LINK + if self.is_cross: + binname += '_cross' + if self.exe_wrapper is None: + # Linking cross built apps is painful. You can't really + # tell if you should use -nostdlib or not and for example + # on OSX the compiler binary is the same but you need + # a ton of compiler flags to differentiate between + # arm and x86_64. So just compile. + mode = CompileCheckMode.COMPILE + cargs, largs = self._get_basic_compiler_args(environment, mode) + extra_flags = cargs + self.linker_to_compiler_args(largs) + + # Is a valid executable output for all toolchains and platforms + binname += '.exe' + # Write binary check source + binary_name = os.path.join(work_dir, binname) + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write(code) + # Compile sanity check + # NOTE: extra_flags must be added at the end. 
On MSVC, it might contain a '/link' argument + # after which all further arguments will be passed directly to the linker + cmdlist = self.exelist + [sname] + self.get_output_args(binname) + extra_flags + pc, stdo, stde = mesonlib.Popen_safe(cmdlist, cwd=work_dir) + mlog.debug('Sanity check compiler command line:', ' '.join(cmdlist)) + mlog.debug('Sanity check compile stdout:') + mlog.debug(stdo) + mlog.debug('-----\nSanity check compile stderr:') + mlog.debug(stde) + mlog.debug('-----') + if pc.returncode != 0: + raise mesonlib.EnvironmentException(f'Compiler {self.name_string()} can not compile programs.') + # Run sanity check + if self.is_cross: + if self.exe_wrapper is None: + # Can't check if the binaries run so we have to assume they do + return + cmdlist = self.exe_wrapper.get_command() + [binary_name] + else: + cmdlist = [binary_name] + mlog.debug('Running test binary command: ' + ' '.join(cmdlist)) + try: + pe = subprocess.Popen(cmdlist) + except Exception as e: + raise mesonlib.EnvironmentException(f'Could not invoke sanity test executable: {e!s}.') + pe.wait() + if pe.returncode != 0: + raise mesonlib.EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.') + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'int main(void) { int class=0; return class; }\n' + return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) + + def check_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + code = f'''{prefix} + #include <{hname}>''' + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies) + + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + code = f'''{prefix} + #ifdef __has_include + #if !__has_include("{hname}") + #error "Header '{hname}' could not be found" + #endif + #else + #include <{hname}> + #endif''' + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode='preprocess', disable_cache=disable_cache) + + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + t = f'''{prefix} + #include <{hname}> + int main(void) {{ + /* If it's not defined as a macro, try to use as a symbol */ + #ifndef {symbol} + {symbol}; + #endif + return 0; + }}''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies) + + def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) -> T.Tuple[T.List[str], T.List[str]]: + cargs = [] # type: T.List[str] + largs = [] # type: T.List[str] + if mode is CompileCheckMode.LINK: + # Sometimes we need to manually select the CRT to use with MSVC. + # One example is when trying to do a compiler check that involves + # linking with static libraries since MSVC won't select a CRT for + # us in that case and will error out asking us to pick one. 
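# A sketch of the C probe sources that check_header(), has_header() and
# has_header_symbol() above hand to compiles(): the header check prefers
# the preprocessor's __has_include when available, and the symbol check
# only references the name when it is not already defined as a macro.
# These free functions merely reproduce the templates for illustration.
def header_check_code(prefix: str, hname: str) -> str:
    return f'''{prefix}
    #ifdef __has_include
    #if !__has_include("{hname}")
    #error "Header '{hname}' could not be found"
    #endif
    #else
    #include <{hname}>
    #endif'''

def header_symbol_check_code(prefix: str, hname: str, symbol: str) -> str:
    return f'''{prefix}
    #include <{hname}>
    int main(void) {{
        /* If it's not defined as a macro, try to use it as a symbol */
    #ifndef {symbol}
        {symbol};
    #endif
        return 0;
    }}'''

print(header_symbol_check_code('#define _GNU_SOURCE', 'string.h', 'strdup'))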
+ try: + crt_val = env.coredata.options[OptionKey('b_vscrt')].value + buildtype = env.coredata.options[OptionKey('buildtype')].value + cargs += self.get_crt_compile_args(crt_val, buildtype) + except (KeyError, AttributeError): + pass + + # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env + sys_args = env.coredata.get_external_args(self.for_machine, self.language) + if isinstance(sys_args, str): + sys_args = [sys_args] + # Apparently it is a thing to inject linker flags both + # via CFLAGS _and_ LDFLAGS, even though the former are + # also used during linking. These flags can break + # argument checks. Thanks, Autotools. + cleaned_sys_args = self.remove_linkerlike_args(sys_args) + cargs += cleaned_sys_args + + if mode is CompileCheckMode.LINK: + ld_value = env.lookup_binary_entry(self.for_machine, self.language + '_ld') + if ld_value is not None: + largs += self.use_linker_args(ld_value[0]) + + # Add LDFLAGS from the env + sys_ld_args = env.coredata.get_external_link_args(self.for_machine, self.language) + # CFLAGS and CXXFLAGS go to both linking and compiling, but we want them + # to only appear on the command line once. Remove dupes. + largs += [x for x in sys_ld_args if x not in sys_args] + + cargs += self.get_compiler_args_for_mode(mode) + return cargs, largs + + def build_wrapper_args(self, env: 'Environment', + extra_args: T.Union[None, arglist.CompilerArgs, T.List[str]], + dependencies: T.Optional[T.List['Dependency']], + mode: CompileCheckMode = CompileCheckMode.COMPILE) -> arglist.CompilerArgs: + # TODO: the caller should handle the listfing of these arguments + if extra_args is None: + extra_args = [] + else: + # TODO: we want to do this in the caller + extra_args = mesonlib.listify(extra_args) + extra_args = mesonlib.listify([e(mode.value) if callable(e) else e for e in extra_args]) + + if dependencies is None: + dependencies = [] + elif not isinstance(dependencies, collections.abc.Iterable): + # TODO: we want to ensure the front end does the listifing here + dependencies = [dependencies] # type: ignore + # Collect compiler arguments + cargs = self.compiler_args() # type: arglist.CompilerArgs + largs = [] # type: T.List[str] + for d in dependencies: + # Add compile flags needed by dependencies + cargs += d.get_compile_args() + if mode is CompileCheckMode.LINK: + # Add link flags needed to find dependencies + largs += d.get_link_args() + + ca, la = self._get_basic_compiler_args(env, mode) + cargs += ca + largs += la + + cargs += self.get_compiler_check_args(mode) + + # on MSVC compiler and linker flags must be separated by the "/link" argument + # at this point, the '/link' argument may already be part of extra_args, otherwise, it is added here + if self.linker_to_compiler_args([]) == ['/link'] and largs != [] and not ('/link' in extra_args): + extra_args += ['/link'] + + args = cargs + extra_args + largs + return args + + def run(self, code: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> compilers.RunResult: + need_exe_wrapper = env.need_exe_wrapper(self.for_machine) + if need_exe_wrapper and self.exe_wrapper is None: + raise compilers.CrossNoRunException('Can not run test applications in this cross environment.') + with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p: + if p.returncode != 0: + mlog.debug(f'Could not compile test file {p.input_name}: {p.returncode}\n') + return compilers.RunResult(False) + if need_exe_wrapper: + 
cmdlist = self.exe_wrapper.get_command() + [p.output_name] + else: + cmdlist = [p.output_name] + try: + pe, so, se = mesonlib.Popen_safe(cmdlist) + except Exception as e: + mlog.debug(f'Could not run: {cmdlist} (error: {e})\n') + return compilers.RunResult(False) + + mlog.debug('Program stdout:\n') + mlog.debug(so) + mlog.debug('Program stderr:\n') + mlog.debug(se) + return compilers.RunResult(True, pe.returncode, so, se) + + def _compile_int(self, expression: str, prefix: str, env: 'Environment', + extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']]) -> bool: + t = f'''#include + {prefix} + int main(void) {{ static int a[1-2*!({expression})]; a[0]=0; return 0; }}''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies)[0] + + def cross_compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int], + guess: T.Optional[int], prefix: str, env: 'Environment', + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: + # Try user's guess first + if isinstance(guess, int): + if self._compile_int(f'{expression} == {guess}', prefix, env, extra_args, dependencies): + return guess + + # If no bounds are given, compute them in the limit of int32 + maxint = 0x7fffffff + minint = -0x80000000 + if not isinstance(low, int) or not isinstance(high, int): + if self._compile_int(f'{expression} >= 0', prefix, env, extra_args, dependencies): + low = cur = 0 + while self._compile_int(f'{expression} > {cur}', prefix, env, extra_args, dependencies): + low = cur + 1 + if low > maxint: + raise mesonlib.EnvironmentException('Cross-compile check overflowed') + cur = cur * 2 + 1 + if cur > maxint: + cur = maxint + high = cur + else: + high = cur = -1 + while self._compile_int(f'{expression} < {cur}', prefix, env, extra_args, dependencies): + high = cur - 1 + if high < minint: + raise mesonlib.EnvironmentException('Cross-compile check overflowed') + cur = cur * 2 + if cur < minint: + cur = minint + low = cur + else: + # Sanity check limits given by user + if high < low: + raise mesonlib.EnvironmentException('high limit smaller than low limit') + condition = f'{expression} <= {high} && {expression} >= {low}' + if not self._compile_int(condition, prefix, env, extra_args, dependencies): + raise mesonlib.EnvironmentException('Value out of given range') + + # Binary search + while low != high: + cur = low + int((high - low) / 2) + if self._compile_int(f'{expression} <= {cur}', prefix, env, extra_args, dependencies): + high = cur + else: + low = cur + 1 + + return low + + def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int], + guess: T.Optional[int], prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: + if extra_args is None: + extra_args = [] + if self.is_cross: + return self.cross_compute_int(expression, low, high, guess, prefix, env, extra_args, dependencies) + t = f'''#include + {prefix} + int main(void) {{ + printf("%ld\\n", (long)({expression})); + return 0; + }};''' + res = self.run(t, env, extra_args=extra_args, + dependencies=dependencies) + if not res.compiled: + return -1 + if res.returncode != 0: + raise mesonlib.EnvironmentException('Could not run compute_int test binary.') + return int(res.stdout) + + def cross_sizeof(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: 
T.Optional[T.List['Dependency']] = None) -> int: + if extra_args is None: + extra_args = [] + t = f'''#include + {prefix} + int main(void) {{ + {typename} something; + return 0; + }}''' + if not self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies)[0]: + return -1 + return self.cross_compute_int(f'sizeof({typename})', None, None, None, prefix, env, extra_args, dependencies) + + def sizeof(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: + if extra_args is None: + extra_args = [] + if self.is_cross: + return self.cross_sizeof(typename, prefix, env, extra_args=extra_args, + dependencies=dependencies) + t = f'''#include + {prefix} + int main(void) {{ + printf("%ld\\n", (long)(sizeof({typename}))); + return 0; + }};''' + res = self.run(t, env, extra_args=extra_args, + dependencies=dependencies) + if not res.compiled: + return -1 + if res.returncode != 0: + raise mesonlib.EnvironmentException('Could not run sizeof test binary.') + return int(res.stdout) + + def cross_alignment(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: + if extra_args is None: + extra_args = [] + t = f'''#include + {prefix} + int main(void) {{ + {typename} something; + return 0; + }}''' + if not self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies)[0]: + return -1 + t = f'''#include + {prefix} + struct tmp {{ + char c; + {typename} target; + }};''' + return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t, env, extra_args, dependencies) + + def alignment(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> int: + if extra_args is None: + extra_args = [] + if self.is_cross: + return self.cross_alignment(typename, prefix, env, extra_args=extra_args, + dependencies=dependencies) + t = f'''#include + #include + {prefix} + struct tmp {{ + char c; + {typename} target; + }}; + int main(void) {{ + printf("%d", (int)offsetof(struct tmp, target)); + return 0; + }}''' + res = self.run(t, env, extra_args=extra_args, + dependencies=dependencies) + if not res.compiled: + raise mesonlib.EnvironmentException('Could not compile alignment test.') + if res.returncode != 0: + raise mesonlib.EnvironmentException('Could not run alignment test binary.') + align = int(res.stdout) + if align == 0: + raise mesonlib.EnvironmentException(f'Could not determine alignment of {typename}. Sorry. 
You might want to file a bug.') + return align + + def get_define(self, dname: str, prefix: str, env: 'Environment', + extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']], + disable_cache: bool = False) -> T.Tuple[str, bool]: + delim = '"MESON_GET_DEFINE_DELIMITER"' + code = f''' + {prefix} + #ifndef {dname} + # define {dname} + #endif + {delim}\n{dname}''' + args = self.build_wrapper_args(env, extra_args, dependencies, + mode=CompileCheckMode.PREPROCESS).to_native() + func = functools.partial(self.cached_compile, code, env.coredata, extra_args=args, mode='preprocess') + if disable_cache: + func = functools.partial(self.compile, code, extra_args=args, mode='preprocess', temp_dir=env.scratch_dir) + with func() as p: + cached = p.cached + if p.returncode != 0: + raise mesonlib.EnvironmentException(f'Could not get define {dname!r}') + # Get the preprocessed value after the delimiter, + # minus the extra newline at the end and + # merge string literals. + return self._concatenate_string_literals(p.stdout.split(delim + '\n')[-1][:-1]), cached + + def get_return_value(self, fname: str, rtype: str, prefix: str, + env: 'Environment', extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]: + # TODO: rtype should be an enum. + # TODO: maybe we can use overload to tell mypy when this will return int vs str? + if rtype == 'string': + fmt = '%s' + cast = '(char*)' + elif rtype == 'int': + fmt = '%lli' + cast = '(long long int)' + else: + raise AssertionError(f'BUG: Unknown return type {rtype!r}') + code = f'''{prefix} + #include + int main(void) {{ + printf ("{fmt}", {cast} {fname}()); + return 0; + }}''' + res = self.run(code, env, extra_args=extra_args, dependencies=dependencies) + if not res.compiled: + raise mesonlib.EnvironmentException(f'Could not get return value of {fname}()') + if rtype == 'string': + return res.stdout + elif rtype == 'int': + try: + return int(res.stdout.strip()) + except ValueError: + raise mesonlib.EnvironmentException(f'Return value of {fname}() is not an int') + assert False, 'Unreachable' + + @staticmethod + def _no_prototype_templ() -> T.Tuple[str, str]: + """ + Try to find the function without a prototype from a header by defining + our own dummy prototype and trying to link with the C library (and + whatever else the compiler links in by default). This is very similar + to the check performed by Autoconf for AC_CHECK_FUNCS. + """ + # Define the symbol to something else since it is defined by the + # includes or defines listed by the user or by the compiler. This may + # include, for instance _GNU_SOURCE which must be defined before + # limits.h, which includes features.h + # Then, undef the symbol to get rid of it completely. + head = ''' + #define {func} meson_disable_define_of_{func} + {prefix} + #include + #undef {func} + ''' + # Override any GCC internal prototype and declare our own definition for + # the symbol. Use char because that's unlikely to be an actual return + # value for a function which ensures that we override the definition. + head += ''' + #ifdef __cplusplus + extern "C" + #endif + char {func} (void); + ''' + # The actual function call + main = ''' + int main(void) {{ + return {func} (); + }}''' + return head, main + + @staticmethod + def _have_prototype_templ() -> T.Tuple[str, str]: + """ + Returns a head-er and main() call that uses the headers listed by the + user for the function prototype while checking if a function exists. 
+ """ + # Add the 'prefix', aka defines, includes, etc that the user provides + # This may include, for instance _GNU_SOURCE which must be defined + # before limits.h, which includes features.h + head = '{prefix}\n#include \n' + # We don't know what the function takes or returns, so return it as an int. + # Just taking the address or comparing it to void is not enough because + # compilers are smart enough to optimize it away. The resulting binary + # is not run so we don't care what the return value is. + main = '''\nint main(void) {{ + void *a = (void*) &{func}; + long long b = (long long) a; + return (int) b; + }}''' + return head, main + + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + """Determine if a function exists. + + First, this function looks for the symbol in the default libraries + provided by the compiler (stdlib + a few others usually). If that + fails, it checks if any of the headers specified in the prefix provide + an implementation of the function, and if that fails, it checks if it's + implemented as a compiler-builtin. + """ + if extra_args is None: + extra_args = [] + + # Short-circuit if the check is already provided by the cross-info file + varname = 'has function ' + funcname + varname = varname.replace(' ', '_') + if self.is_cross: + val = env.properties.host.get(varname, None) + if val is not None: + if isinstance(val, bool): + return val, False + raise mesonlib.EnvironmentException(f'Cross variable {varname} is not a boolean.') + + # TODO: we really need a protocol for this, + # + # class StrProto(typing.Protocol): + # def __str__(self) -> str: ... + fargs = {'prefix': prefix, 'func': funcname} # type: T.Dict[str, T.Union[str, bool, int]] + + # glibc defines functions that are not available on Linux as stubs that + # fail with ENOSYS (such as e.g. lchmod). In this case we want to fail + # instead of detecting the stub as a valid symbol. + # We already included limits.h earlier to ensure that these are defined + # for stub functions. + stubs_fail = ''' + #if defined __stub_{func} || defined __stub___{func} + fail fail fail this function is not going to work + #endif + ''' + + # If we have any includes in the prefix supplied by the user, assume + # that the user wants us to use the symbol prototype defined in those + # includes. If not, then try to do the Autoconf-style check with + # a dummy prototype definition of our own. + # This is needed when the linker determines symbol availability from an + # SDK based on the prototype in the header provided by the SDK. + # Ignoring this prototype would result in the symbol always being + # marked as available. + if '#include' in prefix: + head, main = self._have_prototype_templ() + else: + head, main = self._no_prototype_templ() + templ = head + stubs_fail + main + + res, cached = self.links(templ.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) + if res: + return True, cached + + # MSVC does not have compiler __builtin_-s. + if self.get_id() in {'msvc', 'intel-cl'}: + return False, False + + # Detect function as a built-in + # + # Some functions like alloca() are defined as compiler built-ins which + # are inlined by the compiler and you can't take their address, so we + # need to look for them differently. On nice compilers like clang, we + # can just directly use the __has_builtin() macro. 
+ fargs['no_includes'] = '#include' not in prefix + is_builtin = funcname.startswith('__builtin_') + fargs['is_builtin'] = is_builtin + fargs['__builtin_'] = '' if is_builtin else '__builtin_' + t = '''{prefix} + int main(void) {{ + + /* With some toolchains (MSYS2/mingw for example) the compiler + * provides various builtins which are not really implemented and + * fall back to the stdlib where they aren't provided and fail at + * build/link time. In case the user provides a header, including + * the header didn't lead to the function being defined, and the + * function we are checking isn't a builtin itself we assume the + * builtin is not functional and we just error out. */ + #if !{no_includes:d} && !defined({func}) && !{is_builtin:d} + #error "No definition for {__builtin_}{func} found in the prefix" + #endif + + #ifdef __has_builtin + #if !__has_builtin({__builtin_}{func}) + #error "{__builtin_}{func} not found" + #endif + #elif ! defined({func}) + {__builtin_}{func}; + #endif + return 0; + }}''' + return self.links(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) + + def has_members(self, typename: str, membernames: T.List[str], + prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + if extra_args is None: + extra_args = [] + # Create code that accesses all members + members = '' + for member in membernames: + members += f'foo.{member};\n' + t = f'''{prefix} + void bar(void) {{ + {typename} foo; + {members} + }};''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies) + + def has_type(self, typename: str, prefix: str, env: 'Environment', extra_args: T.List[str], + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + t = f'''{prefix} + void bar(void) {{ + sizeof({typename}); + }};''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies) + + def symbols_have_underscore_prefix(self, env: 'Environment') -> bool: + ''' + Check if the compiler prefixes an underscore to global C symbols + ''' + symbol_name = b'meson_uscore_prefix' + code = '''#ifdef __cplusplus + extern "C" { + #endif + void ''' + symbol_name.decode() + ''' (void) {} + #ifdef __cplusplus + } + #endif + ''' + args = self.get_compiler_check_args(CompileCheckMode.COMPILE) + n = 'symbols_have_underscore_prefix' + with self._build_wrapper(code, env, extra_args=args, mode='compile', want_output=True, temp_dir=env.scratch_dir) as p: + if p.returncode != 0: + raise RuntimeError(f'BUG: Unable to compile {n!r} check: {p.stdout}') + if not os.path.isfile(p.output_name): + raise RuntimeError(f'BUG: Can\'t find compiled test code for {n!r} check') + with open(p.output_name, 'rb') as o: + for line in o: + # Check if the underscore form of the symbol is somewhere + # in the output file. + if b'_' + symbol_name in line: + mlog.debug("Symbols have underscore prefix: YES") + return True + # Else, check if the non-underscored form is present + elif symbol_name in line: + mlog.debug("Symbols have underscore prefix: NO") + return False + raise RuntimeError(f'BUG: {n!r} check failed unexpectedly') + + def _get_patterns(self, env: 'Environment', prefixes: T.List[str], suffixes: T.List[str], shared: bool = False) -> T.List[str]: + patterns = [] # type: T.List[str] + for p in prefixes: + for s in suffixes: + patterns.append(p + '{}.' 
+ s) + if shared and env.machines[self.for_machine].is_openbsd(): + # Shared libraries on OpenBSD can be named libfoo.so.X.Y: + # https://www.openbsd.org/faq/ports/specialtopics.html#SharedLibs + # + # This globbing is probably the best matching we can do since regex + # is expensive. It's wrong in many edge cases, but it will match + # correctly-named libraries and hopefully no one on OpenBSD names + # their files libfoo.so.9a.7b.1.0 + for p in prefixes: + patterns.append(p + '{}.so.[0-9]*.[0-9]*') + return patterns + + def get_library_naming(self, env: 'Environment', libtype: LibType, strict: bool = False) -> T.Tuple[str, ...]: + ''' + Get library prefixes and suffixes for the target platform ordered by + priority + ''' + stlibext = ['a'] + # We've always allowed libname to be both `foo` and `libfoo`, and now + # people depend on it. Also, some people use prebuilt `foo.so` instead + # of `libfoo.so` for unknown reasons, and may also want to create + # `foo.so` by setting name_prefix to '' + if strict and not isinstance(self, VisualStudioLikeCompiler): # lib prefix is not usually used with msvc + prefixes = ['lib'] + else: + prefixes = ['lib', ''] + # Library suffixes and prefixes + if env.machines[self.for_machine].is_darwin(): + shlibext = ['dylib', 'so'] + elif env.machines[self.for_machine].is_windows(): + # FIXME: .lib files can be import or static so we should read the + # file, figure out which one it is, and reject the wrong kind. + if isinstance(self, VisualStudioLikeCompiler): + shlibext = ['lib'] + else: + shlibext = ['dll.a', 'lib', 'dll'] + # Yep, static libraries can also be foo.lib + stlibext += ['lib'] + elif env.machines[self.for_machine].is_cygwin(): + shlibext = ['dll', 'dll.a'] + prefixes = ['cyg'] + prefixes + else: + # Linux/BSDs + shlibext = ['so'] + # Search priority + if libtype is LibType.PREFER_SHARED: + patterns = self._get_patterns(env, prefixes, shlibext, True) + patterns.extend([x for x in self._get_patterns(env, prefixes, stlibext, False) if x not in patterns]) + elif libtype is LibType.PREFER_STATIC: + patterns = self._get_patterns(env, prefixes, stlibext, False) + patterns.extend([x for x in self._get_patterns(env, prefixes, shlibext, True) if x not in patterns]) + elif libtype is LibType.SHARED: + patterns = self._get_patterns(env, prefixes, shlibext, True) + else: + assert libtype is LibType.STATIC + patterns = self._get_patterns(env, prefixes, stlibext, False) + return tuple(patterns) + + @staticmethod + def _sort_shlibs_openbsd(libs: T.List[str]) -> T.List[str]: + filtered = [] # type: T.List[str] + for lib in libs: + # Validate file as a shared library of type libfoo.so.X.Y + ret = lib.rsplit('.so.', maxsplit=1) + if len(ret) != 2: + continue + try: + float(ret[1]) + except ValueError: + continue + filtered.append(lib) + float_cmp = lambda x: float(x.rsplit('.so.', maxsplit=1)[1]) + return sorted(filtered, key=float_cmp, reverse=True) + + @classmethod + def _get_trials_from_pattern(cls, pattern: str, directory: str, libname: str) -> T.List[Path]: + f = Path(directory) / pattern.format(libname) + # Globbing for OpenBSD + if '*' in pattern: + # NOTE: globbing matches directories and broken symlinks + # so we have to do an isfile test on it later + return [Path(x) for x in cls._sort_shlibs_openbsd(glob.glob(str(f)))] + return [f] + + @staticmethod + def _get_file_from_list(env: 'Environment', paths: T.List[Path]) -> Path: + ''' + We just check whether the library exists. 
We can't do a link check + because the library might have unresolved symbols that require other + libraries. On macOS we check if the library matches our target + architecture. + ''' + # If not building on macOS for Darwin, do a simple file check + if not env.machines.host.is_darwin() or not env.machines.build.is_darwin(): + for p in paths: + if p.is_file(): + return p + # Run `lipo` and check if the library supports the arch we want + for p in paths: + if not p.is_file(): + continue + archs = mesonlib.darwin_get_object_archs(str(p)) + if archs and env.machines.host.cpu_family in archs: + return p + else: + mlog.debug(f'Rejected {p}, supports {archs} but need {env.machines.host.cpu_family}') + return None + + @functools.lru_cache() + def output_is_64bit(self, env: 'Environment') -> bool: + ''' + returns true if the output produced is 64-bit, false if 32-bit + ''' + return self.sizeof('void *', '', env) == 8 + + def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType) -> T.Optional[T.List[str]]: + # First try if we can just add the library as -l. + # Gcc + co seem to prefer builtin lib dirs to -L dirs. + # Only try to find std libs if no extra dirs specified. + # The built-in search procedure will always favour .so and then always + # search for .a. This is only allowed if libtype is LibType.PREFER_SHARED + if ((not extra_dirs and libtype is LibType.PREFER_SHARED) or + libname in self.internal_libs): + cargs = ['-l' + libname] + largs = self.get_linker_always_args() + self.get_allow_undefined_link_args() + extra_args = cargs + self.linker_to_compiler_args(largs) + + if self.links(code, env, extra_args=extra_args, disable_cache=True)[0]: + return cargs + # Don't do a manual search for internal libs + if libname in self.internal_libs: + return None + # Not found or we want to use a specific libtype? Try to find the + # library file itself. + patterns = self.get_library_naming(env, libtype) + # try to detect if we are 64-bit or 32-bit. 
If we can't + # detect, we will just skip path validity checks done in + # get_library_dirs() call + try: + if self.output_is_64bit(env): + elf_class = 2 + else: + elf_class = 1 + except (mesonlib.MesonException, KeyError): # TODO evaluate if catching KeyError is wanted here + elf_class = 0 + # Search in the specified dirs, and then in the system libraries + for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)): + for p in patterns: + trials = self._get_trials_from_pattern(p, d, libname) + if not trials: + continue + trial = self._get_file_from_list(env, trials) + if not trial: + continue + return [trial.as_posix()] + return None + + def _find_library_impl(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + code: str, libtype: LibType) -> T.Optional[T.List[str]]: + # These libraries are either built-in or invalid + if libname in self.ignore_libs: + return [] + if isinstance(extra_dirs, str): + extra_dirs = [extra_dirs] + key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype) + if key not in self.find_library_cache: + value = self._find_library_real(libname, env, extra_dirs, code, libtype) + self.find_library_cache[key] = value + else: + value = self.find_library_cache[key] + if value is None: + return None + return value.copy() + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: + code = 'int main(void) { return 0; }\n' + return self._find_library_impl(libname, env, extra_dirs, code, libtype) + + def find_framework_paths(self, env: 'Environment') -> T.List[str]: + ''' + These are usually /Library/Frameworks and /System/Library/Frameworks, + unless you select a particular macOS SDK with the -isysroot flag. + You can also add to this by setting -F in CFLAGS. + ''' + # TODO: this really needs to be *AppleClang*, not just any clang. 
+ if self.id != 'clang': + raise mesonlib.MesonException('Cannot find framework path with non-clang compiler') + # Construct the compiler command-line + commands = self.get_exelist() + ['-v', '-E', '-'] + commands += self.get_always_args() + # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env + commands += env.coredata.get_external_args(self.for_machine, self.language) + mlog.debug('Finding framework path by running: ', ' '.join(commands), '\n') + os_env = os.environ.copy() + os_env['LC_ALL'] = 'C' + _, _, stde = mesonlib.Popen_safe(commands, env=os_env, stdin=subprocess.PIPE) + paths = [] # T.List[str] + for line in stde.split('\n'): + if '(framework directory)' not in line: + continue + # line is of the form: + # ` /path/to/framework (framework directory)` + paths.append(line[:-21].strip()) + return paths + + def _find_framework_real(self, name: str, env: 'Environment', extra_dirs: T.List[str], allow_system: bool) -> T.Optional[T.List[str]]: + code = 'int main(void) { return 0; }' + link_args = [] + for d in extra_dirs: + link_args += ['-F' + d] + # We can pass -Z to disable searching in the system frameworks, but + # then we must also pass -L/usr/lib to pick up libSystem.dylib + extra_args = [] if allow_system else ['-Z', '-L/usr/lib'] + link_args += ['-framework', name] + if self.links(code, env, extra_args=(extra_args + link_args), disable_cache=True)[0]: + return link_args + return None + + def _find_framework_impl(self, name: str, env: 'Environment', extra_dirs: T.List[str], + allow_system: bool) -> T.Optional[T.List[str]]: + if isinstance(extra_dirs, str): + extra_dirs = [extra_dirs] + key = (tuple(self.exelist), name, tuple(extra_dirs), allow_system) + if key in self.find_framework_cache: + value = self.find_framework_cache[key] + else: + value = self._find_framework_real(name, env, extra_dirs, allow_system) + self.find_framework_cache[key] = value + if value is None: + return None + return value.copy() + + def find_framework(self, name: str, env: 'Environment', extra_dirs: T.List[str], + allow_system: bool = True) -> T.Optional[T.List[str]]: + ''' + Finds the framework with the specified name, and returns link args for + the same or returns None when the framework is not found. + ''' + # TODO: maybe this belongs in clang? also, should probably check for macOS? + if self.id != 'clang': + raise mesonlib.MesonException('Cannot find frameworks with non-clang compiler') + return self._find_framework_impl(name, env, extra_dirs, allow_system) + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? + return [] + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? + return [] + + def thread_flags(self, env: 'Environment') -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? + host_m = env.machines[self.for_machine] + if host_m.is_haiku() or host_m.is_darwin(): + return [] + return ['-pthread'] + + def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]: + return args.copy() + + def has_arguments(self, args: T.List[str], env: 'Environment', code: str, + mode: str) -> T.Tuple[bool, bool]: + return self.compiles(code, env, extra_args=args, mode=mode) + + def _has_multi_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]: + new_args = [] # type: T.List[str] + for arg in args: + # some compilers, e.g. 
GCC, don't warn for unsupported warning-disable + # flags, so when we are testing a flag like "-Wno-forgotten-towel", also + # check the equivalent enable flag too "-Wforgotten-towel" + if arg.startswith('-Wno-'): + new_args.append('-W' + arg[5:]) + if arg.startswith('-Wl,'): + mlog.warning(f'{arg} looks like a linker argument, ' + 'but has_argument and other similar methods only ' + 'support checking compiler arguments. Using them ' + 'to check linker arguments are never supported, ' + 'and results are likely to be wrong regardless of ' + 'the compiler you are using. has_link_argument or ' + 'other similar method can be used instead.') + new_args.append(arg) + return self.has_arguments(new_args, env, code, mode='compile') + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_arguments(args, env, 'extern int i;\nint i;\n') + + def _has_multi_link_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]: + # First time we check for link flags we need to first check if we have + # --fatal-warnings, otherwise some linker checks could give some + # false positive. + args = self.linker.fatal_warnings() + args + args = self.linker_to_compiler_args(args) + return self.has_arguments(args, env, code, mode='link') + + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_link_arguments(args, env, 'int main(void) { return 0; }\n') + + @staticmethod + def _concatenate_string_literals(s: str) -> str: + pattern = re.compile(r'(?P
<pre>.*([^\\]")|^")(?P<str1>([^\\"]|\\.)*)"\s+"(?P<str2>([^\\"]|\\.)*)(?P<post>".*)')
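+        # Illustrative example: a preprocessed value like '"foo" "bar"' is
+        # collapsed to '"foobar"' by the loop below.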
+        ret = s
+        m = pattern.match(ret)
+        while m:
+            ret = ''.join(m.group('pre', 'str1', 'str2', 'post'))
+            m = pattern.match(ret)
+        return ret
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        # Most compilers (such as GCC and Clang) only warn about unknown or
+        # ignored attributes, so force an error. Overridden in GCC and Clang
+        # mixins.
+        return ['-Werror']
+
+    def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+        # Just assume that if we're not on Windows, dllimport and dllexport
+        # don't work.
+        m = env.machines[self.for_machine]
+        if not (m.is_windows() or m.is_cygwin()):
+            if name in ['dllimport', 'dllexport']:
+                return False, False
+
+        return self.compiles(self.attribute_check_func(name), env,
+                             extra_args=self.get_has_func_attribute_extra_args(name))
+
+    def get_disable_assert_args(self) -> T.List[str]:
+        return ['-DNDEBUG']
diff --git a/meson/mesonbuild/compilers/mixins/compcert.py b/meson/mesonbuild/compilers/mixins/compcert.py
new file mode 100644
index 000000000..3211f6af2
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/compcert.py
@@ -0,0 +1,131 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the CompCert C compiler family."""
+
+import os
+import re
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact.
+    Compiler = object
+
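+# Maps Meson buildtype values to the CompCert flags used by
+# get_buildtype_args() below.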
+ccomp_buildtype_args = {
+    'plain': [''],
+    'debug': ['-O0', '-g'],
+    'debugoptimized': ['-O0', '-g'],
+    'release': ['-O3'],
+    'minsize': ['-Os'],
+    'custom': ['-Obranchless'],
+}  # type: T.Dict[str, T.List[str]]
+
+ccomp_optimization_args = {
+    '0': ['-O0'],
+    'g': ['-O0'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os']
+}  # type: T.Dict[str, T.List[str]]
+
+ccomp_debug_args = {
+    False: [],
+    True: ['-g']
+}  # type: T.Dict[bool, T.List[str]]
+
+# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,)
+# There are probably (many) more, but these are those used by picolibc
+ccomp_args_to_wul = [
+        r"^-ffreestanding$",
+        r"^-r$"
+] # type: T.List[str]
+
+class CompCertCompiler(Compiler):
+
+    def __init__(self) -> None:
+        self.id = 'ccomp'
+        # Assembly
+        self.can_compile_suffixes.add('s')
+        default_warn_args = []  # type: T.List[str]
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + [],
+                          '3': default_warn_args + []}  # type: T.Dict[str, T.List[str]]
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # As of now, CompCert does not support PIC
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return ccomp_buildtype_args[buildtype]
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return []
+
+    def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+        "Always returns a copy that can be independently mutated"
+        patched_args = []  # type: T.List[str]
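+        # Illustrative example: ['-ffreestanding', '-O2'] becomes
+        # ['-WUl,-ffreestanding', '-O2']; only arguments matching
+        # ccomp_args_to_wul are rewritten.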
+        for arg in args:
+            added = 0
+            for ptrn in ccomp_args_to_wul:
+                if re.match(ptrn, arg):
+                    patched_args.append('-WUl,' + arg)
+                    added = 1
+            if not added:
+                patched_args.append(arg)
+        return patched_args
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def get_preprocess_only_args(self) -> T.List[str]:
+        return ['-E']
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def get_coverage_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return ['-nostdinc']
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return ['-nostdlib']
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return ccomp_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return ccomp_debug_args[is_debug]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
diff --git a/meson/mesonbuild/compilers/mixins/elbrus.py b/meson/mesonbuild/compilers/mixins/elbrus.py
new file mode 100644
index 000000000..16f621005
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/elbrus.py
@@ -0,0 +1,82 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions for the Elbrus family of compilers."""
+
+import os
+import typing as T
+import subprocess
+import re
+
+from .gnu import GnuLikeCompiler
+from .gnu import gnu_optimization_args
+from ...mesonlib import Popen_safe, OptionKey
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+
+
+class ElbrusCompiler(GnuLikeCompiler):
+    # Elbrus compiler is nearly like GCC, but does not support
+    # PCH, LTO, sanitizers and color output as of version 1.21.x.
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.id = 'lcc'
+        self.base_options = {OptionKey(o) for o in ['b_pgo', 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_lundef', 'b_asneeded']}
+
+    # FIXME: use _build_wrapper to call this so that linker flags from the env
+    # get applied
+    def get_library_dirs(self, env: 'Environment', elf_class: T.Optional[int] = None) -> T.List[str]:
+        os_env = os.environ.copy()
+        os_env['LC_ALL'] = 'C'
+        stdo = Popen_safe(self.exelist + ['--print-search-dirs'], env=os_env)[1]
+        for line in stdo.split('\n'):
+            if line.startswith('libraries:'):
+                # lcc does not include '=' in --print-search-dirs output. Also it could show nonexistent dirs.
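+                # A matching line looks roughly like
+                # 'libraries: /usr/lib64:/usr/lib' (illustrative).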
+                libstr = line.split(' ', 1)[1]
+                return [os.path.realpath(p) for p in libstr.split(':') if os.path.exists(p)]
+        return []
+
+    def get_program_dirs(self, env: 'Environment') -> T.List[str]:
+        os_env = os.environ.copy()
+        os_env['LC_ALL'] = 'C'
+        stdo = Popen_safe(self.exelist + ['--print-search-dirs'], env=os_env)[1]
+        for line in stdo.split('\n'):
+            if line.startswith('programs:'):
+                # lcc does not include '=' in --print-search-dirs output.
+                libstr = line.split(' ', 1)[1]
+                return [os.path.realpath(p) for p in libstr.split(':')]
+        return []
+
+    def get_default_include_dirs(self) -> T.List[str]:
+        os_env = os.environ.copy()
+        os_env['LC_ALL'] = 'C'
+        p = subprocess.Popen(self.exelist + ['-xc', '-E', '-v', '-'], env=os_env, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stderr = p.stderr.read().decode('utf-8', errors='replace')
+        includes = []
+        for line in stderr.split('\n'):
+            if line.lstrip().startswith('--sys_include'):
+                includes.append(re.sub(r'\s*\\$', '', re.sub(r'^\s*--sys_include\s*', '', line)))
+        return includes
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return gnu_optimization_args[optimization_level]
+
+    def get_pch_suffix(self) -> str:
+        # Actually it's not supported for now, but will probably be supported in the future.
+        return 'pch'
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['-fopenmp']
diff --git a/meson/mesonbuild/compilers/mixins/emscripten.py b/meson/mesonbuild/compilers/mixins/emscripten.py
new file mode 100644
index 000000000..226cc1531
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/emscripten.py
@@ -0,0 +1,69 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides a mixin for shared code between C and C++ Emscripten compilers."""
+
+import os.path
+import typing as T
+
+from ... import coredata
+from ...mesonlib import OptionKey
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact.
+    Compiler = object
+
+
+class EmscriptenMixin(Compiler):
+
+    def _get_compile_output(self, dirname: str, mode: str) -> str:
+        # In pre-processor mode, the output is sent to stdout and discarded
+        if mode == 'preprocess':
+            return None
+        # Unlike sane toolchains, emcc infers the kind of output from its name.
+        # This is the only reason why this method is overridden; compiler tests
+        # do not work well with the default exe/obj suffixes.
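+        # e.g. a link check is written to 'output.js', any other mode to 'output.o'.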
+        if mode == 'link':
+            suffix = 'js'
+        else:
+            suffix = 'o'
+        return os.path.join(dirname, 'output.' + suffix)
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return ['-s', 'USE_PTHREADS=1']
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        args = ['-s', 'USE_PTHREADS=1']
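+        # With thread_count set to e.g. 4 this ends up as
+        # ['-s', 'USE_PTHREADS=1', '-s', 'PTHREAD_POOL_SIZE=4'].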
+        count: int = env.coredata.options[OptionKey('thread_count', lang=self.language, machine=self.for_machine)].value
+        if count:
+            args.extend(['-s', f'PTHREAD_POOL_SIZE={count}'])
+        return args
+
+    def get_options(self) -> 'coredata.KeyedOptionDictType':
+        opts = super().get_options()
+        key = OptionKey('thread_count', machine=self.for_machine, lang=self.language)
+        opts.update({
+            key: coredata.UserIntegerOption(
+                'Number of threads to use in web assembly, set to 0 to disable',
+                (0, None, 4),  # Default was picked at random
+            ),
+        })
+
+        return opts
diff --git a/meson/mesonbuild/compilers/mixins/gnu.py b/meson/mesonbuild/compilers/mixins/gnu.py
new file mode 100644
index 000000000..bc40af494
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/gnu.py
@@ -0,0 +1,398 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides mixins for GNU compilers and GNU-like compilers."""
+
+import abc
+import functools
+import os
+import multiprocessing
+import pathlib
+import re
+import subprocess
+import typing as T
+
+from ... import mesonlib
+from ... import mlog
+from ...mesonlib import OptionKey
+
+if T.TYPE_CHECKING:
+    from ..._typing import ImmutableListProtocol
+    from ...environment import Environment
+    from ..compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact.
+    Compiler = object
+
+# XXX: prevent circular references.
+# FIXME: this really is a posix interface not a c-like interface
+clike_debug_args = {
+    False: [],
+    True: ['-g'],
+}  # type: T.Dict[bool, T.List[str]]
+
+gnulike_buildtype_args = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}  # type: T.Dict[str, T.List[str]]
+
+gnu_optimization_args = {
+    '0': [],
+    'g': ['-Og'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os'],
+}  # type: T.Dict[str, T.List[str]]
+
+gnulike_instruction_set_args = {
+    'mmx': ['-mmmx'],
+    'sse': ['-msse'],
+    'sse2': ['-msse2'],
+    'sse3': ['-msse3'],
+    'ssse3': ['-mssse3'],
+    'sse41': ['-msse4.1'],
+    'sse42': ['-msse4.2'],
+    'avx': ['-mavx'],
+    'avx2': ['-mavx2'],
+    'neon': ['-mfpu=neon'],
+}  # type: T.Dict[str, T.List[str]]
+
+gnu_symbol_visibility_args = {
+    '': [],
+    'default': ['-fvisibility=default'],
+    'internal': ['-fvisibility=internal'],
+    'hidden': ['-fvisibility=hidden'],
+    'protected': ['-fvisibility=protected'],
+    'inlineshidden': ['-fvisibility=hidden', '-fvisibility-inlines-hidden'],
+}  # type: T.Dict[str, T.List[str]]
+
+gnu_color_args = {
+    'auto': ['-fdiagnostics-color=auto'],
+    'always': ['-fdiagnostics-color=always'],
+    'never': ['-fdiagnostics-color=never'],
+}  # type: T.Dict[str, T.List[str]]
+
+
+@functools.lru_cache(maxsize=None)
+def gnulike_default_include_dirs(compiler: T.Tuple[str, ...], lang: str) -> 'ImmutableListProtocol[str]':
+    lang_map = {
+        'c': 'c',
+        'cpp': 'c++',
+        'objc': 'objective-c',
+        'objcpp': 'objective-c++'
+    }
+    if lang not in lang_map:
+        return []
+    lang = lang_map[lang]
+    env = os.environ.copy()
+    env["LC_ALL"] = 'C'
+    cmd = list(compiler) + [f'-x{lang}', '-E', '-v', '-']
+    p = subprocess.Popen(
+        cmd,
+        stdin=subprocess.DEVNULL,
+        stderr=subprocess.STDOUT,
+        stdout=subprocess.PIPE,
+        env=env
+    )
+    stdout = p.stdout.read().decode('utf-8', errors='replace')
+    parse_state = 0
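+    # parse_state: 0 = before any search list, 1 = inside the '#include "..."'
+    # list, 2 = inside the '#include <...>' list.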
+    paths = []  # type: T.List[str]
+    for line in stdout.split('\n'):
+        line = line.strip(' \n\r\t')
+        if parse_state == 0:
+            if line == '#include "..." search starts here:':
+                parse_state = 1
+        elif parse_state == 1:
+            if line == '#include <...> search starts here:':
+                parse_state = 2
+            else:
+                paths.append(line)
+        elif parse_state == 2:
+            if line == 'End of search list.':
+                break
+            else:
+                paths.append(line)
+    if not paths:
+        mlog.warning('No include directory found parsing "{cmd}" output'.format(cmd=" ".join(cmd)))
+    # Append a normalized copy of paths to make path lookup easier
+    paths += [os.path.normpath(x) for x in paths]
+    return paths
+
+
+class GnuLikeCompiler(Compiler, metaclass=abc.ABCMeta):
+    """
+    GnuLikeCompiler is a common interface to all compilers implementing
+    the GNU-style commandline interface. This includes GCC, Clang
+    and ICC. Certain functionality between them is different and requires
+    that the actual concrete subclass define their own implementation.
+    """
+
+    LINKER_PREFIX = '-Wl,'
+
+    def __init__(self) -> None:
+        self.base_options = {
+            OptionKey(o) for o in ['b_pch', 'b_lto', 'b_pgo', 'b_coverage',
+                                   'b_ndebug', 'b_staticpic', 'b_pie']}
+        if not (self.info.is_windows() or self.info.is_cygwin() or self.info.is_openbsd()):
+            self.base_options.add(OptionKey('b_lundef'))
+        if not self.info.is_windows() or self.info.is_cygwin():
+            self.base_options.add(OptionKey('b_asneeded'))
+        if not self.info.is_hurd():
+            self.base_options.add(OptionKey('b_sanitize'))
+        # All GCC-like backends can do assembly
+        self.can_compile_suffixes.add('s')
+
+    def get_pic_args(self) -> T.List[str]:
+        if self.info.is_windows() or self.info.is_cygwin() or self.info.is_darwin():
+            return [] # On Windows and OS X, PIC is always on.
+        return ['-fPIC']
+
+    def get_pie_args(self) -> T.List[str]:
+        return ['-fPIE']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return gnulike_buildtype_args[buildtype]
+
+    @abc.abstractmethod
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        pass
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    @abc.abstractmethod
+    def get_pch_suffix(self) -> str:
+        pass
+
+    def split_shlib_to_parts(self, fname: str) -> T.Tuple[str, str]:
+        return os.path.dirname(fname), fname
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        return gnulike_instruction_set_args.get(instruction_set, None)
+
+    def get_default_include_dirs(self) -> T.List[str]:
+        return gnulike_default_include_dirs(tuple(self.exelist), self.language).copy()
+
+    @abc.abstractmethod
+    def openmp_flags(self) -> T.List[str]:
+        pass
+
+    def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]:
+        return gnu_symbol_visibility_args[vistype]
+
+    def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
+        if not isinstance(defsfile, str):
+            raise RuntimeError('Module definitions file should be str')
+        # On Windows targets, .def files may be specified on the linker command
+        # line like an object file.
+        if self.info.is_windows() or self.info.is_cygwin():
+            return [defsfile]
+        # For other targets, discard the .def file.
+        return []
+
+    def get_argument_syntax(self) -> str:
+        return 'gcc'
+
+    def get_profile_generate_args(self) -> T.List[str]:
+        return ['-fprofile-generate']
+
+    def get_profile_use_args(self) -> T.List[str]:
+        return ['-fprofile-use', '-fprofile-correction']
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return ['-mwindows' if value else '-mconsole']
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
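+        # Illustrative example: with build_dir='/home/user/build', '-I../include'
+        # becomes '-I/home/user/include'; non -I/-L arguments are left untouched.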
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
+
+    @functools.lru_cache()
+    def _get_search_dirs(self, env: 'Environment') -> str:
+        extra_args = ['--print-search-dirs']
+        with self._build_wrapper('', env, extra_args=extra_args,
+                                 dependencies=None, mode='compile',
+                                 want_output=True) as p:
+            return p.stdout
+
+    def _split_fetch_real_dirs(self, pathstr: str) -> T.List[str]:
+        # We need to use the path separator used by the compiler for printing
+        # lists of paths ("gcc --print-search-dirs"). By default
+        # we assume it uses the platform native separator.
+        pathsep = os.pathsep
+
+        # clang uses ':' instead of ';' on Windows https://reviews.llvm.org/D61121
+        # so we need to repair things like 'C:\foo:C:\bar'
+        if pathsep == ';':
+            pathstr = re.sub(r':([^/\\])', r';\1', pathstr)
+
+        # pathlib treats empty paths as '.', so filter those out
+        paths = [p for p in pathstr.split(pathsep) if p]
+
+        result = []
+        for p in paths:
+            # GCC returns paths like this:
+            # /usr/lib/gcc/x86_64-linux-gnu/8/../../../../x86_64-linux-gnu/lib
+            # It would make sense to normalize them to get rid of the .. parts
+            # Sadly when you are on a merged /usr fs it also kills these:
+            # /lib/x86_64-linux-gnu
+            # since /lib is a symlink to /usr/lib. This would mean
+            # paths under /lib would be considered not a "system path",
+            # which is wrong and breaks things. Store everything, just to be sure.
+            pobj = pathlib.Path(p)
+            unresolved = pobj.as_posix()
+            if pobj.exists():
+                if unresolved not in result:
+                    result.append(unresolved)
+                try:
+                    resolved = pathlib.Path(p).resolve().as_posix()
+                    if resolved not in result:
+                        result.append(resolved)
+                except FileNotFoundError:
+                    pass
+        return result
+
+    def get_compiler_dirs(self, env: 'Environment', name: str) -> T.List[str]:
+        '''
+        Get dirs from the compiler, either `libraries:` or `programs:`
+        '''
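+        # A matching line from --print-search-dirs looks roughly like
+        # 'libraries: =/usr/lib/gcc/x86_64-linux-gnu/9:/usr/lib' (illustrative).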
+        stdo = self._get_search_dirs(env)
+        for line in stdo.split('\n'):
+            if line.startswith(name + ':'):
+                return self._split_fetch_real_dirs(line.split('=', 1)[1])
+        return []
+
+    def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+        # This provides a base for many compilers, GCC and Clang override this
+        # for their specific arguments
+        return ['-flto']
+
+    def sanitizer_compile_args(self, value: str) -> T.List[str]:
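+        # e.g. value='address,undefined' yields
+        # ['-fsanitize=address,undefined', '-fno-omit-frame-pointer'].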
+        if value == 'none':
+            return []
+        args = ['-fsanitize=' + value]
+        if 'address' in value:  # for -fsanitize=address,undefined
+            args.append('-fno-omit-frame-pointer')
+        return args
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['-o', target]
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
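+        # e.g. get_dependency_gen_args('foo.o', 'foo.o.d') returns
+        # ['-MD', '-MQ', 'foo.o', '-MF', 'foo.o.d'].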
+        return ['-MD', '-MQ', outtarget, '-MF', outfile]
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        if not path:
+            path = '.'
+        if is_system:
+            return ['-isystem' + path]
+        return ['-I' + path]
+
+    @classmethod
+    def use_linker_args(cls, linker: str) -> T.List[str]:
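+        # e.g. use_linker_args('gold') returns ['-fuse-ld=gold']; values other
+        # than bfd, gold and lld raise a MesonException below.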
+        if linker not in {'gold', 'bfd', 'lld'}:
+            raise mesonlib.MesonException(
+                f'Unsupported linker, only bfd, gold, and lld are supported, not {linker}.')
+        return [f'-fuse-ld={linker}']
+
+    def get_coverage_args(self) -> T.List[str]:
+        return ['--coverage']
+
+
+class GnuCompiler(GnuLikeCompiler):
+    """
+    GnuCompiler represents an actual GCC in its many incarnations.
+    Compilers imitating GCC (Clang/Intel) should use the GnuLikeCompiler ABC.
+    """
+
+    def __init__(self, defines: T.Optional[T.Dict[str, str]]):
+        super().__init__()
+        self.id = 'gcc'
+        self.defines = defines or {}
+        self.base_options.update({OptionKey('b_colorout'), OptionKey('b_lto_threads')})
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if mesonlib.version_compare(self.version, '>=4.9.0'):
+            return gnu_color_args[colortype][:]
+        return []
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        # Mypy doesn't understand cooperative inheritance
+        args = super().get_warn_args(level)
+        if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
+            # -Wpedantic was added in 4.8.0
+            # https://gcc.gnu.org/gcc-4.8/changes.html
+            args[args.index('-Wpedantic')] = '-pedantic'
+        return args
+
+    def has_builtin_define(self, define: str) -> bool:
+        return define in self.defines
+
+    def get_builtin_define(self, define: str) -> T.Optional[str]:
+        if define in self.defines:
+            return self.defines[define]
+        return None
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return gnu_optimization_args[optimization_level]
+
+    def get_pch_suffix(self) -> str:
+        return 'gch'
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['-fopenmp']
+
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str,
+                      mode: str) -> T.Tuple[bool, bool]:
+        # For some compiler command line arguments, the GNU compilers will
+        # emit a warning on stderr indicating that an option is valid for
+        # another language, but still complete with exit_success
+        with self._build_wrapper(code, env, args, None, mode) as p:
+            result = p.returncode == 0
+            if self.language in {'cpp', 'objcpp'} and 'is valid for C/ObjC' in p.stderr:
+                result = False
+            if self.language in {'c', 'objc'} and 'is valid for C++/ObjC++' in p.stderr:
+                result = False
+        return result, p.cached
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        # GCC only warns about unknown or ignored attributes, so force an
+        # error.
+        return ['-Werror=attributes']
+
+    def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+        return ['-r', '-o', prelink_name] + obj_list
+
+    def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
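+        # e.g. threads=0 yields ['-flto=auto'] on GCC >= 10 (or '-flto=<cpu count>'
+        # on older GCC), while threads=8 yields ['-flto=8'].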
+        if threads == 0:
+            if mesonlib.version_compare(self.version, '>= 10.0'):
+                return ['-flto=auto']
+            # This matches clang's behavior of using the number of cpus
+            return [f'-flto={multiprocessing.cpu_count()}']
+        elif threads > 0:
+            return [f'-flto={threads}']
+        return super().get_lto_compile_args(threads=threads)
diff --git a/meson/mesonbuild/compilers/mixins/intel.py b/meson/mesonbuild/compilers/mixins/intel.py
new file mode 100644
index 000000000..89f351854
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/intel.py
@@ -0,0 +1,189 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions for the Intel Compiler families.
+
+Intel provides both a POSIX/gcc-like compiler (ICC) for macOS and Linux,
+handled by the IntelGnuLikeCompiler mixin, and an MSVC-like compiler (ICL)
+for Windows, handled by the IntelVisualStudioLikeCompiler mixin.
+"""
+
+import os
+import typing as T
+
+from ... import mesonlib
+from ..compilers import CompileCheckMode
+from .gnu import GnuLikeCompiler
+from .visualstudio import VisualStudioLikeCompiler
+
+if T.TYPE_CHECKING:
+    from ...arglist import CompilerArgs
+    from ...dependencies import Dependency
+    from ...environment import Environment
+
+# XXX: avoid circular dependencies
+# TODO: this belongs in a posix compiler class
+# NOTE: the default Intel optimization is -O2, unlike GNU which defaults to -O0.
+# this can be surprising, particularly for debug builds, so we specify the
+# default as -O0.
+# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-o
+# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-g
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-o
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-g
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-traceback
+# https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
+
+
+class IntelGnuLikeCompiler(GnuLikeCompiler):
+    """
+    Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1, 19.0
+    debugoptimized: -g -O2
+    release: -O3
+    minsize: -O2
+    """
+
+    BUILD_ARGS = {
+        'plain': [],
+        'debug': ["-g", "-traceback"],
+        'debugoptimized': ["-g", "-traceback"],
+        'release': [],
+        'minsize': [],
+        'custom': [],
+    }  # type: T.Dict[str, T.List[str]]
+
+    OPTIM_ARGS = {
+        '0': ['-O0'],
+        'g': ['-O0'],
+        '1': ['-O1'],
+        '2': ['-O2'],
+        '3': ['-O3'],
+        's': ['-Os'],
+    }
+
+    def __init__(self) -> None:
+        super().__init__()
+        # As of 19.0.0 ICC doesn't have sanitizer, color, or LTO support.
+        #
+        # It does have IPO, which serves much the same purpose as LTO, but
+        # there is an unfortunate rule for using IPO (you can't control the
+        # name of the output file) which breaks assumptions Meson makes.
+        self.base_options = {mesonlib.OptionKey(o) for o in [
+            'b_pch', 'b_lundef', 'b_asneeded', 'b_pgo', 'b_coverage',
+            'b_ndebug', 'b_staticpic', 'b_pie']}
+        self.id = 'intel'
+        self.lang_header = 'none'
+
+    def get_pch_suffix(self) -> str:
+        return 'pchi'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return ['-pch', '-pch_dir', os.path.join(pch_dir), '-x',
+                self.lang_header, '-include', header, '-x', 'none']
+
+    def get_pch_name(self, header_name: str) -> str:
+        return os.path.basename(header_name) + '.' + self.get_pch_suffix()
+
+    def openmp_flags(self) -> T.List[str]:
+        if mesonlib.version_compare(self.version, '>=15.0.0'):
+            return ['-qopenmp']
+        else:
+            return ['-openmp']
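+        # For example, ICC 16.0.4 gets ['-qopenmp'] while ICC 14.0.3 still
+        # needs the older ['-openmp'] spelling.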
+
+    def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        extra_args = [
+            '-diag-error', '10006',  # ignoring unknown option
+            '-diag-error', '10148',  # Option not supported
+            '-diag-error', '10155',  # ignoring argument required
+            '-diag-error', '10156',  # ignoring not argument allowed
+            '-diag-error', '10157',  # Ignoring argument of the wrong type
+            '-diag-error', '10158',  # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
+        ]
+        return super().get_compiler_check_args(mode) + extra_args
+
+    def get_profile_generate_args(self) -> T.List[str]:
+        return ['-prof-gen=threadsafe']
+
+    def get_profile_use_args(self) -> T.List[str]:
+        return ['-prof-use']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return self.BUILD_ARGS[buildtype]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return self.OPTIM_ARGS[optimization_level]
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        return ['-diag-error', '1292']
+
+
+class IntelVisualStudioLikeCompiler(VisualStudioLikeCompiler):
+
+    """Abstractions for ICL, the Intel compiler on Windows."""
+
+    BUILD_ARGS = {
+        'plain': [],
+        'debug': ["/Zi", "/traceback"],
+        'debugoptimized': ["/Zi", "/traceback"],
+        'release': [],
+        'minsize': [],
+        'custom': [],
+    }  # type: T.Dict[str, T.List[str]]
+
+    OPTIM_ARGS = {
+        '0': ['/Od'],
+        'g': ['/Od'],
+        '1': ['/O1'],
+        '2': ['/O2'],
+        '3': ['/O3'],
+        's': ['/Os'],
+    }
+
+    def __init__(self, target: str) -> None:
+        super().__init__(target)
+        self.id = 'intel-cl'
+
+    def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        args = super().get_compiler_check_args(mode)
+        if mode is not CompileCheckMode.LINK:
+            args.extend([
+                '/Qdiag-error:10006',  # ignoring unknown option
+                '/Qdiag-error:10148',  # Option not supported
+                '/Qdiag-error:10155',  # ignoring argument required
+                '/Qdiag-error:10156',  # ignoring not argument allowed
+                '/Qdiag-error:10157',  # Ignoring argument of the wrong type
+                '/Qdiag-error:10158',  # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
+            ])
+        return args
+
+    def get_toolset_version(self) -> T.Optional[str]:
+        # ICL provides a cl.exe that returns the version of MSVC it tries to
+        # emulate, so we'll get the version from that and pass it to the same
+        # function the real MSVC uses to calculate the toolset version.
+        _, _, err = mesonlib.Popen_safe(['cl.exe'])
+        v1, v2, *_ = mesonlib.search_version(err).split('.')
+        version = int(v1 + v2)
+        return self._calculate_toolset_version(version)
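+        # Illustrative: if the bundled cl.exe reports 19.16.x, this computes
+        # 1916 and maps it to toolset '14.1' (Visual Studio 2017).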
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['/Qopenmp']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return self.BUILD_ARGS[buildtype]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return self.OPTIM_ARGS[optimization_level]
+
+    def get_pch_base_name(self, header: str) -> str:
+        return os.path.basename(header)
\ No newline at end of file
diff --git a/meson/mesonbuild/compilers/mixins/islinker.py b/meson/mesonbuild/compilers/mixins/islinker.py
new file mode 100644
index 000000000..4c29f8c0d
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/islinker.py
@@ -0,0 +1,129 @@
+# Copyright 2019 The Meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Mixins for compilers that *are* linkers.
+
+While many compilers (such as gcc and clang) are used by meson to dispatch
+linker commands and other (like MSVC) are not, a few (such as DMD) actually
+are both the linker and compiler in one binary. This module provides mixin
+classes for those cases.
+"""
+
+import typing as T
+
+from ...mesonlib import EnvironmentException, MesonException, is_windows
+
+if T.TYPE_CHECKING:
+    from ...coredata import KeyedOptionDictType
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+
+class BasicLinkerIsCompilerMixin(Compiler):
+
+    """Provides a baseline of methods that a linker would implement.
+
+    In every case this provides a "no" or "empty" answer. If a compiler
+    implements any of these it needs a different mixin or to override that
+    functionality itself.
+    """
+
+    def sanitizer_link_args(self, value: str) -> T.List[str]:
+        return []
+
+    def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+        return []
+
+    def can_linker_accept_rsp(self) -> bool:
+        return is_windows()
+
+    def get_linker_exelist(self) -> T.List[str]:
+        return self.exelist.copy()
+
+    def get_linker_output_args(self, output: str) -> T.List[str]:
+        return []
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return []
+
+    def get_linker_lib_prefix(self) -> str:
+        return ''
+
+    def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return []
+
+    def has_multi_link_args(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+        return False, False
+
+    def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+        return []
+
+    def get_std_shared_lib_link_args(self) -> T.List[str]:
+        return []
+
+    def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return self.get_std_shared_lib_link_args()
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        raise EnvironmentException(f'Linker {self.id} does not support link_whole')
+
+    def get_allow_undefined_link_args(self) -> T.List[str]:
+        raise EnvironmentException(f'Linker {self.id} does not support allow undefined')
+
+    def get_pie_link_args(self) -> T.List[str]:
+        raise EnvironmentException(f'Linker {self.id} does not support position-independent executable')
+
+    def get_undefined_link_args(self) -> T.List[str]:
+        return []
+
+    def get_coverage_link_args(self) -> T.List[str]:
+        return []
+
+    def no_undefined_link_args(self) -> T.List[str]:
+        return []
+
+    def bitcode_args(self) -> T.List[str]:
+        raise MesonException("This linker doesn't support bitcode bundles")
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str,
+                        darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        raise MesonException("This linker doesn't support soname args")
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return []
+
+    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+        return []
+
+    def get_link_debugfile_name(self, target: str) -> str:
+        return ''
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        return []
diff --git a/meson/mesonbuild/compilers/mixins/pgi.py b/meson/mesonbuild/compilers/mixins/pgi.py
new file mode 100644
index 000000000..51de8afa5
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/pgi.py
@@ -0,0 +1,109 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions for the PGI family of compilers."""
+
+import typing as T
+import os
+from pathlib import Path
+
+from ..compilers import clike_debug_args, clike_optimization_args
+from ...mesonlib import OptionKey
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+pgi_buildtype_args = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}  # type: T.Dict[str, T.List[str]]
+
+
+class PGICompiler(Compiler):
+
+    def __init__(self) -> None:
+        self.base_options = {OptionKey('b_pch')}
+        self.id = 'pgi'
+
+        default_warn_args = ['-Minform=inform']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args,
+                          '3': default_warn_args
+        }  # type: T.Dict[str, T.List[str]]
+
+    def get_module_incdir_args(self) -> T.Tuple[str]:
+        return ('-module', )
+
+    def get_no_warn_args(self) -> T.List[str]:
+        return ['-silent']
+
+    def gen_import_library_args(self, implibname: str) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # PGI -fPIC is Linux only.
+        if self.info.is_linux():
+            return ['-fPIC']
+        return []
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['-mp']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return pgi_buildtype_args[buildtype]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return clike_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+        return parameter_list
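+        # e.g. with build_dir '/home/me/build', '-Iinclude' becomes
+        # '-I/home/me/build/include'; '-L' arguments are rewritten the same way.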
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pch_suffix(self) -> str:
+        # PGI defaults to .pch suffix for PCH on Linux and Windows with --pch option
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        # PGI supports PCH for C++ only.
+        hdr = Path(pch_dir).resolve().parent / header
+        if self.language == 'cpp':
+            return ['--pch',
+                    '--pch_dir', str(hdr.parent),
+                    f'-I{hdr.parent}']
+        else:
+            return []
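+        # Illustrative: for C++ with pch_dir '/b/pch' and header 'foo.hpp' this
+        # yields ['--pch', '--pch_dir', '/b', '-I/b']; other languages get no
+        # PCH arguments.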
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        # PGI cannot accept -pthread, it's already threaded
+        return []
diff --git a/meson/mesonbuild/compilers/mixins/visualstudio.py b/meson/mesonbuild/compilers/mixins/visualstudio.py
new file mode 100644
index 000000000..e911f64f4
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/visualstudio.py
@@ -0,0 +1,428 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions to simplify compilers that implement an MSVC compatible
+interface.
+"""
+
+import abc
+import os
+import typing as T
+
+from ... import arglist
+from ... import mesonlib
+from ... import mlog
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from .clike import CLikeCompiler as Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+vs32_instruction_set_args = {
+    'mmx': ['/arch:SSE'], # There does not seem to be a flag just for MMX
+    'sse': ['/arch:SSE'],
+    'sse2': ['/arch:SSE2'],
+    'sse3': ['/arch:AVX'], # VS leaped from SSE2 directly to AVX.
+    'sse41': ['/arch:AVX'],
+    'sse42': ['/arch:AVX'],
+    'avx': ['/arch:AVX'],
+    'avx2': ['/arch:AVX2'],
+    'neon': None,
+}  # T.Dict[str, T.Optional[T.List[str]]]
+
+# The 64 bit compiler defaults to /arch:avx.
+vs64_instruction_set_args = {
+    'mmx': ['/arch:AVX'],
+    'sse': ['/arch:AVX'],
+    'sse2': ['/arch:AVX'],
+    'sse3': ['/arch:AVX'],
+    'ssse3': ['/arch:AVX'],
+    'sse41': ['/arch:AVX'],
+    'sse42': ['/arch:AVX'],
+    'avx': ['/arch:AVX'],
+    'avx2': ['/arch:AVX2'],
+    'neon': None,
+}  # T.Dict[str, T.Optional[T.List[str]]]
+
+msvc_optimization_args = {
+    '0': ['/Od'],
+    'g': [], # No specific flag to optimize debugging, /Zi or /ZI will create debug information
+    '1': ['/O1'],
+    '2': ['/O2'],
+    '3': ['/O2', '/Gw'],
+    's': ['/O1', '/Gw'],
+}  # type: T.Dict[str, T.List[str]]
+
+msvc_debug_args = {
+    False: [],
+    True: ['/Zi']
+}  # type: T.Dict[bool, T.List[str]]
+
+
+class VisualStudioLikeCompiler(Compiler, metaclass=abc.ABCMeta):
+
+    """A common interface for all compilers implementing an MSVC-style
+    interface.
+
+    A number of compilers attempt to mimic MSVC, with varying levels of
+    success, such as Clang-CL and ICL (the Intel C/C++ Compiler for Windows).
+    This class implements as much common logic as possible.
+    """
+
+    std_warn_args = ['/W3']
+    std_opt_args = ['/O2']
+    ignore_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS + ['execinfo']
+    internal_libs = []  # type: T.List[str]
+
+    crt_args = {
+        'none': [],
+        'md': ['/MD'],
+        'mdd': ['/MDd'],
+        'mt': ['/MT'],
+        'mtd': ['/MTd'],
+    }  # type: T.Dict[str, T.List[str]]
+
+    # /showIncludes is needed for build dependency tracking in Ninja
+    # See: https://ninja-build.org/manual.html#_deps
+    always_args = ['/nologo', '/showIncludes']
+    warn_args = {
+        '0': [],
+        '1': ['/W2'],
+        '2': ['/W3'],
+        '3': ['/W4'],
+    }  # type: T.Dict[str, T.List[str]]
+
+    INVOKES_LINKER = False
+
+    def __init__(self, target: str):
+        self.base_options = {mesonlib.OptionKey(o) for o in ['b_pch', 'b_ndebug', 'b_vscrt']} # FIXME add lto, pgo and the like
+        self.target = target
+        self.is_64 = ('x64' in target) or ('x86_64' in target)
+        # do some canonicalization of target machine
+        if 'x86_64' in target:
+            self.machine = 'x64'
+        elif '86' in target:
+            self.machine = 'x86'
+        elif 'aarch64' in target:
+            self.machine = 'arm64'
+        elif 'arm' in target:
+            self.machine = 'arm'
+        else:
+            self.machine = target
+        if mesonlib.version_compare(self.version, '>=19.28.29910'): # VS 16.9.0 includes cl 19.28.29910
+            self.base_options.add(mesonlib.OptionKey('b_sanitize'))
+        assert self.linker is not None
+        self.linker.machine = self.machine
+
+    # Override CCompiler.get_always_args
+    def get_always_args(self) -> T.List[str]:
+        return self.always_args
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_name(self, header: str) -> str:
+        chopped = os.path.basename(header).split('.')[:-1]
+        chopped.append(self.get_pch_suffix())
+        pchname = '.'.join(chopped)
+        return pchname
+
+    def get_pch_base_name(self, header: str) -> str:
+        # This needs to be implemented by inheriting classes
+        raise NotImplementedError
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        base = self.get_pch_base_name(header)
+        pchname = self.get_pch_name(header)
+        return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, pchname)]
+
+    def get_preprocess_only_args(self) -> T.List[str]:
+        return ['/EP']
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['/c']
+
+    def get_no_optimization_args(self) -> T.List[str]:
+        return ['/Od','/Oi-']
+
+    def sanitizer_compile_args(self, value: str) -> T.List[str]:
+        if value == 'none':
+            return []
+        if value != 'address':
+            raise mesonlib.MesonException('VS only supports address sanitizer at the moment.')
+        return ['/fsanitize=address']
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        if target.endswith('.exe'):
+            return ['/Fe' + target]
+        return ['/Fo' + target]
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return []
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return msvc_debug_args[is_debug]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        args = msvc_optimization_args[optimization_level]
+        if mesonlib.version_compare(self.version, '<18.0'):
+            args = [arg for arg in args if arg != '/Gw']
+        return args
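+        # e.g. level '3' gives ['/O2', '/Gw'] on cl 18.0 or newer, but only
+        # ['/O2'] on older versions that lack /Gw.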
+
+    def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
+        return ['/link'] + args
+
+    def get_pic_args(self) -> T.List[str]:
+        return [] # PIC is handled by the loader on Windows
+
+    def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
+        if not isinstance(defsfile, str):
+            raise RuntimeError('Module definitions file should be str')
+        # With MSVC, DLLs only export symbols that are explicitly exported,
+        # so if a module defs file is specified, we use that to export symbols
+        return ['/DEF:' + defsfile]
+
+    def gen_pch_args(self, header: str, source: str, pchname: str) -> T.Tuple[str, T.List[str]]:
+        objname = os.path.splitext(pchname)[0] + '.obj'
+        return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['/openmp']
+
+    def openmp_link_flags(self) -> T.List[str]:
+        return []
+
+    # FIXME, no idea what these should be.
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+        result = []
+        for i in args:
+            # -mms-bitfields is specific to MinGW-GCC
+            # -pthread is only valid for GCC
+            if i in ('-mms-bitfields', '-pthread'):
+                continue
+            if i.startswith('-LIBPATH:'):
+                i = '/LIBPATH:' + i[9:]
+            elif i.startswith('-L'):
+                i = '/LIBPATH:' + i[2:]
+            # Translate GNU-style -lfoo library name to the import library
+            elif i.startswith('-l'):
+                name = i[2:]
+                if name in cls.ignore_libs:
+                    # With MSVC, these are provided by the C runtime which is
+                    # linked in by default
+                    continue
+                else:
+                    i = name + '.lib'
+            elif i.startswith('-isystem'):
+                # just use /I for -isystem system include paths
+                if i.startswith('-isystem='):
+                    i = '/I' + i[9:]
+                else:
+                    i = '/I' + i[8:]
+            elif i.startswith('-idirafter'):
+                # same as -isystem, but appends the path instead
+                if i.startswith('-idirafter='):
+                    i = '/I' + i[11:]
+                else:
+                    i = '/I' + i[10:]
+            # -pthread in link flags is only used on Linux
+            elif i == '-pthread':
+                continue
+            result.append(i)
+        return result
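+        # Illustrative translation: ['-L/opt/lib', '-lfoo', '-pthread'] becomes
+        # ['/LIBPATH:/opt/lib', 'foo.lib']; -pthread is dropped and names in
+        # ignore_libs are assumed to be provided by the CRT.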
+
+    @classmethod
+    def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+        result = []
+        for arg in args:
+            if arg.startswith(('/LIBPATH:', '-LIBPATH:')):
+                result.append('-L' + arg[9:])
+            elif arg.endswith(('.a', '.lib')) and not os.path.isabs(arg):
+                result.append('-l' + arg)
+            else:
+                result.append(arg)
+        return result
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['/WX']
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        if path == '':
+            path = '.'
+        # msvc does not have a concept of system header dirs.
+        return ['-I' + path]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '/I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+            elif i[:9] == '/LIBPATH:':
+                parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+
+        return parameter_list
+
+    # Visual Studio is special. It ignores some arguments it does not
+    # understand and you can't tell it to error out on those.
+    # http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: str) -> T.Tuple[bool, bool]:
+        warning_text = '4044' if mode == 'link' else '9002'
+        with self._build_wrapper(code, env, extra_args=args, mode=mode) as p:
+            if p.returncode != 0:
+                return False, p.cached
+            return not(warning_text in p.stderr or warning_text in p.stdout), p.cached
+
+    def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+        pdbarr = rel_obj.split('.')[:-1]
+        pdbarr += ['pdb']
+        args = ['/Fd' + '.'.join(pdbarr)]
+        return args
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        if self.is_64:
+            return vs64_instruction_set_args.get(instruction_set, None)
+        return vs32_instruction_set_args.get(instruction_set, None)
+
+    def _calculate_toolset_version(self, version: int) -> T.Optional[str]:
+        if version < 1310:
+            return '7.0'
+        elif version < 1400:
+            return '7.1' # (Visual Studio 2003)
+        elif version < 1500:
+            return '8.0' # (Visual Studio 2005)
+        elif version < 1600:
+            return '9.0' # (Visual Studio 2008)
+        elif version < 1700:
+            return '10.0' # (Visual Studio 2010)
+        elif version < 1800:
+            return '11.0' # (Visual Studio 2012)
+        elif version < 1900:
+            return '12.0' # (Visual Studio 2013)
+        elif version < 1910:
+            return '14.0' # (Visual Studio 2015)
+        elif version < 1920:
+            return '14.1' # (Visual Studio 2017)
+        elif version < 1930:
+            return '14.2' # (Visual Studio 2019)
+        mlog.warning(f'Could not find toolset for version {self.version!r}')
+        return None
+
+    def get_toolset_version(self) -> T.Optional[str]:
+        # See boost/config/compiler/visualc.cpp for up to date mapping
+        try:
+            version = int(''.join(self.version.split('.')[0:2]))
+        except ValueError:
+            return None
+        return self._calculate_toolset_version(version)
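+        # e.g. self.version '19.29.30133' gives 1929, which maps to toolset
+        # '14.2' (Visual Studio 2019).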
+
+    def get_default_include_dirs(self) -> T.List[str]:
+        if 'INCLUDE' not in os.environ:
+            return []
+        return os.environ['INCLUDE'].split(os.pathsep)
+
+    def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        if crt_val in self.crt_args:
+            return self.crt_args[crt_val]
+        assert(crt_val in ['from_buildtype', 'static_from_buildtype'])
+        dbg = 'mdd'
+        rel = 'md'
+        if crt_val == 'static_from_buildtype':
+            dbg = 'mtd'
+            rel = 'mt'
+        # Match what build type flags used to do.
+        if buildtype == 'plain':
+            return []
+        elif buildtype == 'debug':
+            return self.crt_args[dbg]
+        elif buildtype == 'debugoptimized':
+            return self.crt_args[rel]
+        elif buildtype == 'release':
+            return self.crt_args[rel]
+        elif buildtype == 'minsize':
+            return self.crt_args[rel]
+        else:
+            assert(buildtype == 'custom')
+            raise mesonlib.EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".')
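+        # Illustrative: 'from_buildtype' with buildtype 'debug' selects
+        # ['/MDd'], while 'static_from_buildtype' with 'release' selects ['/MT'].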
+
+    def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+        # MSVC doesn't have __attribute__ like Clang and GCC do, so just return
+        # false without compiling anything
+        return name in ['dllimport', 'dllexport'], False
+
+    def get_argument_syntax(self) -> str:
+        return 'msvc'
+
+
+class MSVCCompiler(VisualStudioLikeCompiler):
+
+    """Spcific to the Microsoft Compilers."""
+
+    def __init__(self, target: str):
+        super().__init__(target)
+        self.id = 'msvc'
+
+    def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+        args = super().get_compile_debugfile_args(rel_obj, pch)
+        # When generating a PDB file with PCH, all compile commands write
+        # to the same PDB file. Hence, we need to serialize the PDB
+        # writes using /FS since we do parallel builds. This slows down the
+        # build obviously, which is why we only do this when PCH is on.
+        # This was added in Visual Studio 2013 (MSVC 18.0). Before that it was
+        # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx
+        if pch and mesonlib.version_compare(self.version, '>=18.0'):
+            args = ['/FS'] + args
+        return args
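+        # e.g. rel_obj 'foo.obj' with pch enabled on cl 19.x yields
+        # ['/FS', '/Fdfoo.pdb']; without PCH it is just ['/Fdfoo.pdb'].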
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        if self.version.split('.')[0] == '16' and instruction_set == 'avx':
+            # VS documentation says that this exists and should work, but
+            # it does not. The headers do not contain AVX intrinsics
+            # and they can not be called.
+            return None
+        return super().get_instruction_set_args(instruction_set)
+
+    def get_pch_base_name(self, header: str) -> str:
+        return os.path.basename(header)
+
+
+class ClangClCompiler(VisualStudioLikeCompiler):
+
+    """Spcific to Clang-CL."""
+
+    def __init__(self, target: str):
+        super().__init__(target)
+        self.id = 'clang-cl'
+
+        # Assembly
+        self.can_compile_suffixes.add('s')
+
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: str) -> T.Tuple[bool, bool]:
+        if mode != 'link':
+            args = args + ['-Werror=unknown-argument']
+        return super().has_arguments(args, env, code, mode)
+
+    def get_toolset_version(self) -> T.Optional[str]:
+        # XXX: what is the right thing to do here?
+        return '14.1'
+
+    def get_pch_base_name(self, header: str) -> str:
+        return header
diff --git a/meson/mesonbuild/compilers/mixins/xc16.py b/meson/mesonbuild/compilers/mixins/xc16.py
new file mode 100644
index 000000000..77c4690ff
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/xc16.py
@@ -0,0 +1,127 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the Microchip XC16 C compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+xc16_buildtype_args = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}  # type: T.Dict[str, T.List[str]]
+
+xc16_optimization_args = {
+    '0': ['-O0'],
+    'g': ['-O0'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os']
+}  # type: T.Dict[str, T.List[str]]
+
+xc16_debug_args = {
+    False: [],
+    True: []
+}  # type: T.Dict[bool, T.List[str]]
+
+
+class Xc16Compiler(Compiler):
+
+    def __init__(self) -> None:
+        if not self.is_cross:
+            raise EnvironmentException('xc16 supports only cross-compilation.')
+        self.id = 'xc16'
+        # Assembly
+        self.can_compile_suffixes.add('s')
+        default_warn_args = []  # type: T.List[str]
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + [],
+                          '3': default_warn_args + []}  # type: T.Dict[str, T.List[str]]
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # PIC support is not enabled by default for xc16,
+        # if users want to use it, they need to add the required arguments explicitly
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return xc16_buildtype_args[buildtype]
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return []
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def get_coverage_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return ['-nostdinc']
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return ['--nostdlib']
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return xc16_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return xc16_debug_args[is_debug]
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+        result = []
+        for i in args:
+            if i.startswith('-D'):
+                i = '-D' + i[2:]
+            if i.startswith('-I'):
+                i = '-I' + i[2:]
+            if i.startswith('-Wl,-rpath='):
+                continue
+            elif i == '--print-search-dirs':
+                continue
+            elif i.startswith('-L'):
+                continue
+            result.append(i)
+        return result
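+        # Illustrative: ['-DFOO', '-I/inc', '-Wl,-rpath=/x', '-L/lib'] becomes
+        # ['-DFOO', '-I/inc']; rpath, search-dir and library-path arguments are
+        # dropped.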
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
diff --git a/meson/mesonbuild/compilers/objc.py b/meson/mesonbuild/compilers/objc.py
new file mode 100644
index 000000000..7afa44f41
--- /dev/null
+++ b/meson/mesonbuild/compilers/objc.py
@@ -0,0 +1,108 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+
+from .. import coredata
+from ..mesonlib import MachineChoice, OptionKey
+
+from .compilers import Compiler
+from .mixins.clike import CLikeCompiler
+from .mixins.gnu import GnuCompiler
+from .mixins.clang import ClangCompiler
+
+if T.TYPE_CHECKING:
+    from ..programs import ExternalProgram
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers import DynamicLinker
+
+
+class ObjCCompiler(CLikeCompiler, Compiler):
+
+    language = 'objc'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrap: T.Optional['ExternalProgram'],
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        Compiler.__init__(self, exelist, version, for_machine, info,
+                          is_cross=is_cross, full_version=full_version,
+                          linker=linker)
+        CLikeCompiler.__init__(self, exe_wrap)
+
+    @staticmethod
+    def get_display_language() -> str:
+        return 'Objective-C'
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        code = '#import<stddef.h>\nint main(void) { return 0; }\n'
+        return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjc.m', code)
+
+
+class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        GnuCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        ClangCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+    def get_options(self) -> 'coredata.KeyedOptionDictType':
+        opts = super().get_options()
+        opts.update({
+            OptionKey('std', machine=self.for_machine, lang='c'): coredata.UserComboOption(
+                'C language standard to use',
+                ['none', 'c89', 'c99', 'c11', 'c17', 'gnu89', 'gnu99', 'gnu11', 'gnu17'],
+                'none',
+            )
+        })
+        return opts
+
+    def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+        args = []
+        std = options[OptionKey('std', machine=self.for_machine, lang='c')]
+        if std.value != 'none':
+            args.append('-std=' + std.value)
+        return args
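+        # e.g. configuring with something like -Dc_std=gnu99 would make this
+        # return ['-std=gnu99']; the default 'none' adds nothing.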
+
+class AppleClangObjCCompiler(ClangObjCCompiler):
+
+    """Handle the differences between Apple's clang and vanilla clang."""
diff --git a/meson/mesonbuild/compilers/objcpp.py b/meson/mesonbuild/compilers/objcpp.py
new file mode 100644
index 000000000..63036557d
--- /dev/null
+++ b/meson/mesonbuild/compilers/objcpp.py
@@ -0,0 +1,110 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+
+from .. import coredata
+from ..mesonlib import MachineChoice, OptionKey
+
+from .mixins.clike import CLikeCompiler
+from .compilers import Compiler
+from .mixins.gnu import GnuCompiler
+from .mixins.clang import ClangCompiler
+
+if T.TYPE_CHECKING:
+    from ..programs import ExternalProgram
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers import DynamicLinker
+
+class ObjCPPCompiler(CLikeCompiler, Compiler):
+
+    language = 'objcpp'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrap: T.Optional['ExternalProgram'],
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        Compiler.__init__(self, exelist, version, for_machine, info,
+                          is_cross=is_cross, full_version=full_version,
+                          linker=linker)
+        CLikeCompiler.__init__(self, exe_wrap)
+
+    @staticmethod
+    def get_display_language() -> str:
+        return 'Objective-C++'
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        code = '#import<stdio.h>\nclass MyClass;int main(void) { return 0; }\n'
+        return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjcpp.mm', code)
+
+
+class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler):
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        GnuCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        ClangCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+    def get_options(self) -> 'coredata.KeyedOptionDictType':
+        opts = super().get_options()
+        opts.update({
+            OptionKey('std', machine=self.for_machine, lang='cpp'): coredata.UserComboOption(
+                'C++ language standard to use',
+                ['none', 'c++98', 'c++11', 'c++14', 'c++17', 'gnu++98', 'gnu++11', 'gnu++14', 'gnu++17'],
+                'none',
+            )
+        })
+        return opts
+
+    def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+        args = []
+        std = options[OptionKey('std', machine=self.for_machine, lang='cpp')]
+        if std.value != 'none':
+            args.append('-std=' + std.value)
+        return args
+
+
+class AppleClangObjCPPCompiler(ClangObjCPPCompiler):
+
+    """Handle the differences between Apple's clang and vanilla clang."""
diff --git a/meson/mesonbuild/compilers/rust.py b/meson/mesonbuild/compilers/rust.py
new file mode 100644
index 000000000..2b566c8b9
--- /dev/null
+++ b/meson/mesonbuild/compilers/rust.py
@@ -0,0 +1,170 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess, os.path
+import textwrap
+import typing as T
+
+from .. import coredata
+from ..mesonlib import (
+    EnvironmentException, MachineChoice, MesonException, Popen_safe,
+    OptionKey,
+)
+from .compilers import Compiler, rust_buildtype_args, clike_debug_args
+
+if T.TYPE_CHECKING:
+    from ..coredata import KeyedOptionDictType
+    from ..envconfig import MachineInfo
+    from ..environment import Environment  # noqa: F401
+    from ..linkers import DynamicLinker
+    from ..programs import ExternalProgram
+
+
+rust_optimization_args = {
+    '0': [],
+    'g': ['-C', 'opt-level=0'],
+    '1': ['-C', 'opt-level=1'],
+    '2': ['-C', 'opt-level=2'],
+    '3': ['-C', 'opt-level=3'],
+    's': ['-C', 'opt-level=s'],
+}  # type: T.Dict[str, T.List[str]]
+
+class RustCompiler(Compiler):
+
+    # rustc doesn't invoke the compiler itself, so it doesn't need a LINKER_PREFIX
+    language = 'rust'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 full_version: T.Optional[str] = None,
+                 linker: T.Optional['DynamicLinker'] = None):
+        super().__init__(exelist, version, for_machine, info,
+                         is_cross=is_cross, full_version=full_version,
+                         linker=linker)
+        self.exe_wrapper = exe_wrapper
+        self.id = 'rustc'
+        self.base_options.add(OptionKey('b_colorout'))
+        if 'link' in self.linker.id:
+            self.base_options.add(OptionKey('b_vscrt'))
+
+    def needs_static_linker(self) -> bool:
+        return False
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        source_name = os.path.join(work_dir, 'sanity.rs')
+        output_name = os.path.join(work_dir, 'rusttest')
+        with open(source_name, 'w', encoding='utf-8') as ofile:
+            ofile.write(textwrap.dedent(
+                '''fn main() {
+                }
+                '''))
+        pc = subprocess.Popen(self.exelist + ['-o', output_name, source_name],
+                              stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE,
+                              cwd=work_dir)
+        _stdo, _stde = pc.communicate()
+        assert isinstance(_stdo, bytes)
+        assert isinstance(_stde, bytes)
+        stdo = _stdo.decode('utf-8', errors='replace')
+        stde = _stde.decode('utf-8', errors='replace')
+        if pc.returncode != 0:
+            raise EnvironmentException('Rust compiler {} can not compile programs.\n{}\n{}'.format(
+                self.name_string(),
+                stdo,
+                stde))
+        if self.is_cross:
+            if self.exe_wrapper is None:
+                # Can't check if the binaries run so we have to assume they do
+                return
+            cmdlist = self.exe_wrapper.get_command() + [output_name]
+        else:
+            cmdlist = [output_name]
+        pe = subprocess.Popen(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+        pe.wait()
+        if pe.returncode != 0:
+            raise EnvironmentException('Executables created by Rust compiler %s are not runnable.' % self.name_string())
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        return ['--dep-info', outfile]
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return rust_buildtype_args[buildtype]
+
+    def get_sysroot(self) -> str:
+        cmd = self.exelist + ['--print', 'sysroot']
+        p, stdo, stde = Popen_safe(cmd)
+        return stdo.split('\n')[0]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return rust_optimization_args[optimization_level]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-L':
+                for j in ['dependency', 'crate', 'native', 'framework', 'all']:
+                    combined_len = len(j) + 3
+                    if i[:combined_len] == f'-L{j}=':
+                        parameter_list[idx] = i[:combined_len] + os.path.normpath(os.path.join(build_dir, i[combined_len:]))
+                        break
+
+        return parameter_list
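+        # e.g. with build_dir '/b', '-Ldependency=rs/deps' is rewritten to
+        # '-Ldependency=/b/rs/deps'; plain '-L' arguments without a kind=
+        # prefix are left untouched.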
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o', outputname]
+
+    @classmethod
+    def use_linker_args(cls, linker: str) -> T.List[str]:
+        return ['-C', f'linker={linker}']
+
+    # Rust dispatches to a gcc-like C compiler driver for dynamic linking, so
+    # use_linker_args above simply tells rustc which linker executable to
+    # invoke via '-C linker=...'.
+
+    def get_options(self) -> 'KeyedOptionDictType':
+        key = OptionKey('std', machine=self.for_machine, lang=self.language)
+        return {
+            key: coredata.UserComboOption(
+                'Rust edition to use',
+                ['none', '2015', '2018'],
+                'none',
+            ),
+        }
+
+    def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        args = []
+        key = OptionKey('std', machine=self.for_machine, lang=self.language)
+        std = options[key]
+        if std.value != 'none':
+            args.append('--edition=' + std.value)
+        return args
+
+    def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        # Rust handles this for us, we don't need to do anything
+        return []
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if colortype in {'always', 'never', 'auto'}:
+            return [f'--color={colortype}']
+        raise MesonException(f'Invalid color type for rust {colortype}')
+
+    def get_linker_always_args(self) -> T.List[str]:
+        args: T.List[str] = []
+        for a in super().get_linker_always_args():
+            args.extend(['-C', f'link-arg={a}'])
+        return args
diff --git a/meson/mesonbuild/compilers/swift.py b/meson/mesonbuild/compilers/swift.py
new file mode 100644
index 000000000..2d52e2182
--- /dev/null
+++ b/meson/mesonbuild/compilers/swift.py
@@ -0,0 +1,127 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess, os.path
+import typing as T
+
+from ..mesonlib import EnvironmentException, MachineChoice
+
+from .compilers import Compiler, swift_buildtype_args, clike_debug_args
+
+if T.TYPE_CHECKING:
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers import DynamicLinker
+
+swift_optimization_args = {
+    '0': [],
+    'g': [],
+    '1': ['-O'],
+    '2': ['-O'],
+    '3': ['-O'],
+    's': ['-O'],
+}  # type: T.Dict[str, T.List[str]]
+
+class SwiftCompiler(Compiler):
+
+    LINKER_PREFIX = ['-Xlinker']
+    language = 'swift'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo', full_version: T.Optional[str] = None,
+                 linker: T.Optional['DynamicLinker'] = None):
+        super().__init__(exelist, version, for_machine, info,
+                         is_cross=is_cross, full_version=full_version,
+                         linker=linker)
+        self.version = version
+        self.id = 'llvm'
+
+    def needs_static_linker(self) -> bool:
+        return True
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['--fatal-warnings']
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        return ['-emit-dependencies']
+
+    def depfile_for_object(self, objfile: str) -> str:
+        return os.path.splitext(objfile)[0] + '.' + self.get_depfile_suffix()
+
+    def get_depfile_suffix(self) -> str:
+        return 'd'
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['-o', target]
+
+    def get_header_import_args(self, headername: str) -> T.List[str]:
+        return ['-import-objc-header', headername]
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return swift_buildtype_args[buildtype]
+
+    def get_std_exe_link_args(self) -> T.List[str]:
+        return ['-emit-executable']
+
+    def get_module_args(self, modname: str) -> T.List[str]:
+        return ['-module-name', modname]
+
+    def get_mod_gen_args(self) -> T.List[str]:
+        return ['-emit-module']
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        return ['-I' + path]
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        src = 'swifttest.swift'
+        source_name = os.path.join(work_dir, src)
+        output_name = os.path.join(work_dir, 'swifttest')
+        extra_flags: T.List[str] = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
+        if self.is_cross:
+            extra_flags += self.get_compile_only_args()
+        else:
+            extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
+        with open(source_name, 'w', encoding='utf-8') as ofile:
+            ofile.write('''print("Swift compilation is working.")
+''')
+        pc = subprocess.Popen(self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
+        pc.wait()
+        if pc.returncode != 0:
+            raise EnvironmentException('Swift compiler %s can not compile programs.' % self.name_string())
+        if self.is_cross:
+            # Can't check if the binaries run so we have to assume they do
+            return
+        if subprocess.call(output_name) != 0:
+            raise EnvironmentException('Executables created by Swift compiler %s are not runnable.' % self.name_string())
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return swift_optimization_args[optimization_level]
diff --git a/meson/mesonbuild/compilers/vala.py b/meson/mesonbuild/compilers/vala.py
new file mode 100644
index 000000000..b8144f6bc
--- /dev/null
+++ b/meson/mesonbuild/compilers/vala.py
@@ -0,0 +1,138 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import typing as T
+
+from .. import mlog
+from ..mesonlib import EnvironmentException, MachineChoice, version_compare, OptionKey
+
+from .compilers import Compiler, LibType
+
+if T.TYPE_CHECKING:
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+
+class ValaCompiler(Compiler):
+
+    language = 'vala'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo'):
+        super().__init__(exelist, version, for_machine, info, is_cross=is_cross)
+        self.version = version
+        self.id = 'valac'
+        self.base_options = {OptionKey('b_colorout')}
+
+    def needs_static_linker(self) -> bool:
+        return False # Because Vala compiles to C.
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return []
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return ['--debug'] if is_debug else []
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [] # Because Vala compiles to C.
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return [] # Because Vala compiles to C.
+
+    def get_pic_args(self) -> T.List[str]:
+        return []
+
+    def get_pie_args(self) -> T.List[str]:
+        return []
+
+    def get_pie_link_args(self) -> T.List[str]:
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return ['-C']
+
+    def get_warn_args(self, warning_level: str) -> T.List[str]:
+        return []
+
+    def get_no_warn_args(self) -> T.List[str]:
+        return ['--disable-warnings']
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['--fatal-warnings']
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if version_compare(self.version, '>=0.37.1'):
+            return ['--color=' + colortype]
+        return []
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
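+        # Rebase the path-valued Vala arguments (--girdir=, --vapidir=, --includedir=,
+        # --metadatadir=) so relative values are resolved from the build directory.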
+        for idx, i in enumerate(parameter_list):
+            if i[:9] == '--girdir=':
+                parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+            if i[:10] == '--vapidir=':
+                parameter_list[idx] = i[:10] + os.path.normpath(os.path.join(build_dir, i[10:]))
+            if i[:13] == '--includedir=':
+                parameter_list[idx] = i[:13] + os.path.normpath(os.path.join(build_dir, i[13:]))
+            if i[:14] == '--metadatadir=':
+                parameter_list[idx] = i[:14] + os.path.normpath(os.path.join(build_dir, i[14:]))
+
+        return parameter_list
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        code = 'class MesonSanityCheck : Object { }'
+        extra_flags: T.List[str] = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
+        if self.is_cross:
+            extra_flags += self.get_compile_only_args()
+        else:
+            extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
+        with self.cached_compile(code, environment.coredata, extra_args=extra_flags, mode='compile') as p:
+            if p.returncode != 0:
+                msg = f'Vala compiler {self.name_string()!r} cannot compile programs'
+                raise EnvironmentException(msg)
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        if buildtype in {'debug', 'debugoptimized', 'minsize'}:
+            return ['--debug']
+        return []
+
+    def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+                     libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]:
+        if extra_dirs and isinstance(extra_dirs, str):
+            extra_dirs = [extra_dirs]
+        # Valac always looks in the default vapi dir, so only search there if
+        # no extra dirs are specified.
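+        # e.g. looking up 'gio-2.0' (a typical vapi name) with no extra_dirs just
+        # checks whether a trivial class compiles with '--pkg gio-2.0'.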
+        if not extra_dirs:
+            code = 'class MesonFindLibrary : Object { }'
+            args: T.List[str] = []
+            args += env.coredata.get_external_args(self.for_machine, self.language)
+            vapi_args = ['--pkg', libname]
+            args += vapi_args
+            with self.cached_compile(code, env.coredata, extra_args=args, mode='compile') as p:
+                if p.returncode == 0:
+                    return vapi_args
+        # Not found? Try to find the vapi file itself.
+        for d in extra_dirs:
+            vapi = os.path.join(d, libname + '.vapi')
+            if os.path.isfile(vapi):
+                return [vapi]
+        mlog.debug(f'Searched {extra_dirs!r} but {libname!r} was not found')
+        return None
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        return []
diff --git a/meson/mesonbuild/coredata.py b/meson/mesonbuild/coredata.py
new file mode 100644
index 000000000..528ca9c66
--- /dev/null
+++ b/meson/mesonbuild/coredata.py
@@ -0,0 +1,1228 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import mlog, mparser
+import pickle, os, uuid
+import sys
+from itertools import chain
+from pathlib import PurePath
+from collections import OrderedDict
+from .mesonlib import (
+    HoldableObject,
+    MesonException, EnvironmentException, MachineChoice, PerMachine,
+    PerMachineDefaultable, default_libdir, default_libexecdir,
+    default_prefix, split_args, OptionKey, OptionType, stringlistify,
+)
+from .wrap import WrapMode
+import ast
+import argparse
+import configparser
+import enum
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+    from . import dependencies
+    from .compilers.compilers import Compiler, CompileResult  # noqa: F401
+    from .environment import Environment
+    from .mesonlib import OptionOverrideProxy
+    from .cmake.traceparser import CMakeCacheEntry
+
+    OptionDictType = T.Union[T.Dict[str, 'UserOption[T.Any]'], OptionOverrideProxy]
+    KeyedOptionDictType = T.Union[T.Dict['OptionKey', 'UserOption[T.Any]'], OptionOverrideProxy]
+    CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, str, T.Tuple[str, ...], str]
+
+version = '0.59.3'
+backendlist = ['ninja', 'vs', 'vs2010', 'vs2012', 'vs2013', 'vs2015', 'vs2017', 'vs2019', 'xcode']
+
+default_yielding = False
+
+# Can't bind this near the class method it seems, sadly.
+_T = T.TypeVar('_T')
+
+
+class MesonVersionMismatchException(MesonException):
+    '''Build directory was generated with a Meson version that is incompatible with the current version.'''
+    def __init__(self, old_version: str, current_version: str) -> None:
+        super().__init__(f'Build directory has been generated with Meson version {old_version}, '
+                         f'which is incompatible with the current version {current_version}.')
+        self.old_version = old_version
+        self.current_version = current_version
+
+
+class UserOption(T.Generic[_T], HoldableObject):
+    def __init__(self, description: str, choices: T.Optional[T.Union[str, T.List[_T]]], yielding: T.Optional[bool]):
+        super().__init__()
+        self.choices = choices
+        self.description = description
+        if yielding is None:
+            yielding = default_yielding
+        if not isinstance(yielding, bool):
+            raise MesonException('Value of "yielding" must be a boolean.')
+        self.yielding = yielding
+
+    def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]:
+        assert isinstance(self.value, (str, int, bool, list))
+        return self.value
+
+    # Check that the input is a valid value and return the
+    # "cleaned" or "native" version. For example the Boolean
+    # option could take the string "true" and return True.
+    def validate_value(self, value: T.Any) -> _T:
+        raise RuntimeError('Derived option class did not override validate_value.')
+
+    def set_value(self, newvalue: T.Any) -> None:
+        self.value = self.validate_value(newvalue)
+
+class UserStringOption(UserOption[str]):
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+        super().__init__(description, None, yielding)
+        self.set_value(value)
+
+    def validate_value(self, value: T.Any) -> str:
+        if not isinstance(value, str):
+            raise MesonException('Value "%s" for string option is not a string.' % str(value))
+        return value
+
+class UserBooleanOption(UserOption[bool]):
+    def __init__(self, description: str, value, yielding: T.Optional[bool] = None) -> None:
+        super().__init__(description, [True, False], yielding)
+        self.set_value(value)
+
+    def __bool__(self) -> bool:
+        return self.value
+
+    def validate_value(self, value: T.Any) -> bool:
+        if isinstance(value, bool):
+            return value
+        if not isinstance(value, str):
+            raise MesonException(f'Value {value} cannot be converted to a boolean')
+        if value.lower() == 'true':
+            return True
+        if value.lower() == 'false':
+            return False
+        raise MesonException('Value %s is not boolean (true or false).' % value)
+
+class UserIntegerOption(UserOption[int]):
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+        min_value, max_value, default_value = value
+        self.min_value = min_value
+        self.max_value = max_value
+        c = []
+        if min_value is not None:
+            c.append('>=' + str(min_value))
+        if max_value is not None:
+            c.append('<=' + str(max_value))
+        choices = ', '.join(c)
+        super().__init__(description, choices, yielding)
+        self.set_value(default_value)
+
+    def validate_value(self, value: T.Any) -> int:
+        if isinstance(value, str):
+            value = self.toint(value)
+        if not isinstance(value, int):
+            raise MesonException('New value for integer option is not an integer.')
+        if self.min_value is not None and value < self.min_value:
+            raise MesonException('New value %d is less than minimum value %d.' % (value, self.min_value))
+        if self.max_value is not None and value > self.max_value:
+            raise MesonException('New value %d is more than maximum value %d.' % (value, self.max_value))
+        return value
+
+    def toint(self, valuestring: str) -> int:
+        try:
+            return int(valuestring)
+        except ValueError:
+            raise MesonException('Value string "%s" is not convertible to an integer.' % valuestring)
+
+class OctalInt(int):
+    # NinjaBackend.get_user_option_args uses str() to convert it to a command line option
+    # UserUmaskOption.toint() uses int(str, 8) to convert it to an integer
+    # So we need to use oct instead of dec here if we do not want values to be misinterpreted.
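+    # Illustration: 0o644 == 420, and int('420', 8) would give back 0o420, so printing
+    # the plain decimal form would corrupt the mode on that round trip.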
+    def __str__(self):
+        return oct(int(self))
+
+class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, OctalInt]]):
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+        super().__init__(description, (0, 0o777, value), yielding)
+        self.choices = ['preserve', '0000-0777']
+
+    def printable_value(self) -> str:
+        if self.value == 'preserve':
+            return self.value
+        return format(self.value, '04o')
+
+    def validate_value(self, value: T.Any) -> T.Union[str, OctalInt]:
+        if value is None or value == 'preserve':
+            return 'preserve'
+        return OctalInt(super().validate_value(value))
+
+    def toint(self, valuestring: T.Union[str, OctalInt]) -> int:
+        try:
+            return int(valuestring, 8)
+        except ValueError as e:
+            raise MesonException(f'Invalid mode: {e}')
+
+class UserComboOption(UserOption[str]):
+    def __init__(self, description: str, choices: T.List[str], value: T.Any, yielding: T.Optional[bool] = None):
+        super().__init__(description, choices, yielding)
+        if not isinstance(self.choices, list):
+            raise MesonException('Combo choices must be an array.')
+        for i in self.choices:
+            if not isinstance(i, str):
+                raise MesonException('Combo choice elements must be strings.')
+        self.set_value(value)
+
+    def validate_value(self, value: T.Any) -> str:
+        if value not in self.choices:
+            if isinstance(value, bool):
+                _type = 'boolean'
+            elif isinstance(value, (int, float)):
+                _type = 'number'
+            else:
+                _type = 'string'
+            optionsstring = ', '.join([f'"{item}"' for item in self.choices])
+            raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.'
+                                 ' Possible choices are (as string): {}.'.format(
+                                     value, _type, self.description, optionsstring))
+        return value
+
+class UserArrayOption(UserOption[T.List[str]]):
+    def __init__(self, description: str, value: T.Union[str, T.List[str]], split_args: bool = False, user_input: bool = False, allow_dups: bool = False, **kwargs: T.Any) -> None:
+        super().__init__(description, kwargs.get('choices', []), yielding=kwargs.get('yielding', None))
+        self.split_args = split_args
+        self.allow_dups = allow_dups
+        self.value = self.validate_value(value, user_input=user_input)
+
+    def validate_value(self, value: T.Union[str, T.List[str]], user_input: bool = True) -> T.List[str]:
+        # User input comes from options given on the command line (via -D
+        # options). Users can pass their input as a comma-separated string,
+        # but options defined in meson_options.txt must use the same format
+        # as a combo option.
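+        # e.g. a hypothetical '-Dfoo=a,b' arrives here as the string 'a,b' and becomes
+        # ['a', 'b'], whereas with user_input=False a string value must be bracketed
+        # like "['a', 'b']".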
+        if not user_input and isinstance(value, str) and not value.startswith('['):
+            raise MesonException('Value does not define an array: ' + value)
+
+        if isinstance(value, str):
+            if value.startswith('['):
+                try:
+                    newvalue = ast.literal_eval(value)
+                except ValueError:
+                    raise MesonException(f'malformed option {value}')
+            elif value == '':
+                newvalue = []
+            else:
+                if self.split_args:
+                    newvalue = split_args(value)
+                else:
+                    newvalue = [v.strip() for v in value.split(',')]
+        elif isinstance(value, list):
+            newvalue = value
+        else:
+            raise MesonException(f'"{value}" should be a string array, but it is not')
+
+        if not self.allow_dups and len(set(newvalue)) != len(newvalue):
+            msg = 'Duplicated values in an array option are deprecated. ' \
+                  'This will become a hard error in the future.'
+            mlog.deprecation(msg)
+        for i in newvalue:
+            if not isinstance(i, str):
+                raise MesonException(f'String array element "{i!s}" is not a string.')
+        if self.choices:
+            bad = [x for x in newvalue if x not in self.choices]
+            if bad:
+                raise MesonException('Options "{}" are not in allowed choices: "{}"'.format(
+                    ', '.join(bad), ', '.join(self.choices)))
+        return newvalue
+
+    def extend_value(self, value: T.Union[str, T.List[str]]) -> None:
+        """Extend the value with an additional value."""
+        new = self.validate_value(value)
+        self.set_value(self.value + new)
+
+
+class UserFeatureOption(UserComboOption):
+    static_choices = ['enabled', 'disabled', 'auto']
+
+    def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+        super().__init__(description, self.static_choices, value, yielding)
+        self.name: T.Optional[str] = None  # TODO: Refactor options to all store their name
+
+    def is_enabled(self) -> bool:
+        return self.value == 'enabled'
+
+    def is_disabled(self) -> bool:
+        return self.value == 'disabled'
+
+    def is_auto(self) -> bool:
+        return self.value == 'auto'
+
+if T.TYPE_CHECKING:
+    from .dependencies.detect import TV_DepIDEntry, TV_DepID
+
+
+class DependencyCacheType(enum.Enum):
+
+    OTHER = 0
+    PKG_CONFIG = 1
+    CMAKE = 2
+
+    @classmethod
+    def from_type(cls, dep: 'dependencies.Dependency') -> 'DependencyCacheType':
+        from . import dependencies
+        # As more types gain search overrides they'll need to be added here
+        if isinstance(dep, dependencies.PkgConfigDependency):
+            return cls.PKG_CONFIG
+        if isinstance(dep, dependencies.CMakeDependency):
+            return cls.CMAKE
+        return cls.OTHER
+
+
+class DependencySubCache:
+
+    def __init__(self, type_: DependencyCacheType):
+        self.types = [type_]
+        self.__cache: T.Dict[T.Tuple[str, ...], 'dependencies.Dependency'] = {}
+
+    def __getitem__(self, key: T.Tuple[str, ...]) -> 'dependencies.Dependency':
+        return self.__cache[key]
+
+    def __setitem__(self, key: T.Tuple[str, ...], value: 'dependencies.Dependency') -> None:
+        self.__cache[key] = value
+
+    def __contains__(self, key: T.Tuple[str, ...]) -> bool:
+        return key in self.__cache
+
+    def values(self) -> T.Iterable['dependencies.Dependency']:
+        return self.__cache.values()
+
+
+class DependencyCache:
+
+    """Class that stores a cache of dependencies.
+
+    This class encapsulates the fact that we need multiple keys for a
+    successful lookup, while still providing a simple get/put interface.
+    """
+
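+    # The primary key is the dependency identity (TV_DepID); the subkey is derived
+    # from the current pkg_config_path / cmake_prefix_path values, so changing those
+    # options forces a fresh lookup instead of returning a stale cache entry.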
+    def __init__(self, builtins: 'KeyedOptionDictType', for_machine: MachineChoice):
+        self.__cache = OrderedDict()  # type: T.MutableMapping[TV_DepID, DependencySubCache]
+        self.__builtins = builtins
+        self.__pkg_conf_key = OptionKey('pkg_config_path', machine=for_machine)
+        self.__cmake_key = OptionKey('cmake_prefix_path', machine=for_machine)
+
+    def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
+        data: T.Dict[str, T.List[str]] = {
+            DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins[self.__pkg_conf_key].value),
+            DependencyCacheType.CMAKE: stringlistify(self.__builtins[self.__cmake_key].value),
+            DependencyCacheType.OTHER: [],
+        }
+        assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
+        return tuple(data[type_])
+
+    def __iter__(self) -> T.Iterator['TV_DepID']:
+        return self.keys()
+
+    def put(self, key: 'TV_DepID', dep: 'dependencies.Dependency') -> None:
+        t = DependencyCacheType.from_type(dep)
+        if key not in self.__cache:
+            self.__cache[key] = DependencySubCache(t)
+        subkey = self.__calculate_subkey(t)
+        self.__cache[key][subkey] = dep
+
+    def get(self, key: 'TV_DepID') -> T.Optional['dependencies.Dependency']:
+        """Get a value from the cache.
+
+        If there is no cache entry then None will be returned.
+        """
+        try:
+            val = self.__cache[key]
+        except KeyError:
+            return None
+
+        for t in val.types:
+            subkey = self.__calculate_subkey(t)
+            try:
+                return val[subkey]
+            except KeyError:
+                pass
+        return None
+
+    def values(self) -> T.Iterator['dependencies.Dependency']:
+        for c in self.__cache.values():
+            yield from c.values()
+
+    def keys(self) -> T.Iterator['TV_DepID']:
+        return iter(self.__cache.keys())
+
+    def items(self) -> T.Iterator[T.Tuple['TV_DepID', T.List['dependencies.Dependency']]]:
+        for k, v in self.__cache.items():
+            vs = []
+            for t in v.types:
+                subkey = self.__calculate_subkey(t)
+                if subkey in v:
+                    vs.append(v[subkey])
+            yield k, vs
+
+    def clear(self) -> None:
+        self.__cache.clear()
+
+
+class CMakeStateCache:
+    """Class that stores internal CMake compiler states.
+
+    This cache is used to reduce the startup overhead of CMake by caching
+    all internal CMake compiler variables.
+    """
+
+    def __init__(self) -> None:
+        self.__cache: T.Dict[str, T.Dict[str, T.List[str]]] = {}
+        self.cmake_cache: T.Dict[str, 'CMakeCacheEntry'] = {}
+
+    def __iter__(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
+        return iter(self.__cache.items())
+
+    def items(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
+        return iter(self.__cache.items())
+
+    def update(self, language: str, variables: T.Dict[str, T.List[str]]):
+        if language not in self.__cache:
+            self.__cache[language] = {}
+        self.__cache[language].update(variables)
+
+    @property
+    def languages(self) -> T.Set[str]:
+        return set(self.__cache.keys())
+
+
+# Can't bind this near the class method it seems, sadly.
+_V = T.TypeVar('_V')
+
+# This class contains all data that must persist over multiple
+# invocations of Meson. It is roughly the same thing as
+# cmakecache.
+
+class CoreData:
+
+    def __init__(self, options: argparse.Namespace, scratch_dir: str, meson_command: T.List[str]):
+        self.lang_guids = {
+            'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+            'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+            'cpp': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+            'test': '3AC096D0-A1C2-E12C-1390-A8335801FDAB',
+            'directory': '2150E333-8FDC-42A3-9474-1A3956D46DE8',
+        }
+        self.test_guid = str(uuid.uuid4()).upper()
+        self.regen_guid = str(uuid.uuid4()).upper()
+        self.install_guid = str(uuid.uuid4()).upper()
+        self.meson_command = meson_command
+        self.target_guids = {}
+        self.version = version
+        self.options: 'KeyedOptionDictType' = {}
+        self.cross_files = self.__load_config_files(options, scratch_dir, 'cross')
+        self.compilers = PerMachine(OrderedDict(), OrderedDict())  # type: PerMachine[T.Dict[str, Compiler]]
+
+        # Set of subprojects that have already been initialized once, this is
+        # required to be stored and reloaded with the coredata, as we don't
+        # want to overwrite options for such subprojects.
+        self.initialized_subprojects: T.Set[str] = set()
+
+        # For host == build configurations these caches should be the same.
+        self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default(
+            self.is_cross_build(),
+            DependencyCache(self.options, MachineChoice.BUILD),
+            DependencyCache(self.options, MachineChoice.HOST))
+
+        self.compiler_check_cache = OrderedDict()  # type: T.Dict[CompilerCheckCacheKey, compiler.CompileResult]
+
+        # CMake cache
+        self.cmake_cache: PerMachine[CMakeStateCache] = PerMachine(CMakeStateCache(), CMakeStateCache())
+
+        # Only to print a warning if it changes between Meson invocations.
+        self.config_files = self.__load_config_files(options, scratch_dir, 'native')
+        self.builtin_options_libdir_cross_fixup()
+        self.init_builtins('')
+
+    @staticmethod
+    def __load_config_files(options: argparse.Namespace, scratch_dir: str, ftype: str) -> T.List[str]:
+        # Need to try and make the passed filenames absolute because when the
+        # files are parsed later we'll have chdir()d.
+        if ftype == 'cross':
+            filenames = options.cross_file
+        else:
+            filenames = options.native_file
+
+        if not filenames:
+            return []
+
+        found_invalid = []  # type: T.List[str]
+        missing = []        # type: T.List[str]
+        real = []           # type: T.List[str]
+        for i, f in enumerate(filenames):
+            f = os.path.expanduser(os.path.expandvars(f))
+            if os.path.exists(f):
+                if os.path.isfile(f):
+                    real.append(os.path.abspath(f))
+                    continue
+                elif os.path.isdir(f):
+                    found_invalid.append(os.path.abspath(f))
+                else:
+                    # in this case we've been passed some kind of pipe, copy
+                    # the contents of that file into the meson private (scratch)
+                    # directory so that it can be re-read when wiping/reconfiguring
+                    copy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini')
+                    with open(f, encoding='utf-8') as rf:
+                        with open(copy, 'w', encoding='utf-8') as wf:
+                            wf.write(rf.read())
+                    real.append(copy)
+
+                    # Also replace the command line argument, as the pipe
+                    # probably won't exist on reconfigure
+                    filenames[i] = copy
+                    continue
+            if sys.platform != 'win32':
+                paths = [
+                    os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')),
+                ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':')
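+                # e.g. a (hypothetical) bare name 'mycross.txt' that does not exist as a
+                # local path is also looked up under ~/.local/share/meson/cross/ and the
+                # XDG data dirs.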
+                for path in paths:
+                    path_to_try = os.path.join(path, 'meson', ftype, f)
+                    if os.path.isfile(path_to_try):
+                        real.append(path_to_try)
+                        break
+                else:
+                    missing.append(f)
+            else:
+                missing.append(f)
+
+        if missing:
+            if found_invalid:
+                mlog.log('Found invalid candidates for', ftype, 'file:', *found_invalid)
+            mlog.log('Could not find any valid candidate for', ftype, 'files:', *missing)
+            raise MesonException(f'Cannot find specified {ftype} file: {f}')
+        return real
+
+    def builtin_options_libdir_cross_fixup(self):
+        # By default set libdir to "lib" when cross compiling since
+        # getting the "system default" is always wrong on multiarch
+        # platforms as it gets a value like lib/x86_64-linux-gnu.
+        if self.cross_files:
+            BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib'
+
+    def sanitize_prefix(self, prefix):
+        prefix = os.path.expanduser(prefix)
+        if not os.path.isabs(prefix):
+            raise MesonException(f'prefix value {prefix!r} must be an absolute path')
+        if prefix.endswith('/') or prefix.endswith('\\'):
+            # On Windows we need to preserve the trailing slash if the
+            # string is of type 'C:\' because 'C:' is not an absolute path.
+            if len(prefix) == 3 and prefix[1] == ':':
+                pass
+            # If prefix is a single character, preserve it since it is
+            # the root directory.
+            elif len(prefix) == 1:
+                pass
+            else:
+                prefix = prefix[:-1]
+        return prefix
+
+    def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any) -> T.Any:
+        '''
+        If the option is an installation directory option and the value is an
+        absolute path, check that it resides within prefix and return the value
+        as a path relative to the prefix.
+
+        This way everyone can do f.ex, get_option('libdir') and be sure to get
+        the library directory relative to prefix.
+
+        .as_posix() keeps the posix-like file separators Meson uses.
+        '''
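+        # e.g. with prefix '/usr', a libdir of '/usr/lib/x86_64-linux-gnu' is stored as
+        # 'lib/x86_64-linux-gnu'; an absolute path outside the prefix raises an error.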
+        try:
+            value = PurePath(value)
+        except TypeError:
+            return value
+        if option.name.endswith('dir') and value.is_absolute() and \
+           option not in BULITIN_DIR_NOPREFIX_OPTIONS:
+            # Value must be a subdir of the prefix
+            # commonpath will always return a path in the native format, so we
+            # must use pathlib.PurePath to do the same conversion before
+            # comparing.
+            msg = ('The value of the \'{!s}\' option is \'{!s}\' which must be a '
+                   'subdir of the prefix {!r}.\nNote that if you pass a '
+                   'relative path, it is assumed to be a subdir of prefix.')
+            # os.path.commonpath doesn't understand case-insensitive filesystems,
+            # but PurePath().relative_to() does.
+            try:
+                value = value.relative_to(prefix)
+            except ValueError:
+                raise MesonException(msg.format(option, value, prefix))
+            if '..' in str(value):
+                raise MesonException(msg.format(option, value, prefix))
+        return value.as_posix()
+
+    def init_builtins(self, subproject: str) -> None:
+        # Create builtin options with default values
+        for key, opt in BUILTIN_OPTIONS.items():
+            self.add_builtin_option(self.options, key.evolve(subproject=subproject), opt)
+        for for_machine in iter(MachineChoice):
+            for key, opt in BUILTIN_OPTIONS_PER_MACHINE.items():
+                self.add_builtin_option(self.options, key.evolve(subproject=subproject, machine=for_machine), opt)
+
+    @staticmethod
+    def add_builtin_option(opts_map: 'KeyedOptionDictType', key: OptionKey,
+                           opt: 'BuiltinOption') -> None:
+        if key.subproject:
+            if opt.yielding:
+                # This option is global and not per-subproject
+                return
+            value = opts_map[key.as_root()].value
+        else:
+            value = None
+        opts_map[key] = opt.init_option(key, value, default_prefix())
+
+    def init_backend_options(self, backend_name: str) -> None:
+        if backend_name == 'ninja':
+            self.options[OptionKey('backend_max_links')] = UserIntegerOption(
+                'Maximum number of linker processes to run or 0 for no '
+                'limit',
+                (0, None, 0))
+        elif backend_name.startswith('vs'):
+            self.options[OptionKey('backend_startup_project')] = UserStringOption(
+                'Default project to execute in Visual Studio',
+                '')
+
+    def get_option(self, key: OptionKey) -> T.Union[str, int, bool, WrapMode]:
+        try:
+            v = self.options[key].value
+            if key.name == 'wrap_mode':
+                return WrapMode[v]
+            return v
+        except KeyError:
+            pass
+
+        try:
+            v = self.options[key.as_root()]
+            if v.yielding:
+                if key.name == 'wrap_mode':
+                    return WrapMode[v.value]
+                return v.value
+        except KeyError:
+            pass
+
+        raise MesonException(f'Tried to get unknown builtin option {str(key)}')
+
+    def set_option(self, key: OptionKey, value) -> None:
+        if key.is_builtin():
+            if key.name == 'prefix':
+                value = self.sanitize_prefix(value)
+            else:
+                prefix = self.options[OptionKey('prefix')].value
+                value = self.sanitize_dir_option_value(prefix, key, value)
+
+        try:
+            self.options[key].set_value(value)
+        except KeyError:
+            raise MesonException(f'Tried to set unknown builtin option {str(key)}')
+
+        if key.name == 'buildtype':
+            self._set_others_from_buildtype(value)
+        elif key.name in {'wrap_mode', 'force_fallback_for'}:
+            # We could have the system dependency cached for a dependency that
+            # is now forced to use subproject fallback. We probably could have
+            # more fine grained cache invalidation, but better be safe.
+            self.clear_deps_cache()
+
+    def clear_deps_cache(self):
+        self.deps.host.clear()
+        self.deps.build.clear()
+
+    def get_nondefault_buildtype_args(self):
+        result = []
+        value = self.options[OptionKey('buildtype')].value
+        if value == 'plain':
+            opt = '0'
+            debug = False
+        elif value == 'debug':
+            opt = '0'
+            debug = True
+        elif value == 'debugoptimized':
+            opt = '2'
+            debug = True
+        elif value == 'release':
+            opt = '3'
+            debug = False
+        elif value == 'minsize':
+            opt = 's'
+            debug = True
+        else:
+            assert value == 'custom'
+            return []
+        actual_opt = self.options[OptionKey('optimization')].value
+        actual_debug = self.options[OptionKey('debug')].value
+        if actual_opt != opt:
+            result.append(('optimization', actual_opt, opt))
+        if actual_debug != debug:
+            result.append(('debug', actual_debug, debug))
+        return result
+
+    def _set_others_from_buildtype(self, value: str) -> None:
+        if value == 'plain':
+            opt = '0'
+            debug = False
+        elif value == 'debug':
+            opt = '0'
+            debug = True
+        elif value == 'debugoptimized':
+            opt = '2'
+            debug = True
+        elif value == 'release':
+            opt = '3'
+            debug = False
+        elif value == 'minsize':
+            opt = 's'
+            debug = True
+        else:
+            assert value == 'custom'
+            return
+        self.options[OptionKey('optimization')].set_value(opt)
+        self.options[OptionKey('debug')].set_value(debug)
+
+    @staticmethod
+    def is_per_machine_option(optname: OptionKey) -> bool:
+        if optname.name in BUILTIN_OPTIONS_PER_MACHINE:
+            return True
+        return optname.lang is not None
+
+    def validate_option_value(self, option_name: OptionKey, override_value):
+        try:
+            opt = self.options[option_name]
+        except KeyError:
+            raise MesonException(f'Tried to validate unknown option {str(option_name)}')
+        try:
+            return opt.validate_value(override_value)
+        except MesonException as e:
+            raise type(e)(('Validation failed for option %s: ' % option_name) + str(e)) \
+                .with_traceback(sys.exc_info()[2])
+
+    def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.Union[str, T.List[str]]:
+        return self.options[OptionKey('args', machine=for_machine, lang=lang)].value
+
+    def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.Union[str, T.List[str]]:
+        return self.options[OptionKey('link_args', machine=for_machine, lang=lang)].value
+
+    def update_project_options(self, options: 'KeyedOptionDictType') -> None:
+        for key, value in options.items():
+            if not key.is_project():
+                continue
+            if key not in self.options:
+                self.options[key] = value
+                continue
+
+            oldval = self.options[key]
+            if type(oldval) != type(value):
+                self.options[key] = value
+            elif oldval.choices != value.choices:
+                # If the choices have changed, use the new value, but attempt
+                # to keep the old options. If they are not valid keep the new
+                # defaults but warn.
+                self.options[key] = value
+                try:
+                    value.set_value(oldval.value)
+                except MesonException:
+                    mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).')
+
+    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+        if when_building_for == MachineChoice.BUILD:
+            return False
+        return len(self.cross_files) > 0
+
+    def copy_build_options_from_regular_ones(self) -> None:
+        assert not self.is_cross_build()
+        for k in BUILTIN_OPTIONS_PER_MACHINE:
+            o = self.options[k]
+            self.options[k.as_build()].set_value(o.value)
+        for bk, bv in self.options.items():
+            if bk.machine is MachineChoice.BUILD:
+                hk = bk.as_host()
+                try:
+                    hv = self.options[hk]
+                    bv.set_value(hv.value)
+                except KeyError:
+                    continue
+
+    def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', warn_unknown: bool = True) -> None:
+        if not self.is_cross_build():
+            options = {k: v for k, v in options.items() if k.machine is not MachineChoice.BUILD}
+        # Set prefix first because it's needed to sanitize other options
+        pfk = OptionKey('prefix')
+        if pfk in options:
+            prefix = self.sanitize_prefix(options[pfk])
+            self.options[OptionKey('prefix')].set_value(prefix)
+            for key in BULITIN_DIR_NOPREFIX_OPTIONS:
+                if key not in options:
+                    self.options[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix))
+
+        unknown_options: T.List[OptionKey] = []
+        for k, v in options.items():
+            if k == pfk:
+                continue
+            elif k not in self.options:
+                unknown_options.append(k)
+            else:
+                self.set_option(k, v)
+        if unknown_options and warn_unknown:
+            unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
+            sub = f'In subproject {subproject}: ' if subproject else ''
+            mlog.warning(f'{sub}Unknown options: "{unknown_options_str}"')
+            mlog.log('The value of new options can be set with:')
+            mlog.log(mlog.bold('meson setup  --reconfigure -Dnew_option=new_value ...'))
+        if not self.is_cross_build():
+            self.copy_build_options_from_regular_ones()
+
+    def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None:
+        # Preserve order: if env.options has 'buildtype' it must come after
+        # 'optimization' if it is in default_options.
+        options: T.MutableMapping[OptionKey, T.Any]
+        if not subproject:
+            options = OrderedDict(default_options)
+            options.update(env.options)
+            env.options = options
+
+        # Create a subset of options, keeping only project and builtin
+        # options for this subproject.
+        # Language and backend specific options will be set later when adding
+        # languages and setting the backend (builtin options must be set first
+        # to know which backend we'll use).
+        options = OrderedDict()
+
+        for k, v in chain(default_options.items(), env.options.items()):
+            # If this is a subproject, don't use other subproject options
+            if k.subproject and k.subproject != subproject:
+                continue
+            # If the option is a builtin and is yielding then it's not allowed per subproject.
+            #
+            # Always test this using the HOST machine, as many builtin options
+            # are not valid for the BUILD machine, but the yielding value does
+            # not differ between them even when they are valid for both.
+            if subproject and k.is_builtin() and self.options[k.evolve(subproject='', machine=MachineChoice.HOST)].yielding:
+                continue
+            # Skip base, compiler, and backend options, they are handled when
+            # adding languages and setting backend.
+            if k.type in {OptionType.COMPILER, OptionType.BACKEND, OptionType.BASE}:
+                continue
+            options[k] = v
+
+        self.set_options(options, subproject=subproject)
+
+    def add_compiler_options(self, options: 'KeyedOptionDictType', lang: str, for_machine: MachineChoice,
+                             env: 'Environment') -> None:
+        for k, o in options.items():
+            value = env.options.get(k)
+            if value is not None:
+                o.set_value(value)
+            self.options.setdefault(k, o)
+
+    def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
+                      for_machine: MachineChoice, env: 'Environment') -> None:
+        """Add global language arguments that are needed before compiler/linker detection."""
+        from .compilers import compilers
+        # These options are all new at this point, because the compiler is
+        # responsible for adding its own options, thus calling
+        # `self.options.update()` is perfectly safe.
+        self.options.update(compilers.get_global_options(lang, comp, for_machine, env))
+
+    def process_new_compiler(self, lang: str, comp: 'Compiler', env: 'Environment') -> None:
+        from . import compilers
+
+        self.compilers[comp.for_machine][lang] = comp
+        self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env)
+
+        enabled_opts: T.List[OptionKey] = []
+        for key in comp.base_options:
+            if key in self.options:
+                continue
+            oobj = compilers.base_options[key]
+            if key in env.options:
+                oobj.set_value(env.options[key])
+                enabled_opts.append(key)
+            self.options[key] = oobj
+        self.emit_base_options_warnings(enabled_opts)
+
+    def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None:
+        if OptionKey('b_bitcode') in enabled_opts:
+            mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
+            mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)
+
+class CmdLineFileParser(configparser.ConfigParser):
+    def __init__(self) -> None:
+        # We don't want ':' as key delimiter, otherwise it would break when
+        # storing subproject options like "subproject:option=value"
+        super().__init__(delimiters=['='], interpolation=None)
+
+    def optionxform(self, option: str) -> str:
+        # Don't call str.lower() on keys
+        return option
+
+class MachineFileParser():
+    def __init__(self, filenames: T.List[str]) -> None:
+        self.parser = CmdLineFileParser()
+        self.constants = {'True': True, 'False': False}
+        self.sections = {}
+
+        self.parser.read(filenames)
+
+        # Parse [constants] first so they can be used in other sections
+        if self.parser.has_section('constants'):
+            self.constants.update(self._parse_section('constants'))
+
+        for s in self.parser.sections():
+            if s == 'constants':
+                continue
+            self.sections[s] = self._parse_section(s)
+
+    def _parse_section(self, s):
+        self.scope = self.constants.copy()
+        section = {}
+        for entry, value in self.parser.items(s):
+            if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+                raise EnvironmentException(f'Malformed variable name {entry!r} in machine file.')
+            # Windows paths...
+            value = value.replace('\\', '\\\\')
+            try:
+                ast = mparser.Parser(value, 'machinefile').parse()
+                res = self._evaluate_statement(ast.lines[0])
+            except MesonException:
+                raise EnvironmentException(f'Malformed value in machine file variable {entry!r}.')
+            except KeyError as e:
+                raise EnvironmentException(f'Undefined constant {e.args[0]!r} in machine file variable {entry!r}.')
+            section[entry] = res
+            self.scope[entry] = res
+        return section
+
+    def _evaluate_statement(self, node):
+        if isinstance(node, (mparser.StringNode)):
+            return node.value
+        elif isinstance(node, mparser.BooleanNode):
+            return node.value
+        elif isinstance(node, mparser.NumberNode):
+            return node.value
+        elif isinstance(node, mparser.ArrayNode):
+            return [self._evaluate_statement(arg) for arg in node.args.arguments]
+        elif isinstance(node, mparser.IdNode):
+            return self.scope[node.value]
+        elif isinstance(node, mparser.ArithmeticNode):
+            l = self._evaluate_statement(node.left)
+            r = self._evaluate_statement(node.right)
+            if node.operation == 'add':
+                if (isinstance(l, str) and isinstance(r, str)) or \
+                   (isinstance(l, list) and isinstance(r, list)):
+                    return l + r
+            elif node.operation == 'div':
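+                # e.g. '/opt/toolchain' / 'bin' in a machine file evaluates to '/opt/toolchain/bin'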
+                if isinstance(l, str) and isinstance(r, str):
+                    return os.path.join(l, r)
+        raise EnvironmentException('Unsupported node type')
+
+def parse_machine_files(filenames):
+    parser = MachineFileParser(filenames)
+    return parser.sections
+
+def get_cmd_line_file(build_dir: str) -> str:
+    return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
+
+def read_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
+    filename = get_cmd_line_file(build_dir)
+    if not os.path.isfile(filename):
+        return
+
+    config = CmdLineFileParser()
+    config.read(filename)
+
+    # Do a copy because config is not really a dict. options.cmd_line_options
+    # overrides values from the file.
+    d = {OptionKey.from_string(k): v for k, v in config['options'].items()}
+    d.update(options.cmd_line_options)
+    options.cmd_line_options = d
+
+    properties = config['properties']
+    if not options.cross_file:
+        options.cross_file = ast.literal_eval(properties.get('cross_file', '[]'))
+    if not options.native_file:
+        # This will be a string in the form: "['first', 'second', ...]", use
+        # literal_eval to get it into the list of strings.
+        options.native_file = ast.literal_eval(properties.get('native_file', '[]'))
+
+def write_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
+    filename = get_cmd_line_file(build_dir)
+    config = CmdLineFileParser()
+
+    properties = OrderedDict()
+    if options.cross_file:
+        properties['cross_file'] = options.cross_file
+    if options.native_file:
+        properties['native_file'] = options.native_file
+
+    config['options'] = {str(k): str(v) for k, v in options.cmd_line_options.items()}
+    config['properties'] = properties
+    with open(filename, 'w', encoding='utf-8') as f:
+        config.write(f)
+
+def update_cmd_line_file(build_dir: str, options: argparse.Namespace):
+    filename = get_cmd_line_file(build_dir)
+    config = CmdLineFileParser()
+    config.read(filename)
+    config['options'].update({str(k): str(v) for k, v in options.cmd_line_options.items()})
+    with open(filename, 'w', encoding='utf-8') as f:
+        config.write(f)
+
+def get_cmd_line_options(build_dir: str, options: argparse.Namespace) -> str:
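+    # Rebuild an approximate, shell-quoted command line (-Dkey=value, --cross-file,
+    # --native-file) from the saved cmd_line.txt merged with the passed-in options.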
+    copy = argparse.Namespace(**vars(options))
+    read_cmd_line_file(build_dir, copy)
+    cmdline = ['-D{}={}'.format(str(k), v) for k, v in copy.cmd_line_options.items()]
+    if options.cross_file:
+        cmdline += [f'--cross-file {f}' for f in options.cross_file]
+    if options.native_file:
+        cmdline += [f'--native-file {f}' for f in options.native_file]
+    return ' '.join([shlex.quote(x) for x in cmdline])
+
+def major_versions_differ(v1: str, v2: str) -> bool:
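+    # e.g. '0.59.3' vs '0.59.1' -> False (same 0.59 series); '0.59.3' vs '0.60.0' -> True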
+    return v1.split('.')[0:2] != v2.split('.')[0:2]
+
+def load(build_dir: str) -> CoreData:
+    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
+    load_fail_msg = f'Coredata file {filename!r} is corrupted. Try with a fresh build tree.'
+    try:
+        with open(filename, 'rb') as f:
+            obj = pickle.load(f)
+    except (pickle.UnpicklingError, EOFError):
+        raise MesonException(load_fail_msg)
+    except (ModuleNotFoundError, AttributeError):
+        raise MesonException(
+            f"Coredata file {filename!r} references functions or classes that don't "
+            "exist. This probably means that it was generated with an old "
+            "version of meson.")
+    if not isinstance(obj, CoreData):
+        raise MesonException(load_fail_msg)
+    if major_versions_differ(obj.version, version):
+        raise MesonVersionMismatchException(obj.version, version)
+    return obj
+
+def save(obj: CoreData, build_dir: str) -> str:
+    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
+    prev_filename = filename + '.prev'
+    tempfilename = filename + '~'
+    if major_versions_differ(obj.version, version):
+        raise MesonException('Fatal version mismatch corruption.')
+    if os.path.exists(filename):
+        import shutil
+        shutil.copyfile(filename, prev_filename)
+    with open(tempfilename, 'wb') as f:
+        pickle.dump(obj, f)
+        f.flush()
+        os.fsync(f.fileno())
+    os.replace(tempfilename, filename)
+    return filename
+
+
+def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
+    for n, b in BUILTIN_OPTIONS.items():
+        b.add_to_argparse(str(n), parser, '')
+    for n, b in BUILTIN_OPTIONS_PER_MACHINE.items():
+        b.add_to_argparse(str(n), parser, ' (just for host machine)')
+        b.add_to_argparse(str(n.as_build()), parser, ' (just for build machine)')
+    parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
+                        help='Set the value of an option, can be used several times to set multiple options.')
+
+def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[OptionKey, str]:
+    result: T.OrderedDict[OptionKey, str] = OrderedDict()
+    for o in options:
+        try:
+            (key, value) = o.split('=', 1)
+        except ValueError:
+            raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
+        k = OptionKey.from_string(key)
+        if subproject:
+            k = k.evolve(subproject=subproject)
+        result[k] = value
+    return result
+
+def parse_cmd_line_options(args: argparse.Namespace) -> None:
+    args.cmd_line_options = create_options_dict(args.projectoptions)
+
+    # Merge builtin options set with --option into the dict.
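+    # e.g. '--buildtype=release' ends up as args.cmd_line_options[OptionKey('buildtype')]
+    # with the value 'release', exactly as '-Dbuildtype=release' would.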
+    for key in chain(
+            BUILTIN_OPTIONS.keys(),
+            (k.as_build() for k in BUILTIN_OPTIONS_PER_MACHINE.keys()),
+            BUILTIN_OPTIONS_PER_MACHINE.keys(),
+    ):
+        name = str(key)
+        value = getattr(args, name, None)
+        if value is not None:
+            if key in args.cmd_line_options:
+                cmdline_name = BuiltinOption.argparse_name_to_arg(name)
+                raise MesonException(
+                    f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.')
+            args.cmd_line_options[key] = value
+            delattr(args, name)
+
+
+_U = T.TypeVar('_U', bound=UserOption[_T])
+
+class BuiltinOption(T.Generic[_T, _U]):
+
+    """Class for a builtin option type.
+
+    There are some cases that are not fully supported yet.
+    """
+
+    def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: bool = True, *,
+                 choices: T.Any = None):
+        self.opt_type = opt_type
+        self.description = description
+        self.default = default
+        self.choices = choices
+        self.yielding = yielding
+
+    def init_option(self, name: 'OptionKey', value: T.Optional[T.Any], prefix: str) -> _U:
+        """Create an instance of opt_type and return it."""
+        if value is None:
+            value = self.prefixed_default(name, prefix)
+        keywords = {'yielding': self.yielding, 'value': value}
+        if self.choices:
+            keywords['choices'] = self.choices
+        return self.opt_type(self.description, **keywords)
+
+    def _argparse_action(self) -> T.Optional[str]:
+        # If the type is a boolean, the presence of the argument in --foo form
+        # is to enable it. Disabling happens by using -Dfoo=false, which is
+        # parsed under `args.projectoptions` and does not hit this codepath.
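+        # e.g. 'werror' defaults to False, so argparse gets a bare '--werror' flag;
+        # it is switched back off with '-Dwerror=false' rather than '--werror=false'.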
+        if isinstance(self.default, bool):
+            return 'store_true'
+        return None
+
+    def _argparse_choices(self) -> T.Any:
+        if self.opt_type is UserBooleanOption:
+            return [True, False]
+        elif self.opt_type is UserFeatureOption:
+            return UserFeatureOption.static_choices
+        return self.choices
+
+    @staticmethod
+    def argparse_name_to_arg(name: str) -> str:
+        if name == 'warning_level':
+            return '--warnlevel'
+        else:
+            return '--' + name.replace('_', '-')
+
+    def prefixed_default(self, name: 'OptionKey', prefix: str = '') -> T.Any:
+        if self.opt_type in [UserComboOption, UserIntegerOption]:
+            return self.default
+        try:
+            return BULITIN_DIR_NOPREFIX_OPTIONS[name][prefix]
+        except KeyError:
+            pass
+        return self.default
+
+    def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffix: str) -> None:
+        kwargs = OrderedDict()
+
+        c = self._argparse_choices()
+        b = self._argparse_action()
+        h = self.description
+        if not b:
+            h = '{} (default: {}).'.format(h.rstrip('.'), self.prefixed_default(name))
+        else:
+            kwargs['action'] = b
+        if c and not b:
+            kwargs['choices'] = c
+        kwargs['default'] = argparse.SUPPRESS
+        kwargs['dest'] = name
+
+        cmdline_name = self.argparse_name_to_arg(name)
+        parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
+
+
+# Update `docs/markdown/Builtin-options.md` after changing the options below
+# Also update mesonlib._BUILTIN_NAMES. See the comment there for why this is required.
+BUILTIN_DIR_OPTIONS: 'KeyedOptionDictType' = OrderedDict([
+    (OptionKey('prefix'),          BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
+    (OptionKey('bindir'),          BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
+    (OptionKey('datadir'),         BuiltinOption(UserStringOption, 'Data file directory', 'share')),
+    (OptionKey('includedir'),      BuiltinOption(UserStringOption, 'Header file directory', 'include')),
+    (OptionKey('infodir'),         BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
+    (OptionKey('libdir'),          BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
+    (OptionKey('libexecdir'),      BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
+    (OptionKey('localedir'),       BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
+    (OptionKey('localstatedir'),   BuiltinOption(UserStringOption, 'Localstate data directory', 'var')),
+    (OptionKey('mandir'),          BuiltinOption(UserStringOption, 'Manual page directory', 'share/man')),
+    (OptionKey('sbindir'),         BuiltinOption(UserStringOption, 'System executable directory', 'sbin')),
+    (OptionKey('sharedstatedir'),  BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')),
+    (OptionKey('sysconfdir'),      BuiltinOption(UserStringOption, 'Sysconf data directory', 'etc')),
+])
+
+BUILTIN_CORE_OPTIONS: 'KeyedOptionDictType' = OrderedDict([
+    (OptionKey('auto_features'),   BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')),
+    (OptionKey('backend'),         BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist)),
+    (OptionKey('buildtype'),       BuiltinOption(UserComboOption, 'Build type to use', 'debug',
+                                                 choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])),
+    (OptionKey('debug'),           BuiltinOption(UserBooleanOption, 'Debug', True)),
+    (OptionKey('default_library'), BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'],
+                                                 yielding=False)),
+    (OptionKey('errorlogs'),       BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)),
+    (OptionKey('install_umask'),   BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')),
+    (OptionKey('layout'),          BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])),
+    (OptionKey('optimization'),    BuiltinOption(UserComboOption, 'Optimization level', '0', choices=['0', 'g', '1', '2', '3', 's'])),
+    (OptionKey('stdsplit'),        BuiltinOption(UserBooleanOption, 'Split stdout and stderr in test logs', True)),
+    (OptionKey('strip'),           BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
+    (OptionKey('unity'),           BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
+    (OptionKey('unity_size'),      BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
+    (OptionKey('warning_level'),   BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)),
+    (OptionKey('werror'),          BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
+    (OptionKey('wrap_mode'),       BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote'])),
+    (OptionKey('force_fallback_for'), BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
+])
+
+BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
+
+BUILTIN_OPTIONS_PER_MACHINE: 'KeyedOptionDictType' = OrderedDict([
+    (OptionKey('pkg_config_path'), BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
+    (OptionKey('cmake_prefix_path'), BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])),
+])
+
+# Special prefix-dependent defaults for installation directories that reside in
+# a path outside of the prefix in FHS and common usage.
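+# For example, with prefix=/usr the default sysconfdir is /etc rather than
+# /usr/etc, as encoded in the mapping below.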
+BULITIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = {
+    OptionKey('sysconfdir'):     {'/usr': '/etc'},
+    OptionKey('localstatedir'):  {'/usr': '/var',     '/usr/local': '/var/local'},
+    OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},
+}
+
+FORBIDDEN_TARGET_NAMES = {'clean': None,
+                          'clean-ctlist': None,
+                          'clean-gcno': None,
+                          'clean-gcda': None,
+                          'coverage': None,
+                          'coverage-text': None,
+                          'coverage-xml': None,
+                          'coverage-html': None,
+                          'phony': None,
+                          'PHONY': None,
+                          'all': None,
+                          'test': None,
+                          'benchmark': None,
+                          'install': None,
+                          'uninstall': None,
+                          'build.ninja': None,
+                          'scan-build': None,
+                          'reconfigure': None,
+                          'dist': None,
+                          'distcheck': None,
+                          }
+
diff --git a/meson/mesonbuild/dependencies/__init__.py b/meson/mesonbuild/dependencies/__init__.py
new file mode 100644
index 000000000..bd90c90e9
--- /dev/null
+++ b/meson/mesonbuild/dependencies/__init__.py
@@ -0,0 +1,275 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .boost import BoostDependency
+from .cuda import CudaDependency
+from .hdf5 import hdf5_factory
+from .base import Dependency, InternalDependency, ExternalDependency, NotFoundDependency
+from .base import (
+        ExternalLibrary, DependencyException, DependencyMethods,
+        BuiltinDependency, SystemDependency)
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .dub import DubDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import DependencyFactory
+from .detect import find_external_dependency, get_dep_identifier, packages, _packages_accept_language
+from .dev import (
+    ValgrindDependency, JDKSystemDependency, gmock_factory, gtest_factory,
+    llvm_factory, zlib_factory)
+from .coarrays import coarray_factory
+from .mpi import mpi_factory
+from .scalapack import scalapack_factory
+from .misc import (
+    BlocksDependency, OpenMPDependency, cups_factory, curses_factory, gpgme_factory,
+    libgcrypt_factory, libwmf_factory, netcdf_factory, pcap_factory, python3_factory,
+    shaderc_factory, threads_factory, ThreadDependency, intl_factory,
+)
+from .platform import AppleFrameworks
+from .qt import qt4_factory, qt5_factory, qt6_factory
+from .ui import GnuStepDependency, WxDependency, gl_factory, sdl2_factory, vulkan_factory
+
+__all__ = [
+    'Dependency',
+    'InternalDependency',
+    'ExternalDependency',
+    'SystemDependency',
+    'BuiltinDependency',
+    'NotFoundDependency',
+    'ExternalLibrary',
+    'DependencyException',
+    'DependencyMethods',
+
+    'CMakeDependency',
+    'ConfigToolDependency',
+    'DubDependency',
+    'ExtraFrameworkDependency',
+    'PkgConfigDependency',
+
+    'DependencyFactory',
+
+    'ThreadDependency',
+
+    'find_external_dependency',
+    'get_dep_identifier',
+]
+
+"""Dependency representations and discovery logic.
+
+Meson attempts to largely abstract away dependency discovery information, and
+to encapsulate that logic itself so that the DSL doesn't have too much direct
+information. There are some cases where this is impossible/undesirable, such
+as the `get_variable()` method.
+
+Meson has four primary dependency types:
+  1. pkg-config
+  2. apple frameworks
+  3. CMake
+  4. system
+
+Plus a few more niche ones.
+
+When a user calls `dependency('foo')` Meson creates a list of candidates, and
+tries those candidates in order to find one that matches the criteria
+provided by the user (such as version requirements, or optional components
+that are required.)
+
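+Conceptually, the candidates are zero-argument callables that each construct
+one dependency object, and the first candidate that reports `is_found` wins.
+A minimal sketch of that loop (illustrative only, not the actual internal
+code) looks like:
+
+```python
+def pick_first_found(candidates):
+    for make_dep in candidates:
+        dep = make_dep()
+        if dep.is_found:
+            return dep
+    return None
+```
+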
+Except to work around bugs or handle odd corner cases, pkg-config and CMake
+generally just work™, though there are exceptions. Most of this package is
+concerned with dependencies that don't (always) provide CMake and/or
+pkg-config files.
+
+For these cases one needs to write a `system` dependency. These dependencies
+descend directly from `ExternalDependency`, in their constructor they
+manually set up the necessary link and compile args (and additional
+dependencies as necessary).
+
+For example, imagine a dependency called Foo that uses an environment variable
+called `$FOO_ROOT` to point to its install root, which looks like this:
+```txt
+$FOO_ROOT
+→ include/
+→ lib/
+```
+To use Foo, you need its include directory, and you need to link to
+`lib/libfoo.ext`.
+
+You could write code that looks like:
+
+```python
+class FooSystemDependency(ExternalDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        root = os.environ.get('FOO_ROOT')
+        if root is None:
+            mlog.debug('$FOO_ROOT is unset.')
+            self.is_found = False
+            return
+
+        lib = self.clib_compiler.find_library('foo', environment, [os.path.join(root, 'lib')])
+        if lib is None:
+            mlog.debug('Could not find lib.')
+            self.is_found = False
+            return
+
+        self.compile_args.append(f'-I{os.path.join(root, "include")}')
+        self.link_args.append(lib)
+        self.is_found = True
+```
+
+This code looks for `FOO_ROOT` in the environment, handles `FOO_ROOT` being
+undefined gracefully, and then sets its `compile_args` and `link_args`. It
+also handles not finding the required lib (hopefully that doesn't happen,
+but it could if, for example, the lib is only static and shared linking is
+requested).
+
+There are a couple of things about this that still aren't ideal. For one, we
+don't want to be reading random environment variables at this point. Those
+should actually be added to `envconfig.Properties` and read in
+`environment.Environment._set_default_properties_from_env` (see how
+`BOOST_ROOT` is handled). We can also handle the `static` keyword. So
+now that becomes:
+
+```python
+class FooSystemDependency(ExternalDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        root = environment.properties[self.for_machine].foo_root
+        if root is None:
+            mlog.debug('foo_root is unset.')
+            self.is_found = False
+            return
+
+        static = mesonlib.LibType.STATIC if kwargs.get('static', False) else mesonlib.LibType.SHARED
+        lib = self.clib_compiler.find_library(
+            'foo', environment, [os.path.join(root, 'lib')], libtype=static)
+        if lib is None:
+            mlog.debug('Could not find lib.')
+            self.is_found = False
+            return
+
+        self.compile_args.append(f'-I{os.path.join(root, "include")}')
+        self.link_args.append(lib)
+        self.is_found = True
+```
+
+This is nicer in a couple of ways. First, we can properly cross compile,
+because `FOO_ROOT` can be set separately for the build and host machines. It
+also means that users can override the value in their machine files, and if
+the environment variable changes during a Meson reconfigure, Meson won't
+re-read it, which is important for reproducibility. Finally, Meson will
+figure out whether it should be finding `libfoo.so` or `libfoo.a` (or the
+platform-specific names). Things are looking pretty good now, so it can be
+added to the `packages` dict below:
+
+```python
+packages.update({
+    'foo': FooSystemDependency,
+})
+```
+
+Now, what if foo also provides pkg-config, but it's only shipped on Unices,
+or only included in very recent versions of the dependency? We can use the
+`DependencyFactory` class:
+
+```python
+foo_factory = DependencyFactory(
+    'foo',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=FooSystemDependency,
+)
+```
+
+This helper generates a default pkg-config based dependency and uses the
+`FooSystemDependency` as well. It can also handle custom finders for
+pkg-config and cmake based dependencies that need some extra help. You would
+then add `foo_factory` to `packages` instead of `FooSystemDependency`:
+
+```python
+packages.update({
+    'foo': foo_factory,
+})
+```
+
+If you have a dependency that is very complicated (such as one with multiple
+implementations), you may need to write your own factory function, as sketched
+below. There are a number of examples in this package.
+
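+A hand-written factory is just a callable with the signature documented for
+the `packages` dict at the bottom of this module. The following is a
+hypothetical sketch (reusing the imaginary Foo dependency from above), not
+code from this package:
+
+```python
+import functools
+
+def foo_factory(env: 'Environment', for_machine: 'MachineChoice',
+                kwargs: T.Dict[str, T.Any]) -> T.List[T.Callable[[], ExternalDependency]]:
+    # Each candidate is a zero-argument callable; Meson invokes them lazily,
+    # in order, until one of them reports that it was found.
+    return [
+        functools.partial(PkgConfigDependency, 'foo', env, kwargs),
+        functools.partial(FooSystemDependency, 'foo', env, kwargs),
+    ]
+```
+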
+_Note_ before we moved to factory functions it was common to use an
+`ExternalDependency` class that would instantiate different types of
+dependencies and hold the one it found. There are a number of drawbacks to
+this approach, and no new dependencies should do this.
+"""
+
+# This is a dict where the keys should be strings, and the values must be one
+# of:
+# - An ExternalDependency subclass
+# - A DependencyFactory object
+# - A callable with a signature of (Environment, MachineChoice, Dict[str, Any]) -> List[Callable[[], ExternalDependency]]
+packages.update({
+    # From dev:
+    'gtest': gtest_factory,
+    'gmock': gmock_factory,
+    'llvm': llvm_factory,
+    'valgrind': ValgrindDependency,
+    'zlib': zlib_factory,
+    'jdk': JDKSystemDependency,
+
+    'boost': BoostDependency,
+    'cuda': CudaDependency,
+
+    # per-file
+    'coarray': coarray_factory,
+    'hdf5': hdf5_factory,
+    'mpi': mpi_factory,
+    'scalapack': scalapack_factory,
+
+    # From misc:
+    'blocks': BlocksDependency,
+    'curses': curses_factory,
+    'netcdf': netcdf_factory,
+    'openmp': OpenMPDependency,
+    'python3': python3_factory,
+    'threads': threads_factory,
+    'pcap': pcap_factory,
+    'cups': cups_factory,
+    'libwmf': libwmf_factory,
+    'libgcrypt': libgcrypt_factory,
+    'gpgme': gpgme_factory,
+    'shaderc': shaderc_factory,
+    'intl': intl_factory,
+
+    # From platform:
+    'appleframeworks': AppleFrameworks,
+
+    # From ui:
+    'gl': gl_factory,
+    'gnustep': GnuStepDependency,
+    'qt4': qt4_factory,
+    'qt5': qt5_factory,
+    'qt6': qt6_factory,
+    'sdl2': sdl2_factory,
+    'wxwidgets': WxDependency,
+    'vulkan': vulkan_factory,
+})
+_packages_accept_language.update({
+    'hdf5',
+    'mpi',
+    'netcdf',
+    'openmp',
+})
diff --git a/meson/mesonbuild/dependencies/base.py b/meson/mesonbuild/dependencies/base.py
new file mode 100644
index 000000000..1882246bf
--- /dev/null
+++ b/meson/mesonbuild/dependencies/base.py
@@ -0,0 +1,573 @@
+# Copyright 2013-2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies.
+# Custom logic for several other packages are in separate files.
+import copy
+import os
+import itertools
+import typing as T
+from enum import Enum
+
+from .. import mlog
+from ..compilers import clib_langs
+from ..mesonlib import MachineChoice, MesonException, HoldableObject
+from ..mesonlib import version_compare_many
+from ..interpreterbase import FeatureDeprecated
+
+if T.TYPE_CHECKING:
+    from ..compilers.compilers import Compiler
+    from ..environment import Environment
+    from ..build import BuildTarget
+    from ..mesonlib import FileOrString
+
+
+class DependencyException(MesonException):
+    '''Exceptions raised while trying to find dependencies'''
+
+
+class DependencyMethods(Enum):
+    # Auto means to use whatever dependency-checking mechanisms are available, in whatever order Meson thinks is best.
+    AUTO = 'auto'
+    PKGCONFIG = 'pkg-config'
+    CMAKE = 'cmake'
+    # The dependency is provided by the standard library and does not need to be linked
+    BUILTIN = 'builtin'
+    # Just specify the standard link arguments, assuming the operating system provides the library.
+    SYSTEM = 'system'
+    # This is only supported on OSX - search the frameworks directory by name.
+    EXTRAFRAMEWORK = 'extraframework'
+    # Detect using the sysconfig module.
+    SYSCONFIG = 'sysconfig'
+    # Specify using a "program"-config style tool
+    CONFIG_TOOL = 'config-tool'
+    # For backwards compatibility
+    SDLCONFIG = 'sdlconfig'
+    CUPSCONFIG = 'cups-config'
+    PCAPCONFIG = 'pcap-config'
+    LIBWMFCONFIG = 'libwmf-config'
+    QMAKE = 'qmake'
+    # Misc
+    DUB = 'dub'
+
+
+DependencyTypeName = T.NewType('DependencyTypeName', str)
+
+
+class Dependency(HoldableObject):
+
+    @classmethod
+    def _process_include_type_kw(cls, kwargs: T.Dict[str, T.Any]) -> str:
+        if 'include_type' not in kwargs:
+            return 'preserve'
+        if not isinstance(kwargs['include_type'], str):
+            raise DependencyException('The include_type kwarg must be a string type')
+        if kwargs['include_type'] not in ['preserve', 'system', 'non-system']:
+            raise DependencyException("include_type may only be one of ['preserve', 'system', 'non-system']")
+        return kwargs['include_type']
+
+    def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) -> None:
+        self.name = "null"
+        self.version:  T.Optional[str] = None
+        self.language: T.Optional[str] = None # None means C-like
+        self.is_found = False
+        self.type_name = type_name
+        self.compile_args: T.List[str] = []
+        self.link_args:    T.List[str] = []
+        # Raw -L and -l arguments without manual library searching
+        # If None, self.link_args will be used
+        self.raw_link_args: T.Optional[T.List[str]] = None
+        self.sources: T.List['FileOrString'] = []
+        self.methods = process_method_kw(self.get_methods(), kwargs)
+        self.include_type = self._process_include_type_kw(kwargs)
+        self.ext_deps: T.List[Dependency] = []
+
+    def __repr__(self) -> str:
+        return f'<{self.__class__.__name__} {self.name}: {self.is_found}>'
+
+    def is_built(self) -> bool:
+        return False
+
+    def summary_value(self) -> T.Union[str, mlog.AnsiDecorator, mlog.AnsiText]:
+        if not self.found():
+            return mlog.red('NO')
+        if not self.version:
+            return mlog.green('YES')
+        return mlog.AnsiText(mlog.green('YES'), ' ', mlog.cyan(self.version))
+
+    def get_compile_args(self) -> T.List[str]:
+        if self.include_type == 'system':
+            converted = []
+            for i in self.compile_args:
+                if i.startswith('-I') or i.startswith('/I'):
+                    converted += ['-isystem' + i[2:]]
+                else:
+                    converted += [i]
+            return converted
+        if self.include_type == 'non-system':
+            converted = []
+            for i in self.compile_args:
+                if i.startswith('-isystem'):
+                    converted += ['-I' + i[8:]]
+                else:
+                    converted += [i]
+            return converted
+        return self.compile_args
+
+    def get_all_compile_args(self) -> T.List[str]:
+        """Get the compile arguments from this dependency and it's sub dependencies."""
+        return list(itertools.chain(self.get_compile_args(),
+                                    *[d.get_all_compile_args() for d in self.ext_deps]))
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        if raw and self.raw_link_args is not None:
+            return self.raw_link_args
+        return self.link_args
+
+    def get_all_link_args(self) -> T.List[str]:
+        """Get the link arguments from this dependency and it's sub dependencies."""
+        return list(itertools.chain(self.get_link_args(),
+                                    *[d.get_all_link_args() for d in self.ext_deps]))
+
+    def found(self) -> bool:
+        return self.is_found
+
+    def get_sources(self) -> T.List['FileOrString']:
+        """Source files that need to be added to the target.
+        As an example, gtest-all.cc when using GTest."""
+        return self.sources
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.AUTO]
+
+    def get_name(self) -> str:
+        return self.name
+
+    def get_version(self) -> str:
+        if self.version:
+            return self.version
+        else:
+            return 'unknown'
+
+    def get_include_type(self) -> str:
+        return self.include_type
+
+    def get_exe_args(self, compiler: 'Compiler') -> T.List[str]:
+        return []
+
+    def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
+        raise DependencyException(f'{self.name!r} is not a pkgconfig dependency')
+
+    def get_configtool_variable(self, variable_name: str) -> str:
+        raise DependencyException(f'{self.name!r} is not a config-tool dependency')
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> 'Dependency':
+        """Create a new dependency that contains part of the parent dependency.
+
+        The following options can be inherited:
+            links -- all link_with arguments
+            includes -- all include_directory and -I/-isystem calls
+            sources -- any source, header, or generated sources
+            compile_args -- any compile args
+            link_args -- any link args
+
+        Additionally, the new dependency will have the version parameter of its
+        parent (if any), and the requested values of any sub-dependencies will be
+        added as well.
+        """
+        raise RuntimeError('Unreachable code in partial_dependency called')
+
+    def _add_sub_dependency(self, deplist: T.Iterable[T.Callable[[], 'Dependency']]) -> bool:
+        """Add an internal depdency from a list of possible dependencies.
+
+        This method is intended to make it easier to add additional
+        dependencies to another dependency internally.
+
+        Returns true if the dependency was successfully added, false
+        otherwise.
+        """
+        for d in deplist:
+            dep = d()
+            if dep.is_found:
+                self.ext_deps.append(dep)
+                return True
+        return False
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'No default provided for dependency {self!r}, which is not pkg-config, cmake, or config-tool based.')
+
+    def generate_system_dependency(self, include_type: str) -> 'Dependency':
+        new_dep = copy.deepcopy(self)
+        new_dep.include_type = self._process_include_type_kw({'include_type': include_type})
+        return new_dep
+
+class InternalDependency(Dependency):
+    def __init__(self, version: str, incdirs: T.List[str], compile_args: T.List[str],
+                 link_args: T.List[str], libraries: T.List['BuildTarget'],
+                 whole_libraries: T.List['BuildTarget'], sources: T.List['FileOrString'],
+                 ext_deps: T.List[Dependency], variables: T.Dict[str, T.Any]):
+        super().__init__(DependencyTypeName('internal'), {})
+        self.version = version
+        self.is_found = True
+        self.include_directories = incdirs
+        self.compile_args = compile_args
+        self.link_args = link_args
+        self.libraries = libraries
+        self.whole_libraries = whole_libraries
+        self.sources = sources
+        self.ext_deps = ext_deps
+        self.variables = variables
+
+    def __deepcopy__(self, memo: T.Dict[int, 'InternalDependency']) -> 'InternalDependency':
+        result = self.__class__.__new__(self.__class__)
+        assert isinstance(result, InternalDependency)
+        memo[id(self)] = result
+        for k, v in self.__dict__.items():
+            if k in ['libraries', 'whole_libraries']:
+                setattr(result, k, copy.copy(v))
+            else:
+                setattr(result, k, copy.deepcopy(v, memo))
+        return result
+
+    def summary_value(self) -> mlog.AnsiDecorator:
+        # Omit the version.  Most of the time it will be just the project
+        # version, which is uninteresting in the summary.
+        return mlog.green('YES')
+
+    def is_built(self) -> bool:
+        if self.sources or self.libraries or self.whole_libraries:
+            return True
+        return any(d.is_built() for d in self.ext_deps)
+
+    def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
+        raise DependencyException('Method "get_pkgconfig_variable()" is '
+                                  'invalid for an internal dependency')
+
+    def get_configtool_variable(self, variable_name: str) -> str:
+        raise DependencyException('Method "get_configtool_variable()" is '
+                                  'invalid for an internal dependency')
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> 'InternalDependency':
+        final_compile_args = self.compile_args.copy() if compile_args else []
+        final_link_args = self.link_args.copy() if link_args else []
+        final_libraries = self.libraries.copy() if links else []
+        final_whole_libraries = self.whole_libraries.copy() if links else []
+        final_sources = self.sources.copy() if sources else []
+        final_includes = self.include_directories.copy() if includes else []
+        final_deps = [d.get_partial_dependency(
+            compile_args=compile_args, link_args=link_args, links=links,
+            includes=includes, sources=sources) for d in self.ext_deps]
+        return InternalDependency(
+            self.version, final_includes, final_compile_args,
+            final_link_args, final_libraries, final_whole_libraries,
+            final_sources, final_deps, self.variables)
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+        val = self.variables.get(internal, default_value)
+        if val is not None:
+            # TODO: Try removing this assert by better typing self.variables
+            if isinstance(val, str):
+                return val
+            if isinstance(val, list):
+                for i in val:
+                    assert isinstance(i, str)
+                return val
+        raise DependencyException(f'Could not get an internal variable and no default provided for {self!r}')
+
+    def generate_link_whole_dependency(self) -> Dependency:
+        new_dep = copy.deepcopy(self)
+        new_dep.whole_libraries += new_dep.libraries
+        new_dep.libraries = []
+        return new_dep
+
+class HasNativeKwarg:
+    def __init__(self, kwargs: T.Dict[str, T.Any]):
+        self.for_machine = self.get_for_machine_from_kwargs(kwargs)
+
+    def get_for_machine_from_kwargs(self, kwargs: T.Dict[str, T.Any]) -> MachineChoice:
+        return MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
+
+class ExternalDependency(Dependency, HasNativeKwarg):
+    def __init__(self, type_name: DependencyTypeName, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        Dependency.__init__(self, type_name, kwargs)
+        self.env = environment
+        self.name = type_name # default
+        self.is_found = False
+        self.language = language
+        self.version_reqs = kwargs.get('version', None)
+        if isinstance(self.version_reqs, str):
+            self.version_reqs = [self.version_reqs]
+        self.required = kwargs.get('required', True)
+        self.silent = kwargs.get('silent', False)
+        self.static = kwargs.get('static', False)
+        if not isinstance(self.static, bool):
+            raise DependencyException('Static keyword must be boolean')
+        # Is this dependency to be run on the build platform?
+        HasNativeKwarg.__init__(self, kwargs)
+        self.clib_compiler = detect_compiler(self.name, environment, self.for_machine, self.language)
+
+    def get_compiler(self) -> 'Compiler':
+        return self.clib_compiler
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> Dependency:
+        new = copy.copy(self)
+        if not compile_args:
+            new.compile_args = []
+        if not link_args:
+            new.link_args = []
+        if not sources:
+            new.sources = []
+        if not includes:
+            pass # TODO maybe filter compile_args?
+
+        return new
+
+    def log_details(self) -> str:
+        return ''
+
+    def log_info(self) -> str:
+        return ''
+
+    def log_tried(self) -> str:
+        return ''
+
+    # Check if dependency version meets the requirements
+    def _check_version(self) -> None:
+        if not self.is_found:
+            return
+
+        if self.version_reqs:
+            # an unknown version can never satisfy any requirement
+            if not self.version:
+                self.is_found = False
+                found_msg: mlog.TV_LoggableList = []
+                found_msg += ['Dependency', mlog.bold(self.name), 'found:']
+                found_msg += [mlog.red('NO'), 'unknown version, but need:', self.version_reqs]
+                mlog.log(*found_msg)
+
+                if self.required:
+                    m = f'Unknown version of dependency {self.name!r}, but need {self.version_reqs!r}.'
+                    raise DependencyException(m)
+
+            else:
+                (self.is_found, not_found, found) = \
+                    version_compare_many(self.version, self.version_reqs)
+                if not self.is_found:
+                    found_msg = ['Dependency', mlog.bold(self.name), 'found:']
+                    found_msg += [mlog.red('NO'),
+                                  'found', mlog.normal_cyan(self.version), 'but need:',
+                                  mlog.bold(', '.join([f"'{e}'" for e in not_found]))]
+                    if found:
+                        found_msg += ['; matched:',
+                                      ', '.join([f"'{e}'" for e in found])]
+                    mlog.log(*found_msg)
+
+                    if self.required:
+                        m = 'Invalid version of dependency, need {!r} {!r} found {!r}.'
+                        raise DependencyException(m.format(self.name, not_found, self.version))
+                    return
+
+
+class NotFoundDependency(Dependency):
+    def __init__(self, environment: 'Environment') -> None:
+        super().__init__(DependencyTypeName('not-found'), {})
+        self.env = environment
+        self.name = 'not-found'
+        self.is_found = False
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> 'NotFoundDependency':
+        return copy.copy(self)
+
+
+class ExternalLibrary(ExternalDependency):
+    def __init__(self, name: str, link_args: T.List[str], environment: 'Environment',
+                 language: str, silent: bool = False) -> None:
+        super().__init__(DependencyTypeName('library'), environment, {}, language=language)
+        self.name = name
+        self.language = language
+        self.is_found = False
+        if link_args:
+            self.is_found = True
+            self.link_args = link_args
+        if not silent:
+            if self.is_found:
+                mlog.log('Library', mlog.bold(name), 'found:', mlog.green('YES'))
+            else:
+                mlog.log('Library', mlog.bold(name), 'found:', mlog.red('NO'))
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        '''
+        External libraries detected using a compiler must only be used with
+        compatible code. For instance, Vala libraries (.vapi files) cannot be
+        used with C code, and not all Rust library types can be linked with
+        C-like code. Note that C++ libraries *can* be linked with C code with
+        a C++ linker (and vice-versa).
+        '''
+        # Using a vala library in a non-vala target, or a non-vala library in a vala target
+        # XXX: This should be extended to other non-C linkers such as Rust
+        if (self.language == 'vala' and language != 'vala') or \
+           (language == 'vala' and self.language != 'vala'):
+            return []
+        return super().get_link_args(language=language, raw=raw)
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> 'ExternalLibrary':
+        # External library only has link_args, so ignore the rest of the
+        # interface.
+        new = copy.copy(self)
+        if not link_args:
+            new.link_args = []
+        return new
+
+
+def sort_libpaths(libpaths: T.List[str], refpaths: T.List[str]) -> T.List[str]:
+    """Sort  according to 
+
+    It is intended to be used to sort -L flags returned by pkg-config.
+    Pkg-config returns flags in random order which cannot be relied on.
+    """
+    if len(refpaths) == 0:
+        return list(libpaths)
+
+    def key_func(libpath: str) -> T.Tuple[int, int]:
+        common_lengths: T.List[int] = []
+        for refpath in refpaths:
+            try:
+                common_path: str = os.path.commonpath([libpath, refpath])
+            except ValueError:
+                common_path = ''
+            common_lengths.append(len(common_path))
+        max_length = max(common_lengths)
+        max_index = common_lengths.index(max_length)
+        reversed_max_length = len(refpaths[max_index]) - max_length
+        return (max_index, reversed_max_length)
+    return sorted(libpaths, key=key_func)
+
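+# Illustration (hypothetical paths): with refpaths = ['/opt/foo/lib/pkgconfig'],
+# sort_libpaths(['/usr/lib', '/opt/foo/lib'], refpaths) returns
+# ['/opt/foo/lib', '/usr/lib'], since '/opt/foo/lib' shares the longest common
+# path with the reference path.
+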
+def strip_system_libdirs(environment: 'Environment', for_machine: MachineChoice, link_args: T.List[str]) -> T.List[str]:
+    """Remove -L arguments.
+
+    leaving these in will break builds where a user has a version of a library
+    in the system path, and a different version not in the system path if they
+    want to link against the non-system path version.
+    """
+    exclude = {f'-L{p}' for p in environment.get_compiler_system_dirs(for_machine)}
+    return [l for l in link_args if l not in exclude]
+
+def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs: T.Dict[str, T.Any]) -> T.List[DependencyMethods]:
+    method = kwargs.get('method', 'auto')  # type: T.Union[DependencyMethods, str]
+    if isinstance(method, DependencyMethods):
+        return [method]
+    # TODO: try/except?
+    if method not in [e.value for e in DependencyMethods]:
+        raise DependencyException(f'method {method!r} is invalid')
+    method = DependencyMethods(method)
+
+    # This maps per-tool config methods, which are deprecated, to the new
+    # generic CONFIG_TOOL value.
+    if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG,
+                  DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]:
+        FeatureDeprecated.single_use(f'Configuration method {method.value}', '0.44', 'Use "config-tool" instead.')
+        method = DependencyMethods.CONFIG_TOOL
+    if method is DependencyMethods.QMAKE:
+        FeatureDeprecated.single_use('Configuration method "qmake"', '0.58', 'Use "config-tool" instead.')
+        method = DependencyMethods.CONFIG_TOOL
+
+    # Set the detection method. If the method is set to auto, use any available method.
+    # If method is set to a specific string, allow only that detection method.
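+    # For example, a build-file call like dependency('foo', method: 'pkg-config')
+    # (illustrative) would restrict detection to the pkg-config machinery.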
+    if method == DependencyMethods.AUTO:
+        methods = list(possible)
+    elif method in possible:
+        methods = [method]
+    else:
+        raise DependencyException(
+            'Unsupported detection method: {}, allowed methods are {}'.format(
+                method.value,
+                mlog.format_list([x.value for x in [DependencyMethods.AUTO] + list(possible)])))
+
+    return methods
+
+def detect_compiler(name: str, env: 'Environment', for_machine: MachineChoice,
+                    language: T.Optional[str]) -> T.Optional['Compiler']:
+    """Given a language and environment find the compiler used."""
+    compilers = env.coredata.compilers[for_machine]
+
+    # Set the compiler for this dependency if a language is specified,
+    # else try to pick something that looks usable.
+    if language:
+        if language not in compilers:
+            m = name.capitalize() + ' requires a {0} compiler, but ' \
+                '{0} is not in the list of project languages'
+            raise DependencyException(m.format(language.capitalize()))
+        return compilers[language]
+    else:
+        for lang in clib_langs:
+            try:
+                return compilers[lang]
+            except KeyError:
+                continue
+    return None
+
+
+class SystemDependency(ExternalDependency):
+
+    """Dependency base for System type dependencies."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None) -> None:
+        super().__init__(DependencyTypeName('system'), env, kwargs, language=language)
+        self.name = name
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.SYSTEM]
+
+    def log_tried(self) -> str:
+        return 'system'
+
+
+class BuiltinDependency(ExternalDependency):
+
+    """Dependency base for Builtin type dependencies."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None) -> None:
+        super().__init__(DependencyTypeName('builtin'), env, kwargs, language=language)
+        self.name = name
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.BUILTIN]
+
+    def log_tried(self) -> str:
+        return 'builtin'
diff --git a/meson/mesonbuild/dependencies/boost.py b/meson/mesonbuild/dependencies/boost.py
new file mode 100644
index 000000000..4e5af907e
--- /dev/null
+++ b/meson/mesonbuild/dependencies/boost.py
@@ -0,0 +1,1080 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import functools
+import typing as T
+from pathlib import Path
+
+from .. import mlog
+from .. import mesonlib
+from ..environment import Environment
+
+from .base import DependencyException, SystemDependency
+from .pkgconfig import PkgConfigDependency
+from .misc import threads_factory
+
+if T.TYPE_CHECKING:
+    from ..environment import Properties
+
+# On windows 3 directory layouts are supported:
+# * The default layout (versioned) installed:
+#   - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
+#   - $BOOST_ROOT/lib/*.lib
+# * The non-default layout (system) installed:
+#   - $BOOST_ROOT/include/boost/*.hpp
+#   - $BOOST_ROOT/lib/*.lib
+# * The pre-built binaries from sf.net:
+#   - $BOOST_ROOT/boost/*.hpp
+#   - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
+#
+# Note that we should also try to support:
+# mingw-w64 / Windows : libboost_<module>-mt.a            (location = /mingw64/lib/)
+#                       libboost_<module>-mt.dll.a
+#
+# The `modules` argument accepts library names. This is because every module that
+# has libraries to link against also has multiple options regarding how to
+# link. See for example:
+# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
+# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
+# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
+
+# **On Unix**, official packaged versions of boost libraries follow the following schemes:
+#
+# Linux / Debian:   libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / Red Hat:  libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Win   / Cygwin:   libboost_<module>.dll.a                                 (location = /usr/lib)
+#                   libboost_<module>.a
+#                   cygboost_<module>_1_64.dll                              (location = /usr/bin)
+# Win   / VS:       boost_<module>-vc<ver>-mt[-gd]-<arch>-1_67.dll          (location = C:/local/boost_1_67_0)
+# Mac   / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib    (location = /usr/local/lib)
+# Mac   / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib    (location = /opt/local/lib)
+#
+# It's not clear that any other abi tags (e.g. -gd) are used in official packages.
+#
+# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
+#
+# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36".
+# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming)
+# However, it's not clear that any Unix distribution follows this scheme.
+# Furthermore, the boost documentation for Unix above uses examples from Windows like
+#   "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be aimed more at Windows.
+#
+# We follow the following strategy for finding modules:
+# A) Detect potential boost root directories (uses also BOOST_ROOT env var)
+# B) Foreach candidate
+#   1. Look for the boost headers (boost/version.hpp)
+#   2. Find all boost libraries
+#     2.1 Add all libraries in lib*
+#     2.2 Filter out non boost libraries
+#     2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
+#     2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
+#   3. Select the libraries matching the requested modules
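+#
+# As an illustration of the naming schemes above (a hypothetical filename): a
+# file named libboost_regex-mt-x64-1_71.so.1.71.0 is parsed by the
+# BoostLibraryFile class below into mod_name='boost_regex', mt=True,
+# arch='x64', version_lib='1_71' and static=False.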
+
+@functools.total_ordering
+class BoostIncludeDir():
+    def __init__(self, path: Path, version_int: int):
+        self.path = path
+        self.version_int = version_int
+        major = int(self.version_int / 100000)
+        minor = int((self.version_int / 100) % 1000)
+        patch = int(self.version_int % 100)
+        self.version = f'{major}.{minor}.{patch}'
+        self.version_lib = f'{major}_{minor}'
+
+    def __repr__(self) -> str:
+        return f'<BoostIncludeDir: {self.version} -- {self.path}>'
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostIncludeDir):
+            return (self.version_int, self.path) < (other.version_int, other.path)
+        return NotImplemented
+
+@functools.total_ordering
+class BoostLibraryFile():
+    # Python libraries are special because of the included
+    # minor version in the module name.
+    boost_python_libs = ['boost_python', 'boost_numpy']
+    reg_python_mod_split = re.compile(r'(boost_[a-zA-Z]+)([0-9]*)')
+
+    reg_abi_tag = re.compile(r'^s?g?y?d?p?n?$')
+    reg_ver_tag = re.compile(r'^[0-9_]+$')
+
+    def __init__(self, path: Path):
+        self.path = path
+        self.name = self.path.name
+
+        # Initialize default properties
+        self.static = False
+        self.toolset = ''
+        self.arch = ''
+        self.version_lib = ''
+        self.mt = True
+
+        self.runtime_static = False
+        self.runtime_debug = False
+        self.python_debug = False
+        self.debug = False
+        self.stlport = False
+        self.deprecated_iostreams = False
+
+        # Post process the library name
+        name_parts = self.name.split('.')
+        self.basename = name_parts[0]
+        self.suffixes = name_parts[1:]
+        self.vers_raw = [x for x in self.suffixes if x.isdigit()]
+        self.suffixes = [x for x in self.suffixes if not x.isdigit()]
+        self.nvsuffix = '.'.join(self.suffixes)  # Used for detecting the library type
+        self.nametags = self.basename.split('-')
+        self.mod_name = self.nametags[0]
+        if self.mod_name.startswith('lib'):
+            self.mod_name = self.mod_name[3:]
+
+        # Set library version if possible
+        if len(self.vers_raw) >= 2:
+            self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])
+
+        # Detecting library type
+        if self.nvsuffix in ['so', 'dll', 'dll.a', 'dll.lib', 'dylib']:
+            self.static = False
+        elif self.nvsuffix in ['a', 'lib']:
+            self.static = True
+        else:
+            raise DependencyException(f'Unable to process library extension "{self.nvsuffix}" ({self.path})')
+
+        # boost_.lib is the dll import library
+        if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
+            self.static = False
+
+        # Process tags
+        tags = self.nametags[1:]
+        # Filter out the python version tag and fix modname
+        if self.is_python_lib():
+            tags = self.fix_python_name(tags)
+        if not tags:
+            return
+
+        # Without any tags mt is assumed, however, an absence of mt in the name
+        # with tags present indicates that the lib was built without mt support
+        self.mt = False
+        for i in tags:
+            if i == 'mt':
+                self.mt = True
+            elif len(i) == 3 and i[1:] in ['32', '64']:
+                self.arch = i
+            elif BoostLibraryFile.reg_abi_tag.match(i):
+                self.runtime_static = 's' in i
+                self.runtime_debug = 'g' in i
+                self.python_debug = 'y' in i
+                self.debug = 'd' in i
+                self.stlport = 'p' in i
+                self.deprecated_iostreams = 'n' in i
+            elif BoostLibraryFile.reg_ver_tag.match(i):
+                self.version_lib = i
+            else:
+                self.toolset = i
+
+    def __repr__(self) -> str:
+        return f'<LIB: {self.abitag} {self.mod_name:<32} {self.path}>'
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostLibraryFile):
+            return (
+                self.mod_name, self.static, self.version_lib, self.arch,
+                not self.mt, not self.runtime_static,
+                not self.debug, self.runtime_debug, self.python_debug,
+                self.stlport, self.deprecated_iostreams,
+                self.name,
+            ) < (
+                other.mod_name, other.static, other.version_lib, other.arch,
+                not other.mt, not other.runtime_static,
+                not other.debug, other.runtime_debug, other.python_debug,
+                other.stlport, other.deprecated_iostreams,
+                other.name,
+            )
+        return NotImplemented
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, BoostLibraryFile):
+            return self.name == other.name
+        return NotImplemented
+
+    def __hash__(self) -> int:
+        return hash(self.name)
+
+    @property
+    def abitag(self) -> str:
+        abitag = ''
+        abitag += 'S' if self.static else '-'
+        abitag += 'M' if self.mt else '-'
+        abitag += ' '
+        abitag += 's' if self.runtime_static else '-'
+        abitag += 'g' if self.runtime_debug else '-'
+        abitag += 'y' if self.python_debug else '-'
+        abitag += 'd' if self.debug else '-'
+        abitag += 'p' if self.stlport else '-'
+        abitag += 'n' if self.deprecated_iostreams else '-'
+        abitag += ' ' + (self.arch or '???')
+        abitag += ' ' + (self.toolset or '?')
+        abitag += ' ' + (self.version_lib or 'x_xx')
+        return abitag
+
+    def is_boost(self) -> bool:
+        return any([self.name.startswith(x) for x in ['libboost_', 'boost_']])
+
+    def is_python_lib(self) -> bool:
+        return any([self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs])
+
+    def fix_python_name(self, tags: T.List[str]) -> T.List[str]:
+        # Handle the boost_python naming madness.
+        # See https://github.com/mesonbuild/meson/issues/4788 for some distro
+        # specific naming variations.
+        other_tags = []  # type: T.List[str]
+
+        # Split the current modname into the base name and the version
+        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+        cur_name = m_cur.group(1)
+        cur_vers = m_cur.group(2)
+
+        # Update the current version string if the new version string is longer
+        def update_vers(new_vers: str) -> None:
+            nonlocal cur_vers
+            new_vers = new_vers.replace('_', '')
+            new_vers = new_vers.replace('.', '')
+            if not new_vers.isdigit():
+                return
+            if len(new_vers) > len(cur_vers):
+                cur_vers = new_vers
+
+        for i in tags:
+            if i.startswith('py'):
+                update_vers(i[2:])
+            elif i.isdigit():
+                update_vers(i)
+            elif len(i) >= 3 and i[0].isdigit() and i[2].isdigit() and i[1] == '.':
+                update_vers(i)
+            else:
+                other_tags += [i]
+
+        self.mod_name = cur_name + cur_vers
+        return other_tags
+
+    def mod_name_matches(self, mod_name: str) -> bool:
+        if self.mod_name == mod_name:
+            return True
+        if not self.is_python_lib():
+            return False
+
+        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+        m_arg = BoostLibraryFile.reg_python_mod_split.match(mod_name)
+
+        if not m_cur or not m_arg:
+            return False
+
+        if m_cur.group(1) != m_arg.group(1):
+            return False
+
+        cur_vers = m_cur.group(2)
+        arg_vers = m_arg.group(2)
+
+        # Always assume python 2 if nothing is specified
+        if not arg_vers:
+            arg_vers = '2'
+
+        return cur_vers.startswith(arg_vers)
+
+    def version_matches(self, version_lib: str) -> bool:
+        # If no version tag is present, assume that it fits
+        if not self.version_lib or not version_lib:
+            return True
+        return self.version_lib == version_lib
+
+    def arch_matches(self, arch: str) -> bool:
+        # If no arch tag is present, assume that it fits
+        if not self.arch or not arch:
+            return True
+        return self.arch == arch
+
+    def vscrt_matches(self, vscrt: str) -> bool:
+        # If no vscrt tag present, assume that it fits  ['/MD', '/MDd', '/MT', '/MTd']
+        if not vscrt:
+            return True
+        if vscrt in ['/MD', '-MD']:
+            return not self.runtime_static and not self.runtime_debug
+        elif vscrt in ['/MDd', '-MDd']:
+            return not self.runtime_static and self.runtime_debug
+        elif vscrt in ['/MT', '-MT']:
+            return (self.runtime_static or not self.static) and not self.runtime_debug
+        elif vscrt in ['/MTd', '-MTd']:
+            return (self.runtime_static or not self.static) and self.runtime_debug
+
+        mlog.warning(f'Boost: unknown vscrt tag {vscrt}. This may cause the compilation to fail. Please consider reporting this as a bug.', once=True)
+        return True
+
+    def get_compiler_args(self) -> T.List[str]:
+        args = []  # type: T.List[str]
+        if self.mod_name in boost_libraries:
+            libdef = boost_libraries[self.mod_name]  # type: BoostLibrary
+            if self.static:
+                args += libdef.static
+            else:
+                args += libdef.shared
+            if self.mt:
+                args += libdef.multi
+            else:
+                args += libdef.single
+        return args
+
+    def get_link_args(self) -> T.List[str]:
+        return [self.path.as_posix()]
+
+class BoostDependency(SystemDependency):
+    def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('boost', environment, kwargs, language='cpp')
+        buildtype = environment.coredata.get_option(mesonlib.OptionKey('buildtype'))
+        assert isinstance(buildtype, str)
+        self.debug = buildtype.startswith('debug')
+        self.multithreading = kwargs.get('threading', 'multi') == 'multi'
+
+        self.boost_root = None  # type: T.Optional[Path]
+        self.explicit_static = 'static' in kwargs
+
+        # Extract and validate modules
+        self.modules = mesonlib.extract_as_list(kwargs, 'modules')  # type: T.List[str]
+        for i in self.modules:
+            if not isinstance(i, str):
+                raise DependencyException('Boost module argument is not a string.')
+            if i.startswith('boost_'):
+                raise DependencyException('Boost modules must be passed without the boost_ prefix')
+
+        self.modules_found = []    # type: T.List[str]
+        self.modules_missing = []  # type: T.List[str]
+
+        # Do we need threads?
+        if 'thread' in self.modules:
+            if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+                self.is_found = False
+                return
+
+        # Try figuring out the architecture tag
+        self.arch = environment.machines[self.for_machine].cpu_family
+        self.arch = boost_arch_map.get(self.arch, None)
+
+        # First, look for paths specified in a machine file
+        props = self.env.properties[self.for_machine]
+        if any(x in self.env.properties[self.for_machine] for x in
+               ['boost_includedir', 'boost_librarydir', 'boost_root']):
+            self.detect_boost_machine_file(props)
+            return
+
+        # Finally, look for paths from .pc files and from searching the filesystem
+        self.detect_roots()
+
+    def check_and_set_roots(self, roots: T.List[Path]) -> None:
+        roots = list(mesonlib.OrderedSet(roots))
+        for j in roots:
+            #   1. Look for the boost headers (boost/version.hpp)
+            mlog.debug(f'Checking potential boost root {j.as_posix()}')
+            inc_dirs = self.detect_inc_dirs(j)
+            inc_dirs = sorted(inc_dirs, reverse=True)  # Prefer the newer versions
+
+            # Early abort when boost is not found
+            if not inc_dirs:
+                continue
+
+            lib_dirs = self.detect_lib_dirs(j)
+            self.is_found = self.run_check(inc_dirs, lib_dirs)
+            if self.is_found:
+                self.boost_root = j
+                break
+
+    def detect_boost_machine_file(self, props: 'Properties') -> None:
+        """Detect boost with values in the machine file or environment.
+
+        The machine file values are defaulted to the environment values.
+        """
+        # XXX: if we had a TypedDict we wouldn't need this
+        incdir = props.get('boost_includedir')
+        assert incdir is None or isinstance(incdir, str)
+        libdir = props.get('boost_librarydir')
+        assert libdir is None or isinstance(libdir, str)
+
+        if incdir and libdir:
+            inc_dir = Path(incdir)
+            lib_dir = Path(libdir)
+
+            if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+                raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute')
+
+            mlog.debug('Trying to find boost with:')
+            mlog.debug(f'  - boost_includedir = {inc_dir}')
+            mlog.debug(f'  - boost_librarydir = {lib_dir}')
+
+            return self.detect_split_root(inc_dir, lib_dir)
+
+        elif incdir or libdir:
+            raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)')
+
+        rootdir = props.get('boost_root')
+        # It shouldn't be possible to get here without something in boost_root
+        assert rootdir
+
+        raw_paths = mesonlib.stringlistify(rootdir)
+        paths = [Path(x) for x in raw_paths]
+        if paths and any([not x.is_absolute() for x in paths]):
+            raise DependencyException('boost_root path given in machine file must be absolute')
+
+        self.check_and_set_roots(paths)
+
+    def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
+        mlog.debug('  - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
+        mlog.debug('  - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
+
+        #   2. Find all boost libraries
+        libs = []  # type: T.List[BoostLibraryFile]
+        for i in lib_dirs:
+            libs = self.detect_libraries(i)
+            if libs:
+                mlog.debug(f'  - found boost library dir: {i}')
+                # mlog.debug('  - raw library list:')
+                # for j in libs:
+                #     mlog.debug('    - {}'.format(j))
+                break
+        libs = sorted(set(libs))
+
+        modules = ['boost_' + x for x in self.modules]
+        for inc in inc_dirs:
+            mlog.debug(f'  - found boost {inc.version} include dir: {inc.path}')
+            f_libs = self.filter_libraries(libs, inc.version_lib)
+
+            mlog.debug('  - filtered library list:')
+            for j in f_libs:
+                mlog.debug(f'    - {j}')
+
+            #   3. Select the libraries matching the requested modules
+            not_found = []  # type: T.List[str]
+            selected_modules = []  # type: T.List[BoostLibraryFile]
+            for mod in modules:
+                found = False
+                for l in f_libs:
+                    if l.mod_name_matches(mod):
+                        selected_modules += [l]
+                        found = True
+                        break
+                if not found:
+                    not_found += [mod]
+
+            # log the result
+            mlog.debug('  - found:')
+            comp_args = []  # type: T.List[str]
+            link_args = []  # type: T.List[str]
+            for j in selected_modules:
+                c_args = j.get_compiler_args()
+                l_args = j.get_link_args()
+                mlog.debug('    - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args)))
+                comp_args += c_args
+                link_args += l_args
+
+            comp_args = list(set(comp_args))
+            link_args = list(set(link_args))
+
+            self.modules_found = [x.mod_name for x in selected_modules]
+            self.modules_found = [x[6:] for x in self.modules_found]
+            self.modules_found = sorted(set(self.modules_found))
+            self.modules_missing = not_found
+            self.modules_missing = [x[6:] for x in self.modules_missing]
+            self.modules_missing = sorted(set(self.modules_missing))
+
+            # if we found all modules we are done
+            if not not_found:
+                self.version = inc.version
+                self.compile_args = ['-I' + inc.path.as_posix()]
+                self.compile_args += comp_args
+                self.compile_args += self._extra_compile_args()
+                self.compile_args = list(mesonlib.OrderedSet(self.compile_args))
+                self.link_args = link_args
+                mlog.debug(f'  - final compile args: {self.compile_args}')
+                mlog.debug(f'  - final link args:    {self.link_args}')
+                return True
+
+            # in case we missed something log it and try again
+            mlog.debug('  - NOT found:')
+            for mod in not_found:
+                mlog.debug(f'    - {mod}')
+
+        return False
+
+    def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
+        candidates = []  # type: T.List[Path]
+        inc_root = root / 'include'
+
+        candidates += [root / 'boost']
+        candidates += [inc_root / 'boost']
+        if inc_root.is_dir():
+            for i in inc_root.iterdir():
+                if not i.is_dir() or not i.name.startswith('boost-'):
+                    continue
+                candidates += [i / 'boost']
+        candidates = [x for x in candidates if x.is_dir()]
+        candidates = [x / 'version.hpp' for x in candidates]
+        candidates = [x for x in candidates if x.exists()]
+        return [self._include_dir_from_version_header(x) for x in candidates]
+
+    def detect_lib_dirs(self, root: Path) -> T.List[Path]:
+        # First check the system library paths. Only consider those within the
+        # given root path
+        system_dirs_t = self.clib_compiler.get_library_dirs(self.env)
+        system_dirs = [Path(x) for x in system_dirs_t]
+        system_dirs = [x.resolve() for x in system_dirs if x.exists()]
+        system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)]
+        system_dirs = list(mesonlib.OrderedSet(system_dirs))
+
+        if system_dirs:
+            return system_dirs
+
+        # No system include paths were found --> fall back to manually looking
+        # for library dirs in root
+        dirs = []     # type: T.List[Path]
+        subdirs = []  # type: T.List[Path]
+        for i in root.iterdir():
+            if i.is_dir() and i.name.startswith('lib'):
+                dirs += [i]
+
+        # Some distros put libraries not directly inside /usr/lib but in /usr/lib/x86_64-linux-gnu
+        for i in dirs:
+            for j in i.iterdir():
+                if j.is_dir() and j.name.endswith('-linux-gnu'):
+                    subdirs += [j]
+
+        # Filter out paths that don't match the target arch to avoid finding
+        # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
+        if not self.arch:
+            return dirs + subdirs
+
+        arch_list_32 = ['32', 'i386']
+        arch_list_64 = ['64']
+
+        raw_list = dirs + subdirs
+        no_arch = [x for x in raw_list if not any([y in x.name for y in arch_list_32 + arch_list_64])]
+
+        matching_arch = []  # type: T.List[Path]
+        if '32' in self.arch:
+            matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_32])]
+        elif '64' in self.arch:
+            matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_64])]
+
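+        # For example, with arch 'x64' a directory named 'lib64' or
+        # 'x86_64-linux-gnu' ends up in matching_arch, while a plain 'lib'
+        # (no arch token in its name) ends up in no_arch.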
+        return sorted(matching_arch) + sorted(no_arch)
+
+    def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
+        # MSVC is very picky with the library tags
+        vscrt = ''
+        try:
+            crt_val = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value
+            buildtype = self.env.coredata.options[mesonlib.OptionKey('buildtype')].value
+            vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
+        except (KeyError, IndexError, AttributeError):
+            pass
+
+        # mlog.debug('    - static: {}'.format(self.static))
+        # mlog.debug('    - not explicit static: {}'.format(not self.explicit_static))
+        # mlog.debug('    - mt: {}'.format(self.multithreading))
+        # mlog.debug('    - version: {}'.format(lib_vers))
+        # mlog.debug('    - arch: {}'.format(self.arch))
+        # mlog.debug('    - vscrt: {}'.format(vscrt))
+        libs = [x for x in libs if x.static == self.static or not self.explicit_static]
+        libs = [x for x in libs if x.mt == self.multithreading]
+        libs = [x for x in libs if x.version_matches(lib_vers)]
+        libs = [x for x in libs if x.arch_matches(self.arch)]
+        libs = [x for x in libs if x.vscrt_matches(vscrt)]
+        libs = [x for x in libs if x.nvsuffix != 'dll']  # Only link to import libraries
+
+        # Only filter by debug when we are building in release mode. Debug
+        # libraries are automatically preferred through sorting otherwise.
+        if not self.debug:
+            libs = [x for x in libs if not x.debug]
+
+        # Take the abitag from the first library and filter by it. This
+        # ensures that we have a set of libraries that are always compatible.
+        if not libs:
+            return []
+        abitag = libs[0].abitag
+        libs = [x for x in libs if x.abitag == abitag]
+
+        return libs
+
+    def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
+        libs = []  # type: T.List[BoostLibraryFile]
+        for i in libdir.iterdir():
+            if not i.is_file() or i.is_symlink():
+                continue
+            if not any([i.name.startswith(x) for x in ['libboost_', 'boost_']]):
+                continue
+
+            libs += [BoostLibraryFile(i)]
+        return [x for x in libs if x.is_boost()]  # Filter out non-boost libraries
+
+    def detect_split_root(self, inc_dir: Path, lib_dir: Path) -> None:
+        boost_inc_dir = None
+        for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+            if j.is_file():
+                boost_inc_dir = self._include_dir_from_version_header(j)
+                break
+        if not boost_inc_dir:
+            self.is_found = False
+            return
+
+        self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+
+    def detect_roots(self) -> None:
+        roots = []  # type: T.List[Path]
+
+        # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
+        # allows BoostDependency to find boost from Conan. See #5438
+        try:
+            boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
+            if boost_pc.found():
+                boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None})
+                if boost_root:
+                    roots += [Path(boost_root)]
+        except DependencyException:
+            pass
+
+        # Add roots from system paths
+        inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
+        inc_paths = [x.parent for x in inc_paths if x.exists()]
+        inc_paths = [x.resolve() for x in inc_paths]
+        roots += inc_paths
+
+        # Add system paths
+        if self.env.machines[self.for_machine].is_windows():
+            # Where boost built from source actually installs it
+            c_root = Path('C:/Boost')
+            if c_root.is_dir():
+                roots += [c_root]
+
+            # Where boost documentation says it should be
+            prog_files = Path('C:/Program Files/boost')
+            # Where boost prebuilt binaries are
+            local_boost = Path('C:/local')
+
+            candidates = []  # type: T.List[Path]
+            if prog_files.is_dir():
+                candidates += [*prog_files.iterdir()]
+            if local_boost.is_dir():
+                candidates += [*local_boost.iterdir()]
+
+            roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
+        else:
+            tmp = []  # type: T.List[Path]
+
+            # Homebrew
+            brew_boost = Path('/usr/local/Cellar/boost')
+            if brew_boost.is_dir():
+                tmp += [x for x in brew_boost.iterdir()]
+
+            # Add some default system paths
+            tmp += [Path('/opt/local')]
+            tmp += [Path('/usr/local/opt/boost')]
+            tmp += [Path('/usr/local')]
+            tmp += [Path('/usr')]
+
+            # Cleanup paths
+            tmp = [x for x in tmp if x.is_dir()]
+            tmp = [x.resolve() for x in tmp]
+            roots += tmp
+
+        self.check_and_set_roots(roots)
+
+    def log_details(self) -> str:
+        res = ''
+        if self.modules_found:
+            res += 'found: ' + ', '.join(self.modules_found)
+        if self.modules_missing:
+            if res:
+                res += ' | '
+            res += 'missing: ' + ', '.join(self.modules_missing)
+        return res
+
+    def log_info(self) -> str:
+        if self.boost_root:
+            return self.boost_root.as_posix()
+        return ''
+
+    def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir:
+        # Extract the version with a regex. Using clib_compiler.get_define would
+        # also work, however, this is slower (since the compiler has to be
+        # invoked) and overkill since the layout of the header is always the same.
+        assert hfile.exists()
+        raw = hfile.read_text(encoding='utf-8')
+        m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
+        if not m:
+            mlog.debug(f'Failed to extract version information from {hfile}')
+            return BoostIncludeDir(hfile.parents[1], 0)
+        return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
+
+    def _extra_compile_args(self) -> T.List[str]:
+        # BOOST_ALL_DYN_LINK should not be required with the known defines below
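+        # BOOST_ALL_NO_LIB stops MSVC from auto-linking Boost libraries via
+        # '#pragma comment(lib, ...)', so only the libraries selected above
+        # are linked.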
+        return ['-DBOOST_ALL_NO_LIB']  # Disable automatic linking
+
+
+# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
+# See https://mesonbuild.com/Reference-tables.html#cpu-families
+boost_arch_map = {
+    'aarch64': 'a64',
+    'arc': 'a32',
+    'arm': 'a32',
+    'ia64': 'i64',
+    'mips': 'm32',
+    'mips64': 'm64',
+    'ppc': 'p32',
+    'ppc64': 'p64',
+    'sparc': 's32',
+    'sparc64': 's64',
+    'x86': 'x32',
+    'x86_64': 'x64',
+}
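+# Illustrative example of where these tags appear in a versioned-layout library
+# name (values are hypothetical): 'boost_regex-vc142-mt-x64-1_73.lib' encodes
+# the toolset (vc142), threading model (mt), architecture/address-model tag
+# (x64) and Boost version (1_73).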
+
+
+####      ---- BEGIN GENERATED ----      ####
+#                                           #
+# Generated with tools/boost_names.py:
+#  - boost version:   1.73.0
+#  - modules found:   159
+#  - libraries found: 43
+#
+
+class BoostLibrary():
+    def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+        self.name = name
+        self.shared = shared
+        self.static = static
+        self.single = single
+        self.multi = multi
+
+class BoostModule():
+    def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+        self.name = name
+        self.key = key
+        self.desc = desc
+        self.libs = libs
+
+
+# dict of all known libraries with additional compile options
+boost_libraries = {
+    'boost_atomic': BoostLibrary(
+        name='boost_atomic',
+        shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
+        static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_chrono': BoostLibrary(
+        name='boost_chrono',
+        shared=['-DBOOST_CHRONO_DYN_LINK=1'],
+        static=['-DBOOST_CHRONO_STATIC_LINK=1'],
+        single=['-DBOOST_CHRONO_THREAD_DISABLED'],
+        multi=[],
+    ),
+    'boost_container': BoostLibrary(
+        name='boost_container',
+        shared=['-DBOOST_CONTAINER_DYN_LINK=1'],
+        static=['-DBOOST_CONTAINER_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_context': BoostLibrary(
+        name='boost_context',
+        shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_contract': BoostLibrary(
+        name='boost_contract',
+        shared=['-DBOOST_CONTRACT_DYN_LINK'],
+        static=['-DBOOST_CONTRACT_STATIC_LINK'],
+        single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
+        multi=[],
+    ),
+    'boost_coroutine': BoostLibrary(
+        name='boost_coroutine',
+        shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_date_time': BoostLibrary(
+        name='boost_date_time',
+        shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_exception': BoostLibrary(
+        name='boost_exception',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_fiber': BoostLibrary(
+        name='boost_fiber',
+        shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_fiber_numa': BoostLibrary(
+        name='boost_fiber_numa',
+        shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_filesystem': BoostLibrary(
+        name='boost_filesystem',
+        shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'],
+        static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_graph': BoostLibrary(
+        name='boost_graph',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_iostreams': BoostLibrary(
+        name='boost_iostreams',
+        shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_locale': BoostLibrary(
+        name='boost_locale',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_log': BoostLibrary(
+        name='boost_log',
+        shared=['-DBOOST_LOG_DYN_LINK=1'],
+        static=[],
+        single=['-DBOOST_LOG_NO_THREADS'],
+        multi=[],
+    ),
+    'boost_log_setup': BoostLibrary(
+        name='boost_log_setup',
+        shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
+        static=[],
+        single=['-DBOOST_LOG_NO_THREADS'],
+        multi=[],
+    ),
+    'boost_math_c99': BoostLibrary(
+        name='boost_math_c99',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_c99f': BoostLibrary(
+        name='boost_math_c99f',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_c99l': BoostLibrary(
+        name='boost_math_c99l',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1': BoostLibrary(
+        name='boost_math_tr1',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1f': BoostLibrary(
+        name='boost_math_tr1f',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1l': BoostLibrary(
+        name='boost_math_tr1l',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_mpi': BoostLibrary(
+        name='boost_mpi',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_nowide': BoostLibrary(
+        name='boost_nowide',
+        shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_prg_exec_monitor': BoostLibrary(
+        name='boost_prg_exec_monitor',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_program_options': BoostLibrary(
+        name='boost_program_options',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_random': BoostLibrary(
+        name='boost_random',
+        shared=['-DBOOST_RANDOM_DYN_LINK'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_regex': BoostLibrary(
+        name='boost_regex',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_serialization': BoostLibrary(
+        name='boost_serialization',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_addr2line': BoostLibrary(
+        name='boost_stacktrace_addr2line',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_backtrace': BoostLibrary(
+        name='boost_stacktrace_backtrace',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_basic': BoostLibrary(
+        name='boost_stacktrace_basic',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_noop': BoostLibrary(
+        name='boost_stacktrace_noop',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_windbg': BoostLibrary(
+        name='boost_stacktrace_windbg',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_windbg_cached': BoostLibrary(
+        name='boost_stacktrace_windbg_cached',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_system': BoostLibrary(
+        name='boost_system',
+        shared=['-DBOOST_SYSTEM_DYN_LINK=1'],
+        static=['-DBOOST_SYSTEM_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_test_exec_monitor': BoostLibrary(
+        name='boost_test_exec_monitor',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_thread': BoostLibrary(
+        name='boost_thread',
+        shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
+        static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_timer': BoostLibrary(
+        name='boost_timer',
+        shared=['-DBOOST_TIMER_DYN_LINK=1'],
+        static=['-DBOOST_TIMER_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_type_erasure': BoostLibrary(
+        name='boost_type_erasure',
+        shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_unit_test_framework': BoostLibrary(
+        name='boost_unit_test_framework',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_wave': BoostLibrary(
+        name='boost_wave',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_wserialization': BoostLibrary(
+        name='boost_wserialization',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+}
+
+#                                           #
+####       ---- END GENERATED ----       ####
diff --git a/meson/mesonbuild/dependencies/cmake.py b/meson/mesonbuild/dependencies/cmake.py
new file mode 100644
index 000000000..047950da5
--- /dev/null
+++ b/meson/mesonbuild/dependencies/cmake.py
@@ -0,0 +1,718 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, DependencyTypeName
+from ..mesonlib import is_windows, MesonException, OptionKey, PerMachine, stringlistify, extract_as_list
+from ..mesondata import mesondata
+from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException, CMakeToolchain, CMakeExecScope, check_cmake_args, CMakeTarget
+from .. import mlog
+from pathlib import Path
+import functools
+import re
+import os
+import shutil
+import textwrap
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from ..envconfig import MachineInfo
+
+class CMakeInfo(T.NamedTuple):
+    module_paths: T.List[str]
+    cmake_root: str
+    archs: T.List[str]
+    common_paths: T.List[str]
+
+class CMakeDependency(ExternalDependency):
+    # The class's copy of the CMake path. Avoids having to search for it
+    # multiple times in the same Meson invocation.
+    class_cmakeinfo: PerMachine[T.Optional[CMakeInfo]] = PerMachine(None, None)
+    # Version string for the minimum CMake version
+    class_cmake_version = '>=3.4'
+    # CMake generators to try (empty for no generator)
+    class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010']
+    class_working_generator: T.Optional[str] = None
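+    # The last generator that worked is cached here and tried first on
+    # subsequent dependency lookups (see _get_cmake_info and _detect_dep).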
+
+    def _gen_exception(self, msg: str) -> DependencyException:
+        return DependencyException(f'Dependency {self.name} not found: {msg}')
+
+    def _main_cmake_file(self) -> str:
+        return 'CMakeLists.txt'
+
+    def _extra_cmake_opts(self) -> T.List[str]:
+        return []
+
+    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+        # Map the input module list to something else
+        # This function will only be executed AFTER the initial CMake
+        # interpreter pass has completed. Thus variables defined in the
+        # CMakeLists.txt can be accessed here.
+        #
+        # Both the modules and components inputs contain the original lists.
+        return modules
+
+    def _map_component_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+        # Map the input components list to something else. This
+        # function will be executed BEFORE the initial CMake interpreter
+        # pass. Thus variables from the CMakeLists.txt can NOT be accessed.
+        #
+        # Both the modules and components inputs contain the original lists.
+        return components
+
+    def _original_module_name(self, module: str) -> str:
+        # Reverse the module mapping done by _map_module_list for
+        # one module
+        return module
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        # Gather a list of all languages to support
+        self.language_list = []  # type: T.List[str]
+        if language is None:
+            compilers = None
+            if kwargs.get('native', False):
+                compilers = environment.coredata.compilers.build
+            else:
+                compilers = environment.coredata.compilers.host
+
+            candidates = ['c', 'cpp', 'fortran', 'objc', 'objcxx']
+            self.language_list += [x for x in candidates if x in compilers]
+        else:
+            self.language_list += [language]
+
+        # Add additional languages if required
+        if 'fortran' in self.language_list:
+            self.language_list += ['c']
+
+        # Ensure that the list is unique
+        self.language_list = list(set(self.language_list))
+
+        super().__init__(DependencyTypeName('cmake'), environment, kwargs, language=language)
+        self.name = name
+        self.is_libtool = False
+        # Store a copy of the CMake path on the object itself so it is
+        # stored in the pickled coredata and recovered.
+        self.cmakebin:  T.Optional[CMakeExecutor] = None
+        self.cmakeinfo: T.Optional[CMakeInfo]     = None
+
+        # Where all CMake "build dirs" are located
+        self.cmake_root_dir = environment.scratch_dir
+
+        # T.List of successfully found modules
+        self.found_modules: T.List[str] = []
+
+        # Initialize with None before the first return to avoid
+        # AttributeError exceptions in derived classes
+        self.traceparser: T.Optional[CMakeTraceParser] = None
+
+        # TODO further evaluate always using MachineChoice.BUILD
+        self.cmakebin = CMakeExecutor(environment, CMakeDependency.class_cmake_version, self.for_machine, silent=self.silent)
+        if not self.cmakebin.found():
+            self.cmakebin = None
+            msg = f'CMake binary for machine {self.for_machine} not found. Giving up.'
+            if self.required:
+                raise DependencyException(msg)
+            mlog.debug(msg)
+            return
+
+        # Setup the trace parser
+        self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+
+        cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
+        cm_args = check_cmake_args(cm_args)
+        if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
+            CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
+        self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
+        if self.cmakeinfo is None:
+            raise self._gen_exception('Unable to obtain CMake system information')
+
+        package_version = kwargs.get('cmake_package_version', '')
+        if not isinstance(package_version, str):
+            raise DependencyException('Keyword "cmake_package_version" must be a string.')
+        components = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'components'))]
+        modules = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'modules'))]
+        modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
+        cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
+        cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
+        if cm_path:
+            cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
+        if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
+            mlog.debug('Preliminary CMake check failed. Aborting.')
+            return
+        self._detect_dep(name, package_version, modules, components, cm_args)
+
+    def __repr__(self) -> str:
+        return f'<{self.__class__.__name__} {self.name}: {self.is_found} {self.version_reqs}>'
+
+    def _get_cmake_info(self, cm_args: T.List[str]) -> T.Optional[CMakeInfo]:
+        mlog.debug("Extracting basic cmake information")
+
+        # Try different CMake generators since specifying no generator may fail
+        # in cygwin for some reason
+        gen_list = []
+        # First try the last working generator
+        if CMakeDependency.class_working_generator is not None:
+            gen_list += [CMakeDependency.class_working_generator]
+        gen_list += CMakeDependency.class_cmake_generators
+
+        temp_parser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+        toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+        toolchain.write()
+
+        for i in gen_list:
+            mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+            # Prepare options
+            cmake_opts = temp_parser.trace_args() + toolchain.get_cmake_args() + ['.']
+            cmake_opts += cm_args
+            if len(i) > 0:
+                cmake_opts = ['-G', i] + cmake_opts
+
+            # Run CMake
+            ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakePathInfo.txt')
+
+            # Current generator was successful
+            if ret1 == 0:
+                CMakeDependency.class_working_generator = i
+                break
+
+            mlog.debug(f'CMake failed to gather system information for generator {i} with error code {ret1}')
+            mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n')
+
+        # Check if any generator succeeded
+        if ret1 != 0:
+            return None
+
+        try:
+            temp_parser.parse(err1)
+        except MesonException:
+            return None
+
+        def process_paths(l: T.List[str]) -> T.Set[str]:
+            if is_windows():
+                # Cannot split on ':' on Windows because it is part of the drive letter (e.g. 'C:')
+                tmp = [x.split(os.pathsep) for x in l]
+            else:
+                # https://github.com/mesonbuild/meson/issues/7294
+                tmp = [re.split(r':|;', x) for x in l]
+            flattened = [x for sublist in tmp for x in sublist]
+            return set(flattened)
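+        # e.g. (illustrative) process_paths(['a/b;c/d', 'e/f']) on a
+        # non-Windows host yields {'a/b', 'c/d', 'e/f'}.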
+
+        # Extract the variables and sanity check them
+        root_paths_set = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
+        root_paths_set.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
+        root_paths = sorted(root_paths_set)
+        root_paths = [x for x in root_paths if os.path.isdir(x)]
+        module_paths_set = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
+        rooted_paths: T.List[str] = []
+        for j in [Path(x) for x in root_paths]:
+            for p in [Path(x) for x in module_paths_set]:
+                rooted_paths.append(str(j / p.relative_to(p.anchor)))
+        module_paths = sorted(module_paths_set.union(rooted_paths))
+        module_paths = [x for x in module_paths if os.path.isdir(x)]
+        archs = temp_parser.get_cmake_var('MESON_ARCH_LIST')
+
+        common_paths = ['lib', 'lib32', 'lib64', 'libx32', 'share']
+        for i in archs:
+            common_paths += [os.path.join('lib', i)]
+
+        res = CMakeInfo(
+            module_paths=module_paths,
+            cmake_root=temp_parser.get_cmake_var('MESON_CMAKE_ROOT')[0],
+            archs=archs,
+            common_paths=common_paths,
+        )
+
+        mlog.debug(f'  -- Module search paths:    {res.module_paths}')
+        mlog.debug(f'  -- CMake root:             {res.cmake_root}')
+        mlog.debug(f'  -- CMake architectures:    {res.archs}')
+        mlog.debug(f'  -- CMake lib search paths: {res.common_paths}')
+
+        return res
+
+    @staticmethod
+    @functools.lru_cache(maxsize=None)
+    def _cached_listdir(path: str) -> T.Tuple[T.Tuple[str, str], ...]:
+        try:
+            return tuple((x, str(x).lower()) for x in os.listdir(path))
+        except OSError:
+            return tuple()
+
+    @staticmethod
+    @functools.lru_cache(maxsize=None)
+    def _cached_isdir(path: str) -> bool:
+        try:
+            return os.path.isdir(path)
+        except OSError:
+            return False
+
+    def _preliminary_find_check(self, name: str, module_path: T.List[str], prefix_path: T.List[str], machine: 'MachineInfo') -> bool:
+        lname = str(name).lower()
+
+        # Checks <path>, <path>/cmake and <path>/CMake
+        def find_module(path: str) -> bool:
+            for i in [path, os.path.join(path, 'cmake'), os.path.join(path, 'CMake')]:
+                if not self._cached_isdir(i):
+                    continue
+
+                # Check the directory case insensitive
+                content = self._cached_listdir(i)
+                candidates = ['Find{}.cmake', '{}Config.cmake', '{}-config.cmake']
+                candidates = [x.format(name).lower() for x in candidates]
+                if any([x[1] in candidates for x in content]):
+                    return True
+            return False
+
+        # Search in <path>/(lib/|lib*|share) for cmake files
+        def search_lib_dirs(path: str) -> bool:
+            for i in [os.path.join(path, x) for x in self.cmakeinfo.common_paths]:
+                if not self._cached_isdir(i):
+                    continue
+
+                # Check <path>/(lib/|lib*|share)/cmake/*/
+                cm_dir = os.path.join(i, 'cmake')
+                if self._cached_isdir(cm_dir):
+                    content = self._cached_listdir(cm_dir)
+                    content = tuple(x for x in content if x[1].startswith(lname))
+                    for k in content:
+                        if find_module(os.path.join(cm_dir, k[0])):
+                            return True
+
+                # Check <path>/(lib/|lib*|share)/*/ and
+                # <path>/(lib/|lib*|share)/*/(cmake|CMake)/
+                content = self._cached_listdir(i)
+                content = tuple(x for x in content if x[1].startswith(lname))
+                for k in content:
+                    if find_module(os.path.join(i, k[0])):
+                        return True
+
+            return False
+
+        # Check the user provided and system module paths
+        for i in module_path + [os.path.join(self.cmakeinfo.cmake_root, 'Modules')]:
+            if find_module(i):
+                return True
+
+        # Check the user provided prefix paths
+        for i in prefix_path:
+            if search_lib_dirs(i):
+                return True
+
+        # Check PATH
+        system_env = []  # type: T.List[str]
+        for i in os.environ.get('PATH', '').split(os.pathsep):
+            if i.endswith('/bin') or i.endswith('\\bin'):
+                i = i[:-4]
+            if i.endswith('/sbin') or i.endswith('\\sbin'):
+                i = i[:-5]
+            system_env += [i]
+
+        # Check the system paths
+        for i in self.cmakeinfo.module_paths + system_env:
+            if find_module(i):
+                return True
+
+            if search_lib_dirs(i):
+                return True
+
+            content = self._cached_listdir(i)
+            content = tuple(x for x in content if x[1].startswith(lname))
+            for k in content:
+                if search_lib_dirs(os.path.join(i, k[0])):
+                    return True
+
+            # Mac framework support
+            if machine.is_darwin():
+                for j in [f'{lname}.framework', f'{lname}.app']:
+                    for k in content:
+                        if k[1] != j:
+                            continue
+                        if find_module(os.path.join(i, k[0], 'Resources')) or find_module(os.path.join(i, k[0], 'Version')):
+                            return True
+
+        # Check the environment path
+        env_path = os.environ.get(f'{name}_DIR')
+        if env_path and find_module(env_path):
+            return True
+
+        return False
+
+    def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]], args: T.List[str]) -> None:
+        # Detect a dependency with CMake using the '--find-package' mode
+        # and the trace output (stderr)
+        #
+        # When the trace output is enabled CMake prints all functions with
+        # parameters to stderr as they are executed. Since CMake 3.4.0
+        # variables ("${VAR}") are also replaced in the trace output.
+        mlog.debug('\nDetermining dependency {!r} with CMake executable '
+                   '{!r}'.format(name, self.cmakebin.executable_path()))
+
+        # Try different CMake generators since specifying no generator may fail
+        # in cygwin for some reason
+        gen_list = []
+        # First try the last working generator
+        if CMakeDependency.class_working_generator is not None:
+            gen_list += [CMakeDependency.class_working_generator]
+        gen_list += CMakeDependency.class_cmake_generators
+
+        # Map the components
+        comp_mapped = self._map_component_list(modules, components)
+        toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+        toolchain.write()
+
+        for i in gen_list:
+            mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+            # Prepare options
+            cmake_opts = []
+            cmake_opts += [f'-DNAME={name}']
+            cmake_opts += ['-DARCHS={}'.format(';'.join(self.cmakeinfo.archs))]
+            cmake_opts += [f'-DVERSION={package_version}']
+            cmake_opts += ['-DCOMPS={}'.format(';'.join([x[0] for x in comp_mapped]))]
+            cmake_opts += args
+            cmake_opts += self.traceparser.trace_args()
+            cmake_opts += toolchain.get_cmake_args()
+            cmake_opts += self._extra_cmake_opts()
+            cmake_opts += ['.']
+            if len(i) > 0:
+                cmake_opts = ['-G', i] + cmake_opts
+
+            # Run CMake
+            ret1, out1, err1 = self._call_cmake(cmake_opts, self._main_cmake_file())
+
+            # Current generator was successful
+            if ret1 == 0:
+                CMakeDependency.class_working_generator = i
+                break
+
+            mlog.debug(f'CMake failed for generator {i} and package {name} with error code {ret1}')
+            mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n')
+
+        # Check if any generator succeeded
+        if ret1 != 0:
+            return
+
+        try:
+            self.traceparser.parse(err1)
+        except CMakeException as e:
+            e2 = self._gen_exception(str(e))
+            if self.required:
+                raise
+            else:
+                self.compile_args = []
+                self.link_args = []
+                self.is_found = False
+                self.reason = e2
+                return
+
+        # Whether the package is found or not is always stored in PACKAGE_FOUND
+        self.is_found = self.traceparser.var_to_bool('PACKAGE_FOUND')
+        if not self.is_found:
+            return
+
+        # Try to detect the version
+        vers_raw = self.traceparser.get_cmake_var('PACKAGE_VERSION')
+
+        if len(vers_raw) > 0:
+            self.version = vers_raw[0]
+            self.version = self.version.strip('"\' ')
+
+        # Post-process module list. Used in derived classes to modify the
+        # module list (append or prepend a string, etc.).
+        modules = self._map_module_list(modules, components)
+        autodetected_module_list = False
+
+        # Check whether we need the DEBUG or RELEASE CMake dependencies
+        is_debug = False
+        if OptionKey('b_vscrt') in self.env.coredata.options:
+            is_debug = self.env.coredata.get_option(OptionKey('buildtype')) == 'debug'
+            if self.env.coredata.options[OptionKey('b_vscrt')].value in {'mdd', 'mtd'}:
+                is_debug = True
+        else:
+            # Don't directly assign to is_debug to make mypy happy
+            debug_opt = self.env.coredata.get_option(OptionKey('debug'))
+            assert isinstance(debug_opt, bool)
+            is_debug = debug_opt
+
+        # Try guessing a CMake target if none is provided
+        if len(modules) == 0:
+            for i in self.traceparser.targets:
+                tg = i.lower()
+                lname = name.lower()
+                if f'{lname}::{lname}' == tg or lname == tg.replace('::', ''):
+                    mlog.debug(f'Guessed CMake target \'{i}\'')
+                    modules = [(i, True)]
+                    autodetected_module_list = True
+                    break
+
+        # Failed to guess a target --> try the old-style method
+        if len(modules) == 0:
+            # Warn when there might be matching imported targets but no automatic match was used
+            partial_modules: T.List[CMakeTarget] = []
+            for k, v in self.traceparser.targets.items():
+                tg = k.lower()
+                lname = name.lower()
+                if tg.startswith(f'{lname}::'):
+                    partial_modules += [v]
+            if partial_modules:
+                mlog.warning(textwrap.dedent(f'''\
+                    Could not find an exact match for the CMake dependency {name}.
+
+                    However, Meson found the following partial matches:
+
+                        {[x.name for x in partial_modules]}
+
+                    Using an imported target is recommended, since this approach is less error prone
+                    and better supported by Meson. Consider explicitly specifying one of
+                    these in the dependency call with:
+
+                        dependency('{name}', modules: ['{name}::', ...])
+
+                    Meson will now continue to use the old-style {name}_LIBRARIES CMake
+                    variables to extract the dependency information since no explicit
+                    target is currently specified.
+
+                '''))
+                mlog.debug('More info for the partial match targets:')
+                for tgt in partial_modules:
+                    mlog.debug(tgt)
+
+
+            incDirs = [x for x in self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS') if x]
+            defs = [x for x in self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS') if x]
+            libs_raw = [x for x in self.traceparser.get_cmake_var('PACKAGE_LIBRARIES') if x]
+
+            # CMake has a "fun" API, where certain keywords describing
+            # configurations can be in the *_LIBRARIES variables. See:
+            # - https://github.com/mesonbuild/meson/issues/9197
+            # - https://gitlab.freedesktop.org/libnice/libnice/-/issues/140
+            # - https://cmake.org/cmake/help/latest/command/target_link_libraries.html#overview  (the last point in the section)
+            libs: T.List[str] = []
+            cfg_matches = True
+            cm_tag_map = {'debug': is_debug, 'optimized': not is_debug, 'general': True}
+            for i in libs_raw:
+                if i.lower() in cm_tag_map:
+                    cfg_matches = cm_tag_map[i.lower()]
+                    continue
+                if cfg_matches:
+                    libs += [i]
+                # According to the CMake docs, a keyword only applies to the
+                # item directly following it, and all items without a keyword
+                # are implicitly `general`
+                cfg_matches = True
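+            # e.g. (illustrative) ['optimized', 'foo.lib', 'debug', 'foo_d.lib']
+            # keeps only 'foo.lib' in a release build and only 'foo_d.lib' in a
+            # debug build.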
+
+            # Try to use old style variables if no module is specified
+            if len(libs) > 0:
+                self.compile_args = list(map(lambda x: f'-I{x}', incDirs)) + defs
+                self.link_args = libs
+                mlog.debug(f'using old-style CMake variables for dependency {name}')
+                mlog.debug(f'Include Dirs:         {incDirs}')
+                mlog.debug(f'Compiler Definitions: {defs}')
+                mlog.debug(f'Libraries:            {libs}')
+                return
+
+            # Even the old-style approach failed. Nothing else we can do here
+            self.is_found = False
+            raise self._gen_exception('CMake: failed to guess a CMake target for {}.\n'
+                                      'Try to explicitly specify one or more targets with the "modules" property.\n'
+                                      'Valid targets are:\n{}'.format(name, list(self.traceparser.targets.keys())))
+
+        # Set dependencies with CMake targets
+        # recognise arguments we should pass directly to the linker
+        reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-pthread|-delayload:[a-zA-Z0-9_\.]+|[a-zA-Z0-9_]+\.lib)$')
+        reg_is_maybe_bare_lib = re.compile(r'^[a-zA-Z0-9_]+$')
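+        # reg_is_lib matches arguments such as '-lfoo', '-pthread' or 'foo.lib';
+        # reg_is_maybe_bare_lib matches bare names such as 'version'.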
+        processed_targets = []
+        incDirs = []
+        compileDefinitions = []
+        compileOptions = []
+        libraries = []
+        for i, required in modules:
+            if i not in self.traceparser.targets:
+                if not required:
+                    mlog.warning('CMake: Optional module', mlog.bold(self._original_module_name(i)), 'for', mlog.bold(name), 'was not found')
+                    continue
+                raise self._gen_exception('CMake: invalid module {} for {}.\n'
+                                          'Try to explicitly specify one or more targets with the "modules" property.\n'
+                                          'Valid targets are:\n{}'.format(self._original_module_name(i), name, list(self.traceparser.targets.keys())))
+
+            targets = [i]
+            if not autodetected_module_list:
+                self.found_modules += [i]
+
+            while len(targets) > 0:
+                curr = targets.pop(0)
+
+                # Skip already processed targets
+                if curr in processed_targets:
+                    continue
+
+                tgt = self.traceparser.targets[curr]
+                cfgs = []
+                cfg = ''
+                otherDeps = []
+                mlog.debug(tgt)
+
+                if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties:
+                    incDirs += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x]
+
+                if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties:
+                    compileDefinitions += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x]
+
+                if 'INTERFACE_COMPILE_OPTIONS' in tgt.properties:
+                    compileOptions += [x for x in tgt.properties['INTERFACE_COMPILE_OPTIONS'] if x]
+
+                if 'IMPORTED_CONFIGURATIONS' in tgt.properties:
+                    cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x]
+                    cfg = cfgs[0]
+
+                if is_debug:
+                    if 'DEBUG' in cfgs:
+                        cfg = 'DEBUG'
+                    elif 'RELEASE' in cfgs:
+                        cfg = 'RELEASE'
+                else:
+                    if 'RELEASE' in cfgs:
+                        cfg = 'RELEASE'
+
+                if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties:
+                    libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x]
+                elif 'IMPORTED_IMPLIB' in tgt.properties:
+                    libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x]
+                elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties:
+                    libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x]
+                elif 'IMPORTED_LOCATION' in tgt.properties:
+                    libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x]
+
+                if 'INTERFACE_LINK_LIBRARIES' in tgt.properties:
+                    otherDeps += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x]
+
+                if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties:
+                    otherDeps += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x]
+                elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties:
+                    otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x]
+
+                for j in otherDeps:
+                    if j in self.traceparser.targets:
+                        targets += [j]
+                    elif reg_is_lib.match(j):
+                        libraries += [j]
+                    elif os.path.isabs(j) and os.path.exists(j):
+                        libraries += [j]
+                    elif self.env.machines.build.is_windows() and reg_is_maybe_bare_lib.match(j):
+                        # On Windows, CMake library dependencies can be passed as bare library names,
+                        # e.g. 'version' should translate into 'version.lib'. CMake brute-forces
+                        # combinations of prefixes and suffixes to find the right library; however,
+                        # since we do not have a compiler environment available to us, we cannot do
+                        # the same and must assume that any bare argument which is not also a CMake
+                        # target is a system library we should try to link against.
+                        libraries += [f"{j}.lib"]
+                    else:
+                        mlog.warning('CMake: Dependency', mlog.bold(j), 'for', mlog.bold(name), 'target', mlog.bold(self._original_module_name(curr)), 'was not found')
+
+                processed_targets += [curr]
+
+        # Make sure all elements in the lists are unique and sorted
+        incDirs = sorted(set(incDirs))
+        compileDefinitions = sorted(set(compileDefinitions))
+        compileOptions = sorted(set(compileOptions))
+        libraries = sorted(set(libraries))
+
+        mlog.debug(f'Include Dirs:         {incDirs}')
+        mlog.debug(f'Compiler Definitions: {compileDefinitions}')
+        mlog.debug(f'Compiler Options:     {compileOptions}')
+        mlog.debug(f'Libraries:            {libraries}')
+
+        self.compile_args = compileOptions + compileDefinitions + [f'-I{x}' for x in incDirs]
+        self.link_args = libraries
+
+    def _get_build_dir(self) -> Path:
+        build_dir = Path(self.cmake_root_dir) / f'cmake_{self.name}'
+        build_dir.mkdir(parents=True, exist_ok=True)
+        return build_dir
+
+    def _setup_cmake_dir(self, cmake_file: str) -> Path:
+        # Setup the CMake build environment and return the "build" directory
+        build_dir = self._get_build_dir()
+
+        # Remove old CMake cache so we can try out multiple generators
+        cmake_cache = build_dir / 'CMakeCache.txt'
+        cmake_files = build_dir / 'CMakeFiles'
+        if cmake_cache.exists():
+            cmake_cache.unlink()
+        shutil.rmtree(cmake_files.as_posix(), ignore_errors=True)
+
+        # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt
+        cmake_txt = mesondata['dependencies/data/' + cmake_file].data
+
+        # In general, some Fortran CMake find_package() calls also require the C
+        # language to be enabled, even if nothing from C is used directly. An easy
+        # Fortran example that fails without the C language is
+        #   find_package(Threads)
+        # To keep this general for any other language that might need it, we use a
+        # list of all languages and expand it in the CMake
+        # project(... LANGUAGES ...) statement.
+        from ..cmake import language_map
+        cmake_language = [language_map[x] for x in self.language_list if x in language_map]
+        if not cmake_language:
+            cmake_language += ['NONE']
+
+        cmake_txt = textwrap.dedent("""
+            cmake_minimum_required(VERSION ${{CMAKE_VERSION}})
+            project(MesonTemp LANGUAGES {})
+        """).format(' '.join(cmake_language)) + cmake_txt
+
+        cm_file = build_dir / 'CMakeLists.txt'
+        cm_file.write_text(cmake_txt, encoding='utf-8')
+        mlog.cmd_ci_include(cm_file.absolute().as_posix())
+
+        return build_dir
+
+    def _call_cmake(self,
+                    args: T.List[str],
+                    cmake_file: str,
+                    env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, T.Optional[str], T.Optional[str]]:
+        build_dir = self._setup_cmake_dir(cmake_file)
+        return self.cmakebin.call(args, build_dir, env=env)
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.CMAKE]
+
+    def log_tried(self) -> str:
+        return self.type_name
+
+    def log_details(self) -> str:
+        modules = [self._original_module_name(x) for x in self.found_modules]
+        modules = sorted(set(modules))
+        if modules:
+            return 'modules: ' + ', '.join(modules)
+        return ''
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+        if cmake and self.traceparser is not None:
+            try:
+                v = self.traceparser.vars[cmake]
+            except KeyError:
+                pass
+            else:
+                if len(v) == 1:
+                    return v[0]
+                elif v:
+                    return v
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'Could not get cmake variable and no default provided for {self!r}')
diff --git a/meson/mesonbuild/dependencies/coarrays.py b/meson/mesonbuild/dependencies/coarrays.py
new file mode 100644
index 000000000..d9af191a7
--- /dev/null
+++ b/meson/mesonbuild/dependencies/coarrays.py
@@ -0,0 +1,90 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import typing as T
+
+from .base import DependencyMethods, detect_compiler, SystemDependency
+from .cmake import CMakeDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+
+if T.TYPE_CHECKING:
+    from .factory import DependencyGenerator
+    from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM})
+def coarray_factory(env: 'Environment',
+                    for_machine: 'MachineChoice',
+                    kwargs: T.Dict[str, T.Any],
+                    methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    fcid = detect_compiler('coarray', env, for_machine, 'fortran').get_id()
+    candidates: T.List['DependencyGenerator'] = []
+
+    if fcid == 'gcc':
+        # OpenCoarrays is the most commonly used method for Fortran Coarray with GCC
+        if DependencyMethods.PKGCONFIG in methods:
+            for pkg in ['caf-openmpi', 'caf']:
+                candidates.append(functools.partial(
+                    PkgConfigDependency, pkg, env, kwargs, language='fortran'))
+
+        if DependencyMethods.CMAKE in methods:
+            if 'modules' not in kwargs:
+                kwargs['modules'] = 'OpenCoarrays::caf_mpi'
+            candidates.append(functools.partial(
+                CMakeDependency, 'OpenCoarrays', env, kwargs, language='fortran'))
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(CoarrayDependency, env, kwargs))
+
+    return candidates
+
+
+class CoarrayDependency(SystemDependency):
+    """
+    Coarrays are a Fortran 2008 feature.
+
+    Coarrays are sometimes implemented via an external library (GCC+OpenCoarrays),
+    while other compilers have built-in support (Cray, IBM, Intel, NAG).
+    Coarrays may be thought of as a high-level language abstraction of
+    low-level MPI calls.
+    """
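+    # Typical meson.build usage (illustrative):
+    #   coarray_dep = dependency('coarray')
+    #   executable('prog', 'main.f90', dependencies : coarray_dep)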
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('coarray', environment, kwargs, language='fortran')
+        kwargs['required'] = False
+        kwargs['silent'] = True
+
+        cid = self.get_compiler().get_id()
+        if cid == 'gcc':
+            # Fallback to single image
+            self.compile_args = ['-fcoarray=single']
+            self.version = 'single image (fallback)'
+            self.is_found = True
+        elif cid == 'intel':
+            # Coarrays are built into Intel compilers, no external library needed
+            self.is_found = True
+            self.link_args = ['-coarray=shared']
+            self.compile_args = self.link_args
+        elif cid == 'intel-cl':
+            # Coarrays are built into Intel compilers, no external library needed
+            self.is_found = True
+            self.compile_args = ['/Qcoarray:shared']
+        elif cid == 'nagfor':
+            # NAG doesn't require any special arguments for Coarray
+            self.is_found = True
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.AUTO, DependencyMethods.CMAKE, DependencyMethods.PKGCONFIG]
diff --git a/meson/mesonbuild/dependencies/configtool.py b/meson/mesonbuild/dependencies/configtool.py
new file mode 100644
index 000000000..623affb2c
--- /dev/null
+++ b/meson/mesonbuild/dependencies/configtool.py
@@ -0,0 +1,178 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, DependencyTypeName
+from ..mesonlib import listify, Popen_safe, split_args, version_compare, version_compare_many
+from ..programs import find_external_program
+from .. import mlog
+import re
+import typing as T
+
+from mesonbuild import mesonlib
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class ConfigToolDependency(ExternalDependency):
+
+    """Class representing dependencies found using a config tool.
+
+    Takes the following extra keys in kwargs that it uses internally:
+    :tools List[str]: A list of tool names to use
+    :version_arg str: The argument to pass to the tool to get its version
+    :returncode_value int: The value of the correct returncode, because some
+        tools do not return 0 on success
+    """
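+    # Minimal subclass sketch (hypothetical 'foo-config' tool, illustration only):
+    #
+    #   class FooConfigToolDependency(ConfigToolDependency):
+    #       tools = ['foo-config']
+    #       tool_name = 'foo-config'
+    #
+    #       def __init__(self, name, environment, kwargs):
+    #           super().__init__(name, environment, kwargs)
+    #           if not self.is_found:
+    #               return
+    #           self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+    #           self.link_args = self.get_config_value(['--libs'], 'link_args')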
+
+    tools: T.Optional[T.List[str]] = None
+    tool_name: T.Optional[str] = None
+    version_arg = '--version'
+    __strip_version = re.compile(r'^[0-9][0-9.]+')
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        super().__init__(DependencyTypeName('config-tool'), environment, kwargs, language=language)
+        self.name = name
+        # You may want to overwrite the class version in some cases
+        self.tools = listify(kwargs.get('tools', self.tools))
+        if not self.tool_name:
+            self.tool_name = self.tools[0]
+        if 'version_arg' in kwargs:
+            self.version_arg = kwargs['version_arg']
+
+        req_version_raw = kwargs.get('version', None)
+        if req_version_raw is not None:
+            req_version = mesonlib.stringlistify(req_version_raw)
+        else:
+            req_version = []
+        tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0))
+        self.config = tool
+        self.is_found = self.report_config(version, req_version)
+        if not self.is_found:
+            self.config = None
+            return
+        self.version = version
+
+    def _sanitize_version(self, version: str) -> str:
+        """Remove any non-numeric, non-point version suffixes."""
+        m = self.__strip_version.match(version)
+        if m:
+            # Ensure that there isn't a trailing '.', such as an input like
+            # `1.2.3.git-1234`
+            return m.group(0).rstrip('.')
+        return version
+
+    def find_config(self, versions: T.List[str], returncode: int = 0) \
+            -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
+        """Helper method that searches for config tool binaries in PATH and
+        returns the one that best matches the given version requirements.
+        """
+        best_match: T.Tuple[T.Optional[T.List[str]], T.Optional[str]] = (None, None)
+        for potential_bin in find_external_program(
+                self.env, self.for_machine, self.tool_name,
+                self.tool_name, self.tools, allow_default_for_cross=False):
+            if not potential_bin.found():
+                continue
+            tool = potential_bin.get_command()
+            try:
+                p, out = Popen_safe(tool + [self.version_arg])[:2]
+            except (FileNotFoundError, PermissionError):
+                continue
+            if p.returncode != returncode:
+                continue
+
+            out = self._sanitize_version(out.strip())
+            # Some tools, like pcap-config, don't supply a version but also don't
+            # fail with --version; in that case just assume that there is only one
+            # version and return it.
+            if not out:
+                return (tool, None)
+            if versions:
+                is_found = version_compare_many(out, versions)[0]
+                # This allows returning a found version without a config tool,
+                # which is useful to inform the user that you found version x,
+                # but y was required.
+                if not is_found:
+                    tool = None
+            if best_match[1]:
+                if version_compare(out, '> {}'.format(best_match[1])):
+                    best_match = (tool, out)
+            else:
+                best_match = (tool, out)
+
+        return best_match
+
+    def report_config(self, version: T.Optional[str], req_version: T.List[str]) -> bool:
+        """Helper method to print messages about the tool."""
+
+        found_msg: T.List[T.Union[str, mlog.AnsiDecorator]] = [mlog.bold(self.tool_name), 'found:']
+
+        if self.config is None:
+            found_msg.append(mlog.red('NO'))
+            if version is not None and req_version:
+                found_msg.append(f'found {version!r} but need {req_version!r}')
+            elif req_version:
+                found_msg.append(f'need {req_version!r}')
+        else:
+            found_msg += [mlog.green('YES'), '({})'.format(' '.join(self.config)), version]
+
+        mlog.log(*found_msg)
+
+        return self.config is not None
+
+    def get_config_value(self, args: T.List[str], stage: str) -> T.List[str]:
+        p, out, err = Popen_safe(self.config + args)
+        if p.returncode != 0:
+            if self.required:
+                raise DependencyException(f'Could not generate {stage} for {self.name}.\n{err}')
+            return []
+        return split_args(out)
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.AUTO, DependencyMethods.CONFIG_TOOL]
+
+    def get_configtool_variable(self, variable_name: str) -> str:
+        p, out, _ = Popen_safe(self.config + [f'--{variable_name}'])
+        if p.returncode != 0:
+            if self.required:
+                raise DependencyException(
+                    'Could not get variable "{}" for dependency {}'.format(
+                        variable_name, self.name))
+        variable = out.strip()
+        mlog.debug(f'Got config-tool variable {variable_name} : {variable}')
+        return variable
+
+    def log_tried(self) -> str:
+        return self.type_name
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+        if configtool:
+            # In the not-required case, '' (empty string) will be returned if the
+            # variable is not found. Since '' is a valid value to return, we
+            # set required to True here to force an error, and use the
+            # finally clause to ensure it is restored.
+            restore = self.required
+            self.required = True
+            try:
+                return self.get_configtool_variable(configtool)
+            except DependencyException:
+                pass
+            finally:
+                self.required = restore
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'Could not get config-tool variable and no default provided for {self!r}')
diff --git a/meson/mesonbuild/dependencies/cuda.py b/meson/mesonbuild/dependencies/cuda.py
new file mode 100644
index 000000000..6bc0f05e3
--- /dev/null
+++ b/meson/mesonbuild/dependencies/cuda.py
@@ -0,0 +1,291 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import glob
+import re
+import os
+import typing as T
+from pathlib import Path
+
+from .. import mesonlib
+from .. import mlog
+from ..environment import detect_cpu_family
+from .base import DependencyException, SystemDependency
+
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from ..compilers import Compiler
+
+TV_ResultTuple = T.Tuple[T.Optional[str], T.Optional[str], bool]
+
+class CudaDependency(SystemDependency):
+
+    supported_languages = ['cuda', 'cpp', 'c'] # see also _default_language
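+    # Typical meson.build usage (illustrative):
+    #   dep = dependency('cuda', version : '>=10.1', modules : ['cublas'])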
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)]
+        language = self._detect_language(compilers)
+        if language not in self.supported_languages:
+            raise DependencyException(f'Language \'{language}\' is not supported by the CUDA Toolkit. Supported languages are {self.supported_languages}.')
+
+        super().__init__('cuda', environment, kwargs, language=language)
+        self.lib_modules: T.Dict[str, T.List[str]] = {}
+        self.requested_modules = self.get_requested(kwargs)
+        if 'cudart' not in self.requested_modules:
+            self.requested_modules = ['cudart'] + self.requested_modules
+
+        (self.cuda_path, self.version, self.is_found) = self._detect_cuda_path_and_version()
+        if not self.is_found:
+            return
+
+        if not os.path.isabs(self.cuda_path):
+            raise DependencyException(f'CUDA Toolkit path must be absolute, got \'{self.cuda_path}\'.')
+
+        # nvcc already knows where to find the CUDA Toolkit, but if we're compiling
+        # a mixed C/C++/CUDA project, we still need to make the include dir searchable
+        if self.language != 'cuda' or len(compilers) > 1:
+            self.incdir = os.path.join(self.cuda_path, 'include')
+            self.compile_args += [f'-I{self.incdir}']
+
+        if self.language != 'cuda':
+            arch_libdir = self._detect_arch_libdir()
+            self.libdir = os.path.join(self.cuda_path, arch_libdir)
+            mlog.debug('CUDA library directory is', mlog.bold(self.libdir))
+        else:
+            self.libdir = None
+
+        self.is_found = self._find_requested_libraries()
+
+    @classmethod
+    def _detect_language(cls, compilers: T.Dict[str, 'Compiler']) -> str:
+        for lang in cls.supported_languages:
+            if lang in compilers:
+                return lang
+        return list(compilers.keys())[0]
+
+    def _detect_cuda_path_and_version(self) -> TV_ResultTuple:
+        self.env_var = self._default_path_env_var()
+        mlog.debug('Default path env var:', mlog.bold(self.env_var))
+
+        version_reqs = self.version_reqs
+        if self.language == 'cuda':
+            nvcc_version = self._strip_patch_version(self.get_compiler().version)
+            mlog.debug('nvcc version:', mlog.bold(nvcc_version))
+            if version_reqs:
+                # make sure nvcc version satisfies specified version requirements
+                (found_some, not_found, found) = mesonlib.version_compare_many(nvcc_version, version_reqs)
+                if not_found:
+                    msg = f'The current nvcc version {nvcc_version} does not satisfy the specified CUDA Toolkit version requirements {version_reqs}.'
+                    return self._report_dependency_error(msg, (None, None, False))
+
+            # use nvcc version to find a matching CUDA Toolkit
+            version_reqs = [f'={nvcc_version}']
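+            # e.g. (illustrative) nvcc 11.2.152 -> nvcc_version '11.2' -> version_reqs ['=11.2']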
+        else:
+            nvcc_version = None
+
+        paths = [(path, self._cuda_toolkit_version(path), default) for (path, default) in self._cuda_paths()]
+        if version_reqs:
+            return self._find_matching_toolkit(paths, version_reqs, nvcc_version)
+
+        defaults = [(path, version) for (path, version, default) in paths if default]
+        if defaults:
+            return (defaults[0][0], defaults[0][1], True)
+
+        platform_msg = 'set the CUDA_PATH environment variable' if self._is_windows() \
+            else 'set the CUDA_PATH environment variable/create the \'/usr/local/cuda\' symbolic link'
+        msg = f'Please specify the desired CUDA Toolkit version (e.g. dependency(\'cuda\', version : \'>=10.1\')) or {platform_msg} to point to the location of your desired version.'
+        return self._report_dependency_error(msg, (None, None, False))
+
+    def _find_matching_toolkit(self, paths: T.List[TV_ResultTuple], version_reqs: T.List[str], nvcc_version: T.Optional[str]) -> TV_ResultTuple:
+        # Keep the default paths order intact; sort the rest in descending order
+        # according to the toolkit version.
+        part_func: T.Callable[[TV_ResultTuple], bool] = lambda t: not t[2]
+        defaults_it, rest_it = mesonlib.partition(part_func, paths)
+        defaults = list(defaults_it)
+        paths = defaults + sorted(rest_it, key=lambda t: mesonlib.Version(t[1]), reverse=True)
+        mlog.debug(f'Search paths: {paths}')
+
+        if nvcc_version and defaults:
+            default_src = f"the {self.env_var} environment variable" if self.env_var else "the \'/usr/local/cuda\' symbolic link"
+            nvcc_warning = 'The default CUDA Toolkit as designated by {} ({}) doesn\'t match the current nvcc version {} and will be ignored.'.format(default_src, os.path.realpath(defaults[0][0]), nvcc_version)
+        else:
+            nvcc_warning = None
+
+        for (path, version, default) in paths:
+            (found_some, not_found, found) = mesonlib.version_compare_many(version, version_reqs)
+            if not not_found:
+                if not default and nvcc_warning:
+                    mlog.warning(nvcc_warning)
+                return (path, version, True)
+
+        if nvcc_warning:
+            mlog.warning(nvcc_warning)
+        return (None, None, False)
+
+    def _default_path_env_var(self) -> T.Optional[str]:
+        env_vars = ['CUDA_PATH'] if self._is_windows() else ['CUDA_PATH', 'CUDA_HOME', 'CUDA_ROOT']
+        env_vars = [var for var in env_vars if var in os.environ]
+        user_defaults = {os.environ[var] for var in env_vars}
+        if len(user_defaults) > 1:
+            mlog.warning('Environment variables {} point to conflicting toolkit locations ({}). Toolkit selection might produce unexpected results.'.format(', '.join(env_vars), ', '.join(user_defaults)))
+        return env_vars[0] if env_vars else None
+
+    def _cuda_paths(self) -> T.List[T.Tuple[str, bool]]:
+        return ([(os.environ[self.env_var], True)] if self.env_var else []) \
+            + (self._cuda_paths_win() if self._is_windows() else self._cuda_paths_nix())
+
+    def _cuda_paths_win(self) -> T.List[T.Tuple[str, bool]]:
+        env_vars = os.environ.keys()
+        return [(os.environ[var], False) for var in env_vars if var.startswith('CUDA_PATH_')]
+
+    def _cuda_paths_nix(self) -> T.List[T.Tuple[str, bool]]:
+        # include /usr/local/cuda default only if no env_var was found
+        pattern = '/usr/local/cuda-*' if self.env_var else '/usr/local/cuda*'
+        return [(path, os.path.basename(path) == 'cuda') for path in glob.iglob(pattern)]
+
+    toolkit_version_regex = re.compile(r'^CUDA Version\s+(.*)$')
+    path_version_win_regex = re.compile(r'^v(.*)$')
+    path_version_nix_regex = re.compile(r'^cuda-(.*)$')
+    cudart_version_regex = re.compile(r'#define\s+CUDART_VERSION\s+([0-9]+)')
+
+    def _cuda_toolkit_version(self, path: str) -> str:
+        version = self._read_toolkit_version_txt(path)
+        if version:
+            return version
+        version = self._read_cuda_runtime_api_version(path)
+        if version:
+            return version
+
+        mlog.debug('Falling back to extracting version from path')
+        path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex
+        try:
+            m = path_version_regex.match(os.path.basename(path))
+            if m:
+                return m.group(1)
+            else:
+                mlog.warning(f'Could not detect CUDA Toolkit version for {path}')
+        except Exception as e:
+            mlog.warning(f'Could not detect CUDA Toolkit version for {path}: {e!s}')
+
+        return '0.0'
+
+    def _read_cuda_runtime_api_version(self, path_str: str) -> T.Optional[str]:
+        path = Path(path_str)
+        for i in path.rglob('cuda_runtime_api.h'):
+            raw = i.read_text(encoding='utf-8')
+            m = self.cudart_version_regex.search(raw)
+            if not m:
+                continue
+            try:
+                vers_int = int(m.group(1))
+            except ValueError:
+                continue
+            # use // for floor instead of / which produces a float
+            major = vers_int // 1000                  # type: int
+            minor = (vers_int - major * 1000) // 10   # type: int
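+            # e.g. (illustrative) CUDART_VERSION 11020 -> major 11, minor 2 -> '11.2'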
+            return f'{major}.{minor}'
+        return None
+
+    def _read_toolkit_version_txt(self, path: str) -> T.Optional[str]:
+        # Read 'version.txt' at the root of the CUDA Toolkit directory to determine the toolkit version.
+        version_file_path = os.path.join(path, 'version.txt')
+        try:
+            with open(version_file_path, encoding='utf-8') as version_file:
+                version_str = version_file.readline() # e.g. 'CUDA Version 10.1.168'
+                m = self.toolkit_version_regex.match(version_str)
+                if m:
+                    return self._strip_patch_version(m.group(1))
+        except Exception as e:
+            mlog.debug(f'Could not read CUDA Toolkit\'s version file {version_file_path}: {e!s}')
+
+        return None
+
+    @classmethod
+    def _strip_patch_version(cls, version: str) -> str:
+        return '.'.join(version.split('.')[:2])
+
+    def _detect_arch_libdir(self) -> str:
+        arch = detect_cpu_family(self.env.coredata.compilers.host)
+        machine = self.env.machines[self.for_machine]
+        msg = '{} architecture is not supported in {} version of the CUDA Toolkit.'
+        if machine.is_windows():
+            libdirs = {'x86': 'Win32', 'x86_64': 'x64'}
+            if arch not in libdirs:
+                raise DependencyException(msg.format(arch, 'Windows'))
+            return os.path.join('lib', libdirs[arch])
+        elif machine.is_linux():
+            libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64', 'loongarch64': 'lib64'}
+            if arch not in libdirs:
+                raise DependencyException(msg.format(arch, 'Linux'))
+            return libdirs[arch]
+        elif machine.is_darwin():
+            libdirs = {'x86_64': 'lib64'}
+            if arch not in libdirs:
+                raise DependencyException(msg.format(arch, 'macOS'))
+            return libdirs[arch]
+        else:
+            raise DependencyException('CUDA Toolkit: unsupported platform.')
+
+    def _find_requested_libraries(self) -> bool:
+        all_found = True
+
+        for module in self.requested_modules:
+            args = self.clib_compiler.find_library(module, self.env, [self.libdir] if self.libdir else [])
+            if args is None:
+                self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'')
+                all_found = False
+            else:
+                mlog.debug(f'Link args for CUDA module \'{module}\' are {args}')
+                self.lib_modules[module] = args
+
+        return all_found
+
+    def _is_windows(self) -> bool:
+        return self.env.machines[self.for_machine].is_windows()
+
+    @T.overload
+    def _report_dependency_error(self, msg: str) -> None: ...
+
+    @T.overload
+    def _report_dependency_error(self, msg: str, ret_val: TV_ResultTuple) -> TV_ResultTuple: ...
+
+    def _report_dependency_error(self, msg: str, ret_val: T.Optional[TV_ResultTuple] = None) -> T.Optional[TV_ResultTuple]:
+        if self.required:
+            raise DependencyException(msg)
+
+        mlog.debug(msg)
+        return ret_val
+
+    def log_details(self) -> str:
+        module_str = ', '.join(self.requested_modules)
+        return 'modules: ' + module_str
+
+    def log_info(self) -> str:
+        return self.cuda_path if self.cuda_path else ''
+
+    def get_requested(self, kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        candidates = mesonlib.extract_as_list(kwargs, 'modules')
+        for c in candidates:
+            if not isinstance(c, str):
+                raise DependencyException('CUDA module argument is not a string.')
+        return candidates
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        args = []
+        if self.libdir:
+            args += self.clib_compiler.get_linker_search_args(self.libdir)
+        for lib in self.requested_modules:
+            args += self.lib_modules[lib]
+        return args
diff --git a/meson/mesonbuild/dependencies/data/CMakeLists.txt b/meson/mesonbuild/dependencies/data/CMakeLists.txt
new file mode 100644
index 000000000..acbf64871
--- /dev/null
+++ b/meson/mesonbuild/dependencies/data/CMakeLists.txt
@@ -0,0 +1,98 @@
+# fail noisily if attempting to use this file without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+while(TRUE)
+  if ("${VERSION}" STREQUAL "")
+    find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+  else()
+    find_package("${NAME}" "${VERSION}" QUIET COMPONENTS ${COMPS})
+  endif()
+
+  # ARCHS has to be set via the CMD interface
+  if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND  OR  ${PACKAGE_NAME}_FOUND)
+  set(PACKAGE_FOUND TRUE)
+
+  # Check the following variables:
+  # FOO_VERSION
+  # Foo_VERSION
+  # FOO_VERSION_STRING
+  # Foo_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED ${_packageName}_VERSION)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+    elseif(DEFINED ${_packageName}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # FOO_LIBRARIES
+  # Foo_LIBRARIES
+  # FOO_LIBS
+  # Foo_LIBS
+  set(libs)
+  if(DEFINED ${_packageName}_LIBRARIES)
+    set(libs ${_packageName}_LIBRARIES)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+    set(libs ${PACKAGE_NAME}_LIBRARIES)
+  elseif(DEFINED ${_packageName}_LIBS)
+    set(libs ${_packageName}_LIBS)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+    set(libs ${PACKAGE_NAME}_LIBS)
+  endif()
+
+  # Check the following variables:
+  # FOO_INCLUDE_DIRS
+  # Foo_INCLUDE_DIRS
+  # FOO_INCLUDES
+  # Foo_INCLUDES
+  # FOO_INCLUDE_DIR
+  # Foo_INCLUDE_DIR
+  set(includes)
+  if(DEFINED ${_packageName}_INCLUDE_DIRS)
+    set(includes ${_packageName}_INCLUDE_DIRS)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+  elseif(DEFINED ${_packageName}_INCLUDES)
+    set(includes ${_packageName}_INCLUDES)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+    set(includes ${PACKAGE_NAME}_INCLUDES)
+  elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+    set(includes ${_packageName}_INCLUDE_DIR)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # FOO_DEFINITIONS
+  # Foo_DEFINITIONS
+  set(definitions)
+  if(DEFINED ${_packageName}_DEFINITIONS)
+    set(definitions ${_packageName}_DEFINITIONS)
+  elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+    set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
diff --git a/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt b/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt
new file mode 100644
index 000000000..9d3e41234
--- /dev/null
+++ b/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt
@@ -0,0 +1,95 @@
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
+
+set(PACKAGE_FOUND FALSE)
+
+while(TRUE)
+  find_package(LLVM REQUIRED CONFIG QUIET)
+
+  # ARCHS has to be set via the CMD interface
+  if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(LLVM_FOUND)
+  set(PACKAGE_FOUND TRUE)
+
+  foreach(mod IN LISTS LLVM_MESON_MODULES)
+    # Reset variables
+    set(out_mods)
+    set(real_mods)
+
+    # Generate a lower and upper case version
+    string(TOLOWER "${mod}" mod_L)
+    string(TOUPPER "${mod}" mod_U)
+
+    # Get the mapped components
+    llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+    list(SORT              out_mods)
+    list(REMOVE_DUPLICATES out_mods)
+
+    # Make sure that the modules exist
+    foreach(i IN LISTS out_mods)
+      if(TARGET ${i})
+        list(APPEND real_mods ${i})
+      endif()
+    endforeach()
+
+    # Set the output variables
+    set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+    foreach(i IN LISTS real_mods)
+      set(MESON_TARGET_TO_LLVM_${i} ${mod})
+    endforeach()
+  endforeach()
+
+  # Check the following variables:
+  # LLVM_PACKAGE_VERSION
+  # LLVM_VERSION
+  # LLVM_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED LLVM_PACKAGE_VERSION)
+      set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+    elseif(DEFINED LLVM_VERSION)
+      set(PACKAGE_VERSION "${LLVM_VERSION}")
+    elseif(DEFINED LLVM_VERSION_STRING)
+      set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # LLVM_LIBRARIES
+  # LLVM_LIBS
+  set(libs)
+  if(DEFINED LLVM_LIBRARIES)
+    set(libs LLVM_LIBRARIES)
+  elseif(DEFINED LLVM_LIBS)
+    set(libs LLVM_LIBS)
+  endif()
+
+  # Check the following variables:
+  # LLVM_INCLUDE_DIRS
+  # LLVM_INCLUDES
+  # LLVM_INCLUDE_DIR
+  set(includes)
+  if(DEFINED LLVM_INCLUDE_DIRS)
+    set(includes LLVM_INCLUDE_DIRS)
+  elseif(DEFINED LLVM_INCLUDES)
+    set(includes LLVM_INCLUDES)
+  elseif(DEFINED LLVM_INCLUDE_DIR)
+    set(includes LLVM_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # LLVM_DEFINITIONS
+  set(definitions)
+  if(DEFINED LLVM_DEFINITIONS)
+    set(definitions LLVM_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
diff --git a/meson/mesonbuild/dependencies/data/CMakePathInfo.txt b/meson/mesonbuild/dependencies/data/CMakePathInfo.txt
new file mode 100644
index 000000000..662ec5836
--- /dev/null
+++ b/meson/mesonbuild/dependencies/data/CMakePathInfo.txt
@@ -0,0 +1,31 @@
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+  file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+  foreach(dir ${implicit_dirs})
+    if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+      list(APPEND LIB_ARCH_LIST "${dir}")
+    endif()
+  endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
diff --git a/meson/mesonbuild/dependencies/detect.py b/meson/mesonbuild/dependencies/detect.py
new file mode 100644
index 000000000..c6865d54c
--- /dev/null
+++ b/meson/mesonbuild/dependencies/detect.py
@@ -0,0 +1,226 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import Dependency, ExternalDependency, DependencyException, DependencyMethods, NotFoundDependency
+from .cmake import CMakeDependency
+from .dub import DubDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+
+from ..mesonlib import listify, MachineChoice, PerMachine
+from .. import mlog
+import functools
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from .factory import DependencyFactory, WrappedFactoryFunc, DependencyGenerator
+
+# These must be defined in this file to avoid cyclical references.
+packages: T.Dict[
+    str,
+    T.Union[T.Type[ExternalDependency], 'DependencyFactory', 'WrappedFactoryFunc']
+] = {}
+_packages_accept_language: T.Set[str] = set()
+
+if T.TYPE_CHECKING:
+    TV_DepIDEntry = T.Union[str, bool, int, T.Tuple[str, ...]]
+    TV_DepID = T.Tuple[T.Tuple[str, TV_DepIDEntry], ...]
+
+
+def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
+    identifier: 'TV_DepID' = (('name', name), )
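+    # The final identifier is a tuple of (key, value) pairs, e.g. (illustrative,
+    # hypothetical kwargs):
+    #   (('name', 'zlib'), ('static', True), ('modules', frozenset({'foo'})))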
+    from ..interpreter import permitted_dependency_kwargs
+    assert len(permitted_dependency_kwargs) == 19, \
+           'Extra kwargs have been added to dependency(), please review if it makes sense to handle it here'
+    for key, value in kwargs.items():
+        # 'version' is irrelevant for caching; the caller must check version matches
+        # 'native' is handled above with `for_machine`
+        # 'required' is irrelevant for caching; the caller handles it separately
+        # 'fallback' and 'allow_fallback' are not part of the cache because,
+        #     once a dependency has been found through a fallback, it should
+        #     be used for the rest of the Meson run.
+        # 'default_options' is only used in fallback case
+        # 'not_found_message' has no impact on the dependency lookup
+        # 'include_type' is handled after the dependency lookup
+        if key in ('version', 'native', 'required', 'fallback', 'allow_fallback', 'default_options',
+                   'not_found_message', 'include_type'):
+            continue
+        # All keyword arguments are strings, ints, or lists (or lists of lists)
+        if isinstance(value, list):
+            value = frozenset(listify(value))
+            for i in value:
+                assert isinstance(i, str)
+        else:
+            assert isinstance(value, (str, bool, int))
+        identifier += ((key, value),)
+    return identifier
+
+display_name_map = {
+    'boost': 'Boost',
+    'cuda': 'CUDA',
+    'dub': 'DUB',
+    'gmock': 'GMock',
+    'gtest': 'GTest',
+    'hdf5': 'HDF5',
+    'llvm': 'LLVM',
+    'mpi': 'MPI',
+    'netcdf': 'NetCDF',
+    'openmp': 'OpenMP',
+    'wxwidgets': 'WxWidgets',
+}
+
+def find_external_dependency(name: str, env: 'Environment', kwargs: T.Dict[str, object]) -> T.Union['ExternalDependency', NotFoundDependency]:
+    assert(name)
+    required = kwargs.get('required', True)
+    if not isinstance(required, bool):
+        raise DependencyException('Keyword "required" must be a boolean.')
+    if not isinstance(kwargs.get('method', ''), str):
+        raise DependencyException('Keyword "method" must be a string.')
+    lname = name.lower()
+    if lname not in _packages_accept_language and 'language' in kwargs:
+        raise DependencyException(f'{name} dependency does not accept "language" keyword argument')
+    if not isinstance(kwargs.get('version', ''), (str, list)):
+        raise DependencyException('Keyword "version" must be a string or list.')
+
+    # display the dependency name with correct casing
+    display_name = display_name_map.get(lname, lname)
+
+    for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
+
+    type_text = PerMachine('Build-time', 'Run-time')[for_machine] + ' dependency'
+
+    # build a list of dependency methods to try
+    candidates = _build_external_dependency_list(name, env, for_machine, kwargs)
+
+    pkg_exc: T.List[DependencyException] = []
+    pkgdep:  T.List[ExternalDependency]  = []
+    details = ''
+
+    for c in candidates:
+        # try this dependency method
+        try:
+            d = c()
+            d._check_version()
+            pkgdep.append(d)
+        except DependencyException as e:
+            pkg_exc.append(e)
+            mlog.debug(str(e))
+        else:
+            pkg_exc.append(None)
+            details = d.log_details()
+            if details:
+                details = '(' + details + ') '
+            if 'language' in kwargs:
+                details += 'for ' + d.language + ' '
+
+            # if the dependency was found
+            if d.found():
+
+                info: mlog.TV_LoggableList = []
+                if d.version:
+                    info.append(mlog.normal_cyan(d.version))
+
+                log_info = d.log_info()
+                if log_info:
+                    info.append('(' + log_info + ')')
+
+                mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.green('YES'), *info)
+
+                return d
+
+    # otherwise, the dependency could not be found
+    tried_methods = [d.log_tried() for d in pkgdep if d.log_tried()]
+    if tried_methods:
+        tried = '{}'.format(mlog.format_list(tried_methods))
+    else:
+        tried = ''
+
+    mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.red('NO'),
+             f'(tried {tried})' if tried else '')
+
+    if required:
+        # if an exception occurred with the first detection method, re-raise it
+        # (on the grounds that it came from the preferred dependency detection
+        # method)
+        if pkg_exc and pkg_exc[0]:
+            raise pkg_exc[0]
+
+        # we have a list of failed ExternalDependency objects, so we can report
+        # the methods we tried to find the dependency
+        raise DependencyException('Dependency "%s" not found' % (name) +
+                                  (', tried %s' % (tried) if tried else ''))
+
+    return NotFoundDependency(env)
+
+
+def _build_external_dependency_list(name: str, env: 'Environment', for_machine: MachineChoice,
+                                    kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
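+    # Illustrative meson.build calls this dispatch handles (hypothetical name 'foo'):
+    #   dependency('foo')                     # 'auto': pkg-config, then (on macOS) framework, then CMake
+    #   dependency('foo', method : 'cmake')   # use only the CMake detector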
+    # First check if the method is valid
+    if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]:
+        raise DependencyException('method {!r} is invalid'.format(kwargs['method']))
+
+    # Is there a specific dependency detector for this dependency?
+    lname = name.lower()
+    if lname in packages:
+        # Create the list of dependency object constructors using a factory
+        # class method, if one exists, otherwise the list just consists of the
+        # constructor
+        if isinstance(packages[lname], type):
+            entry1 = T.cast(T.Type[ExternalDependency], packages[lname])  # mypy doesn't understand isinstance(..., type)
+            if issubclass(entry1, ExternalDependency):
+                # TODO: somehow make mypy understand that entry1(env, kwargs) is OK...
+                func: T.Callable[[], 'ExternalDependency'] = lambda: entry1(env, kwargs)  # type: ignore
+                dep = [func]
+        else:
+            entry2 = T.cast(T.Union['DependencyFactory', 'WrappedFactoryFunc'], packages[lname])
+            dep = entry2(env, for_machine, kwargs)
+        return dep
+
+    candidates: T.List['DependencyGenerator'] = []
+
+    # If it's explicitly requested, use the dub detection method (only)
+    if 'dub' == kwargs.get('method', ''):
+        candidates.append(functools.partial(DubDependency, name, env, kwargs))
+        return candidates
+
+    # If it's explicitly requested, use the pkgconfig detection method (only)
+    if 'pkg-config' == kwargs.get('method', ''):
+        candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
+        return candidates
+
+    # If it's explicitly requested, use the CMake detection method (only)
+    if 'cmake' == kwargs.get('method', ''):
+        candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
+        return candidates
+
+    # If it's explicitly requested, use the Extraframework detection method (only)
+    if 'extraframework' == kwargs.get('method', ''):
+        # On OSX, also try framework dependency detector
+        if env.machines[for_machine].is_darwin():
+            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
+        return candidates
+
+    # Otherwise, just use the pkgconfig and cmake dependency detector
+    if 'auto' == kwargs.get('method', 'auto'):
+        candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
+
+        # On OSX, also try framework dependency detector
+        if env.machines[for_machine].is_darwin():
+            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
+
+        # Only use CMake as a last resort, since it might not work 100% (see #6113)
+        candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
+
+    return candidates
diff --git a/meson/mesonbuild/dependencies/dev.py b/meson/mesonbuild/dependencies/dev.py
new file mode 100644
index 000000000..7300e2fe7
--- /dev/null
+++ b/meson/mesonbuild/dependencies/dev.py
@@ -0,0 +1,595 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies useful for
+# development purposes, such as testing, debugging, etc.
+
+import glob
+import os
+import re
+import pathlib
+import shutil
+import typing as T
+
+from .. import mesonlib, mlog
+from ..compilers import AppleClangCCompiler, AppleClangCPPCompiler, detect_compiler_for
+from ..environment import get_llvm_tool_names
+from ..mesonlib import version_compare, stringlistify, extract_as_list, MachineChoice
+from .base import DependencyException, DependencyMethods, strip_system_libdirs, SystemDependency
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .factory import DependencyFactory
+from .misc import threads_factory
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+
+
+def get_shared_library_suffix(environment: 'Environment', for_machine: MachineChoice) -> str:
+    """This is only guaranteed to work for languages that compile to machine
+    code, not for languages like C# that use bytecode and always end in .dll.
+    """
+    m = environment.machines[for_machine]
+    if m.is_windows():
+        return '.dll'
+    elif m.is_darwin():
+        return '.dylib'
+    return '.so'
+
+
+class GTestDependencySystem(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs, language='cpp')
+        self.main = kwargs.get('main', False)
+        self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
+        self.detect()
+
+    def detect(self) -> None:
+        gtest_detect = self.clib_compiler.find_library("gtest", self.env, [])
+        gtest_main_detect = self.clib_compiler.find_library("gtest_main", self.env, [])
+        if gtest_detect and (not self.main or gtest_main_detect):
+            self.is_found = True
+            self.compile_args = []
+            self.link_args = gtest_detect
+            if self.main:
+                self.link_args += gtest_main_detect
+            self.sources = []
+            self.prebuilt = True
+        elif self.detect_srcdir():
+            self.is_found = True
+            self.compile_args = ['-I' + d for d in self.src_include_dirs]
+            self.link_args = []
+            if self.main:
+                self.sources = [self.all_src, self.main_src]
+            else:
+                self.sources = [self.all_src]
+            self.prebuilt = False
+        else:
+            self.is_found = False
+
+    def detect_srcdir(self) -> bool:
+        for s in self.src_dirs:
+            if os.path.exists(s):
+                self.src_dir = s
+                self.all_src = mesonlib.File.from_absolute_file(
+                    os.path.join(self.src_dir, 'gtest-all.cc'))
+                self.main_src = mesonlib.File.from_absolute_file(
+                    os.path.join(self.src_dir, 'gtest_main.cc'))
+                self.src_include_dirs = [os.path.normpath(os.path.join(self.src_dir, '..')),
+                                         os.path.normpath(os.path.join(self.src_dir, '../include')),
+                                         ]
+                return True
+        return False
+
+    def log_info(self) -> str:
+        if self.prebuilt:
+            return 'prebuilt'
+        else:
+            return 'building self'
+
+    def log_tried(self) -> str:
+        return 'system'
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+
+
+class GTestDependencyPC(PkgConfigDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        assert name == 'gtest'
+        if kwargs.get('main'):
+            name = 'gtest_main'
+        super().__init__(name, environment, kwargs)
+
+
+class GMockDependencySystem(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs, language='cpp')
+        self.main = kwargs.get('main', False)
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
+
+        # If we are getting main() from GMock, we definitely
+        # want to avoid linking in main() from GTest
+        gtest_kwargs = kwargs.copy()
+        if self.main:
+            gtest_kwargs['main'] = False
+
+        # GMock without GTest is pretty much useless;
+        # this also mimics the structure given in WrapDB,
+        # where GMock always pulls in GTest
+        found = self._add_sub_dependency(gtest_factory(environment, self.for_machine, gtest_kwargs))
+        if not found:
+            self.is_found = False
+            return
+
+        # GMock may be a library or just source.
+        # Work with both.
+        gmock_detect = self.clib_compiler.find_library("gmock", self.env, [])
+        gmock_main_detect = self.clib_compiler.find_library("gmock_main", self.env, [])
+        if gmock_detect and (not self.main or gmock_main_detect):
+            self.is_found = True
+            self.link_args += gmock_detect
+            if self.main:
+                self.link_args += gmock_main_detect
+            self.prebuilt = True
+            return
+
+        for d in ['/usr/src/googletest/googlemock/src', '/usr/src/gmock/src', '/usr/src/gmock']:
+            if os.path.exists(d):
+                self.is_found = True
+                # Yes, we need both because there are multiple
+                # versions of gmock that do different things.
+                d2 = os.path.normpath(os.path.join(d, '..'))
+                self.compile_args += ['-I' + d, '-I' + d2, '-I' + os.path.join(d2, 'include')]
+                all_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock-all.cc'))
+                main_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock_main.cc'))
+                if self.main:
+                    self.sources += [all_src, main_src]
+                else:
+                    self.sources += [all_src]
+                self.prebuilt = False
+                return
+
+        self.is_found = False
+
+    def log_info(self) -> str:
+        if self.prebuilt:
+            return 'prebuilt'
+        else:
+            return 'building self'
+
+    def log_tried(self) -> str:
+        return 'system'
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+
+
+class GMockDependencyPC(PkgConfigDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        assert name == 'gmock'
+        if kwargs.get('main'):
+            name = 'gmock_main'
+        super().__init__(name, environment, kwargs)
+
+
+class LLVMDependencyConfigTool(ConfigToolDependency):
+    """
+    LLVM uses a special tool, llvm-config, which has arguments for getting
+    c args, cxx args, and ldargs as well as version.
+    """
+    tool_name = 'llvm-config'
+    __cpp_blacklist = {'-DNDEBUG'}
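+    # Illustrative llvm-config queries wrapped by this class (output varies by install):
+    #   llvm-config --version     -> e.g. '12.0.1'
+    #   llvm-config --components  -> space-separated list of available modules
+    #   llvm-config --cppflags --libs --ldflags --system-libs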
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        self.tools = get_llvm_tool_names('llvm-config')
+
+        # Starting with Fedora 30, Fedora adds a suffix with the number of
+        # bits in the ISA that LLVM targets: for example, on x86_64 and
+        # aarch64 the name will be llvm-config-64, while on x86 and arm
+        # it will be llvm-config-32.
+        if environment.machines[self.get_for_machine_from_kwargs(kwargs)].is_64_bit:
+            self.tools.append('llvm-config-64')
+        else:
+            self.tools.append('llvm-config-32')
+
+        # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
+        # the C linker works fine if only using the C API.
+        super().__init__(name, environment, kwargs, language='cpp')
+        self.provided_modules: T.List[str] = []
+        self.required_modules: mesonlib.OrderedSet[str]  = mesonlib.OrderedSet()
+        self.module_details:   T.List[str] = []
+        if not self.is_found:
+            return
+
+        self.provided_modules = self.get_config_value(['--components'], 'modules')
+        modules = stringlistify(extract_as_list(kwargs, 'modules'))
+        self.check_components(modules)
+        opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
+        self.check_components(opt_modules, required=False)
+
+        cargs = mesonlib.OrderedSet(self.get_config_value(['--cppflags'], 'compile_args'))
+        self.compile_args = list(cargs.difference(self.__cpp_blacklist))
+
+        if version_compare(self.version, '>= 3.9'):
+            self._set_new_link_args(environment)
+        else:
+            self._set_old_link_args()
+        self.link_args = strip_system_libdirs(environment, self.for_machine, self.link_args)
+        self.link_args = self.__fix_bogus_link_args(self.link_args)
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
+
+    def __fix_bogus_link_args(self, args: T.List[str]) -> T.List[str]:
+        """This function attempts to fix bogus link arguments that llvm-config
+        generates.
+
+        Currently it works around the following:
+            - FreeBSD: when statically linking -l/usr/lib/libexecinfo.so will
+              be generated, strip the -l in cases like this.
+            - Windows: We may get -LIBPATH:... which is later interpreted as
+              "-L IBPATH:...". If we're using an msvc-like compiler, convert
+              that to "/LIBPATH", otherwise to "-L ...".
+        """
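+        # Illustrative transformations:
+        #   '-l/usr/lib/libexecinfo.so' -> '/usr/lib/libexecinfo.so'
+        #   '-LIBPATH:C:/LLVM/lib'      -> compiler linker-search args for 'C:/LLVM/lib'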
+
+        new_args = []
+        for arg in args:
+            if arg.startswith('-l') and arg.endswith('.so'):
+                new_args.append(arg.lstrip('-l'))
+            elif arg.startswith('-LIBPATH:'):
+                cpp = self.env.coredata.compilers[self.for_machine]['cpp']
+                new_args.extend(cpp.get_linker_search_args(arg.lstrip('-LIBPATH:')))
+            else:
+                new_args.append(arg)
+        return new_args
+
+    def __check_libfiles(self, shared: bool) -> None:
+        """Use llvm-config's --libfiles to check if libraries exist."""
+        mode = '--link-shared' if shared else '--link-static'
+
+        # Set self.required to true to force an exception in get_config_value
+        # if the returncode != 0
+        restore = self.required
+        self.required = True
+
+        try:
+            # It doesn't matter what the stage is; the caller needs to catch
+            # the exception anyway.
+            self.link_args = self.get_config_value(['--libfiles', mode], '')
+        finally:
+            self.required = restore
+
+    def _set_new_link_args(self, environment: 'Environment') -> None:
+        """How to set linker args for LLVM versions >= 3.9"""
+        try:
+            mode = self.get_config_value(['--shared-mode'], 'link_args')[0]
+        except IndexError:
+            mlog.debug('llvm-config --shared-mode returned an error')
+            self.is_found = False
+            return
+
+        if not self.static and mode == 'static':
+            # If llvm is configured with LLVM_BUILD_LLVM_DYLIB but not with
+            # LLVM_LINK_LLVM_DYLIB and not LLVM_BUILD_SHARED_LIBS (which
+            # upstream doesn't recommend using), then llvm-config will lie to
+            # you about how to do shared-linking. It wants to link to a bunch
+            # of individual shared libs (which don't exist because llvm wasn't
+            # built with LLVM_BUILD_SHARED_LIBS).
+            #
+            # Therefore, we'll try to get the libfiles; if that fails (non-zero
+            # return code or an empty list), we'll try to build a working
+            # configuration by hand.
+            try:
+                self.__check_libfiles(True)
+            except DependencyException:
+                lib_ext = get_shared_library_suffix(environment, self.for_machine)
+                libdir = self.get_config_value(['--libdir'], 'link_args')[0]
+                # Sort for reproducibility
+                matches = sorted(glob.iglob(os.path.join(libdir, f'libLLVM*{lib_ext}')))
+                if not matches:
+                    if self.required:
+                        raise
+                    self.is_found = False
+                    return
+
+                self.link_args = self.get_config_value(['--ldflags'], 'link_args')
+                libname = os.path.basename(matches[0]).rstrip(lib_ext).lstrip('lib')
+                self.link_args.append(f'-l{libname}')
+                return
+        elif self.static and mode == 'shared':
+            # If, however, LLVM_BUILD_SHARED_LIBS is true (*cough* gentoo *cough*),
+            # then this is correct. Building with LLVM_BUILD_SHARED_LIBS has a side
+            # effect: it stops the generation of static archives. Therefore we need
+            # to check for that and error out on static if this is the case.
+            try:
+                self.__check_libfiles(False)
+            except DependencyException:
+                if self.required:
+                    raise
+                self.is_found = False
+                return
+
+        link_args = ['--link-static', '--system-libs'] if self.static else ['--link-shared']
+        self.link_args = self.get_config_value(
+            ['--libs', '--ldflags'] + link_args + list(self.required_modules),
+            'link_args')
+
+    def _set_old_link_args(self) -> None:
+        """Setting linker args for older versions of llvm.
+
+        Old versions of LLVM bring an extra level of insanity with them.
+        llvm-config will provide the correct arguments for static linking, but
+        not for shared linking; we have to figure those out ourselves, because
+        of course we do.
+        """
+        if self.static:
+            self.link_args = self.get_config_value(
+                ['--libs', '--ldflags', '--system-libs'] + list(self.required_modules),
+                'link_args')
+        else:
+            # llvm-config only provides arguments for static linking, so we
+            # have to figure out for ourselves what to link with. We'll do that
+            # by checking in the directory provided by --libdir for a library
+            # called libLLVM-<version>.(so|dll|dylib)
+            libdir = self.get_config_value(['--libdir'], 'link_args')[0]
+
+            expected_name = f'libLLVM-{self.version}'
+            re_name = re.compile(fr'{expected_name}.(so|dll|dylib)$')
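+            # e.g. with a self.version of '7.0.0' (version assumed for
+            # illustration) this matches 'libLLVM-7.0.0.so'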
+
+            for file_ in os.listdir(libdir):
+                if re_name.match(file_):
+                    self.link_args = [f'-L{libdir}',
+                                      '-l{}'.format(os.path.splitext(file_.lstrip('lib'))[0])]
+                    break
+            else:
+                raise DependencyException(
+                    'Could not find a dynamically linkable library for LLVM.')
+
+    def check_components(self, modules: T.List[str], required: bool = True) -> None:
+        """Check for llvm components (modules in meson terms).
+
+        The required option is whether the module is required, not whether LLVM
+        is required.
+        """
+        for mod in sorted(set(modules)):
+            status = ''
+
+            if mod not in self.provided_modules:
+                if required:
+                    self.is_found = False
+                    if self.required:
+                        raise DependencyException(
+                            f'Could not find required LLVM Component: {mod}')
+                    status = '(missing)'
+                else:
+                    status = '(missing but optional)'
+            else:
+                self.required_modules.add(mod)
+
+            self.module_details.append(mod + status)
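+            # Entries end up looking like e.g. 'core', 'foo(missing)' or
+            # 'bar(missing but optional)' (module names here are illustrative).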
+
+    def log_details(self) -> str:
+        if self.module_details:
+            return 'modules: ' + ', '.join(self.module_details)
+        return ''
+
+class LLVMDependencyCMake(CMakeDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        self.llvm_modules = stringlistify(extract_as_list(kwargs, 'modules'))
+        self.llvm_opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
+        super().__init__(name, env, kwargs, language='cpp')
+
+        # CMake will always create a statically linked binary, so don't use
+        # CMake if dynamic linking is required
+        if not self.static:
+            self.is_found = False
+            mlog.warning('Ignoring LLVM CMake dependency because dynamic was requested')
+            return
+
+        if self.traceparser is None:
+            return
+
+        # Extract extra include directories and definitions
+        inc_dirs = self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS')
+        defs = self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS')
+        # LLVM explicitly uses space-separated variables rather than semicolon lists
+        if len(defs) == 1:
+            defs = defs[0].split(' ')
+        temp = ['-I' + x for x in inc_dirs] + defs
+        self.compile_args += [x for x in temp if x not in self.compile_args]
+        if not self._add_sub_dependency(threads_factory(env, self.for_machine, {})):
+            self.is_found = False
+            return
+
+    def _main_cmake_file(self) -> str:
+        # Use a custom CMakeLists.txt for LLVM
+        return 'CMakeListsLLVM.txt'
+
+    def _extra_cmake_opts(self) -> T.List[str]:
+        return ['-DLLVM_MESON_MODULES={}'.format(';'.join(self.llvm_modules + self.llvm_opt_modules))]
+
+    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+        res = []
+        for mod, required in modules:
+            cm_targets = self.traceparser.get_cmake_var(f'MESON_LLVM_TARGETS_{mod}')
+            if not cm_targets:
+                if required:
+                    raise self._gen_exception(f'LLVM module {mod} was not found')
+                else:
+                    mlog.warning('Optional LLVM module', mlog.bold(mod), 'was not found')
+                    continue
+            for i in cm_targets:
+                res += [(i, required)]
+        return res
+
+    def _original_module_name(self, module: str) -> str:
+        orig_name = self.traceparser.get_cmake_var(f'MESON_TARGET_TO_LLVM_{module}')
+        if orig_name:
+            return orig_name[0]
+        return module
+
+
+class ValgrindDependency(PkgConfigDependency):
+    '''
+    Consumers of Valgrind usually only need the compile args and do not want to
+    link to its (static) libraries.
+    '''
+    def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('valgrind', env, kwargs)
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        return []
+
+
+class ZlibSystemDependency(SystemDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+
+        m = self.env.machines[self.for_machine]
+
+        # I'm not sure this is entirely correct. What if we're cross compiling
+        # from something to macOS?
+        if ((m.is_darwin() and isinstance(self.clib_compiler, (AppleClangCCompiler, AppleClangCPPCompiler))) or
+                m.is_freebsd() or m.is_dragonflybsd()):
+            # No need to set includes,
+            # on macos xcode/clang will do that for us.
+            # on freebsd zlib.h is in /usr/include
+
+            self.is_found = True
+            self.link_args = ['-lz']
+        elif m.is_windows():
+            # Without a clib_compiler we can't find zlib, so just give up.
+            if self.clib_compiler is None:
+                self.is_found = False
+                return
+
+            if self.clib_compiler.get_argument_syntax() == 'msvc':
+                libs = ['zlib1', 'zlib']
+            else:
+                libs = ['z']
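+            # Note: 'zlib1' typically matches the import library of the
+            # official zlib1.dll builds on Windows, while 'z' is the usual
+            # MinGW/Unix-style name.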
+            for lib in libs:
+                l = self.clib_compiler.find_library(lib, environment, [])
+                h = self.clib_compiler.has_header('zlib.h', '', environment, dependencies=[self])
+                if l and h[0]:
+                    self.is_found = True
+                    self.link_args = l
+                    break
+            else:
+                return
+        else:
+            mlog.debug(f'Unsupported OS {m.system}')
+            return
+
+        v, _ = self.clib_compiler.get_define('ZLIB_VERSION', '#include <zlib.h>', self.env, [], [self])
+        self.version = v.strip('"')
+
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.SYSTEM]
+
+
+class JDKSystemDependency(SystemDependency):
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('jdk', environment, kwargs)
+
+        m = self.env.machines[self.for_machine]
+
+        if 'java' not in environment.coredata.compilers[self.for_machine]:
+            detect_compiler_for(environment, 'java', self.for_machine)
+        self.javac = environment.coredata.compilers[self.for_machine]['java']
+        self.version = self.javac.version
+
+        if 'version' in kwargs and not version_compare(self.version, kwargs['version']):
+            mlog.error(f'Incorrect JDK version found ({self.version}), wanted {kwargs["version"]}')
+            self.is_found = False
+            return
+
+        self.java_home = environment.properties[self.for_machine].get_java_home()
+        if not self.java_home:
+            self.java_home = pathlib.Path(shutil.which(self.javac.exelist[0])).resolve().parents[1]
+
+        platform_include_dir = self.__machine_info_to_platform_include_dir(m)
+        if platform_include_dir is None:
+            mlog.error("Could not find a JDK platform include directory for your OS, please open an issue or provide a pull request.")
+            self.is_found = False
+            return
+
+        java_home_include = self.java_home / 'include'
+        self.compile_args.append(f'-I{java_home_include}')
+        self.compile_args.append(f'-I{java_home_include / platform_include_dir}')
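+        # e.g. with an assumed JAVA_HOME of /usr/lib/jvm/java-11 on Linux this
+        # adds -I/usr/lib/jvm/java-11/include and -I/usr/lib/jvm/java-11/include/linux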
+        self.is_found = True
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.SYSTEM]
+
+    @staticmethod
+    def __machine_info_to_platform_include_dir(m: 'MachineInfo') -> T.Optional[str]:
+        """Translates the machine information to the platform-dependent include directory
+
+        When inspecting a JDK release tarball or $JAVA_HOME, inside the `include/` directory is a
+        platform dependent folder that must be on the target's include path in addition to the
+        parent `include/` directory.
+        """
+        if m.is_linux():
+            return 'linux'
+        elif m.is_windows():
+            return 'win32'
+        elif m.is_darwin():
+            return 'darwin'
+
+        return None
+
+
+llvm_factory = DependencyFactory(
+    'LLVM',
+    [DependencyMethods.CMAKE, DependencyMethods.CONFIG_TOOL],
+    cmake_class=LLVMDependencyCMake,
+    configtool_class=LLVMDependencyConfigTool,
+)
+
+gtest_factory = DependencyFactory(
+    'gtest',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    pkgconfig_class=GTestDependencyPC,
+    system_class=GTestDependencySystem,
+)
+
+gmock_factory = DependencyFactory(
+    'gmock',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    pkgconfig_class=GMockDependencyPC,
+    system_class=GMockDependencySystem,
+)
+
+zlib_factory = DependencyFactory(
+    'zlib',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM],
+    cmake_name='ZLIB',
+    system_class=ZlibSystemDependency,
+)
diff --git a/meson/mesonbuild/dependencies/dub.py b/meson/mesonbuild/dependencies/dub.py
new file mode 100644
index 000000000..8dfb4869d
--- /dev/null
+++ b/meson/mesonbuild/dependencies/dub.py
@@ -0,0 +1,240 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, DependencyTypeName
+from .pkgconfig import PkgConfigDependency
+from ..mesonlib import Popen_safe
+from ..programs import ExternalProgram
+from ..compilers import DCompiler
+from .. import mlog
+import re
+import os
+import copy
+import json
+import platform
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class DubDependency(ExternalDependency):
+    class_dubbin = None
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(DependencyTypeName('dub'), environment, kwargs, language='d')
+        self.name = name
+        self.module_path: T.Optional[str] = None
+
+        _temp_comp = super().get_compiler()
+        assert isinstance(_temp_comp, DCompiler)
+        self.compiler = _temp_comp
+
+        if 'required' in kwargs:
+            self.required = kwargs.get('required')
+
+        if DubDependency.class_dubbin is None:
+            self.dubbin = self._check_dub()
+            DubDependency.class_dubbin = self.dubbin
+        else:
+            self.dubbin = DubDependency.class_dubbin
+
+        if not self.dubbin:
+            if self.required:
+                raise DependencyException('DUB not found.')
+            self.is_found = False
+            return
+
+        assert isinstance(self.dubbin, ExternalProgram)
+        mlog.debug('Determining dependency {!r} with DUB executable '
+                   '{!r}'.format(name, self.dubbin.get_path()))
+
+        # we need to know the target architecture
+        arch = self.compiler.arch
+
+        # Ask dub for the package
+        ret, res = self._call_dubbin(['describe', name, '--arch=' + arch])
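+        # i.e. this runs something like `dub describe vibe-d --arch=x86_64`
+        # (package name and architecture here are purely illustrative)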
+
+        if ret != 0:
+            self.is_found = False
+            return
+
+        comp = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
+        packages = []
+        description = json.loads(res)
+        for package in description['packages']:
+            packages.append(package['name'])
+            if package['name'] == name:
+                self.is_found = True
+
+                not_lib = True
+                if 'targetType' in package:
+                    if package['targetType'] in ['library', 'sourceLibrary', 'staticLibrary', 'dynamicLibrary']:
+                        not_lib = False
+
+                if not_lib:
+                    mlog.error(mlog.bold(name), "found but it isn't a library")
+                    self.is_found = False
+                    return
+
+                self.module_path = self._find_right_lib_path(package['path'], comp, description, True, package['targetFileName'])
+                if not os.path.exists(self.module_path):
+                    # check if the dependency was built for other archs
+                    archs = [['x86_64'], ['x86'], ['x86', 'x86_mscoff']]
+                    for a in archs:
+                        description_a = copy.deepcopy(description)
+                        description_a['architecture'] = a
+                        arch_module_path = self._find_right_lib_path(package['path'], comp, description_a, True, package['targetFileName'])
+                        if arch_module_path:
+                            mlog.error(mlog.bold(name), "found but it wasn't compiled for", mlog.bold(arch))
+                            self.is_found = False
+                            return
+
+                    mlog.error(mlog.bold(name), "found but it wasn't compiled with", mlog.bold(comp))
+                    self.is_found = False
+                    return
+
+                self.version = package['version']
+                self.pkg = package
+
+        if self.pkg['targetFileName'].endswith('.a'):
+            self.static = True
+
+        self.compile_args = []
+        for flag in self.pkg['dflags']:
+            self.compile_args.append(flag)
+        for path in self.pkg['importPaths']:
+            self.compile_args.append('-I' + os.path.join(self.pkg['path'], path))
+
+        self.link_args = self.raw_link_args = []
+        for flag in self.pkg['lflags']:
+            self.link_args.append(flag)
+
+        self.link_args.append(os.path.join(self.module_path, self.pkg['targetFileName']))
+
+        # Handle dependencies
+        libs = []
+
+        def add_lib_args(field_name: str, target: T.Dict[str, T.Dict[str, str]]) -> None:
+            if field_name in target['buildSettings']:
+                for lib in target['buildSettings'][field_name]:
+                    if lib not in libs:
+                        libs.append(lib)
+                        if os.name != 'nt':
+                            pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
+                            for arg in pkgdep.get_compile_args():
+                                self.compile_args.append(arg)
+                            for arg in pkgdep.get_link_args():
+                                self.link_args.append(arg)
+                            for arg in pkgdep.get_link_args(raw=True):
+                                self.raw_link_args.append(arg)
+
+        for target in description['targets']:
+            if target['rootPackage'] in packages:
+                add_lib_args('libs', target)
+                add_lib_args(f'libs-{platform.machine()}', target)
+                for file in target['buildSettings']['linkerFiles']:
+                    lib_path = self._find_right_lib_path(file, comp, description)
+                    if lib_path:
+                        self.link_args.append(lib_path)
+                    else:
+                        self.is_found = False
+
+    def _find_right_lib_path(self,
+                             default_path: str,
+                             comp: str,
+                             description: T.Dict[str, str],
+                             folder_only: bool = False,
+                             file_name: str = '') -> T.Optional[str]:
+        module_path = lib_file_name = ''
+        if folder_only:
+            module_path = default_path
+            lib_file_name = file_name
+        else:
+            module_path = os.path.dirname(default_path)
+            lib_file_name = os.path.basename(default_path)
+        module_build_path = os.path.join(module_path, '.dub', 'build')
+
+        # If default_path is a path to the lib file and the lib's directory
+        # doesn't have a '.dub/build' subdir
+        if not os.path.isdir(module_build_path) and os.path.isfile(default_path):
+            if folder_only:
+                return module_path
+            else:
+                return default_path
+
+        # Get D version implemented in the compiler
+        # gdc doesn't support this
+        ret, res = self._call_dubbin(['--version'])
+
+        if ret != 0:
+            mlog.error('Failed to run {!r}', mlog.bold(comp))
+            return None
+
+        d_ver_reg = re.search('v[0-9].[0-9][0-9][0-9].[0-9]', res) # Ex.: v2.081.2
+        if d_ver_reg is not None:
+            d_ver = d_ver_reg.group().rsplit('.', 1)[0].replace('v', '').replace('.', '') # Fix structure. Ex.: 2081
+        else:
+            d_ver = '' # gdc
+
+        if not os.path.isdir(module_build_path):
+            return ''
+
+        # Ex.: library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
+        build_name = '-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
+        for entry in os.listdir(module_build_path):
+            if build_name in entry:
+                for file in os.listdir(os.path.join(module_build_path, entry)):
+                    if file == lib_file_name:
+                        if folder_only:
+                            return os.path.join(module_build_path, entry)
+                        else:
+                            return os.path.join(module_build_path, entry, lib_file_name)
+
+        return ''
+
+    def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str]:
+        assert isinstance(self.dubbin, ExternalProgram)
+        p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
+        return p.returncode, out.strip()
+
+    def _call_copmbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str]:
+        p, out = Popen_safe(self.compiler.get_exelist() + args, env=env)[0:2]
+        return p.returncode, out.strip()
+
+    def _check_dub(self) -> T.Union[bool, ExternalProgram]:
+        dubbin: T.Union[bool, ExternalProgram] = ExternalProgram('dub', silent=True)
+        assert isinstance(dubbin, ExternalProgram)
+        if dubbin.found():
+            try:
+                p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
+                if p.returncode != 0:
+                    mlog.warning('Found dub {!r} but couldn\'t run it'
+                                 ''.format(' '.join(dubbin.get_command())))
+                    # Set to False instead of None to signify that we've already
+                    # searched for it and not found it
+                    dubbin = False
+            except (FileNotFoundError, PermissionError):
+                dubbin = False
+        else:
+            dubbin = False
+        if isinstance(dubbin, ExternalProgram):
+            mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
+                     '(%s)' % out.strip())
+        else:
+            mlog.log('Found DUB:', mlog.red('NO'))
+        return dubbin
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.DUB]
diff --git a/meson/mesonbuild/dependencies/factory.py b/meson/mesonbuild/dependencies/factory.py
new file mode 100644
index 000000000..048e3bc58
--- /dev/null
+++ b/meson/mesonbuild/dependencies/factory.py
@@ -0,0 +1,151 @@
+# Copyright 2013-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import typing as T
+
+from ..mesonlib import MachineChoice
+from .base import DependencyException, DependencyMethods
+from .base import ExternalDependency
+from .base import process_method_kw
+from .base import BuiltinDependency, SystemDependency
+from .cmake import CMakeDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from .configtool import ConfigToolDependency
+
+    DependencyGenerator = T.Callable[[], ExternalDependency]
+    FactoryFunc = T.Callable[
+        [
+            'Environment',
+            MachineChoice,
+            T.Dict[str, T.Any],
+            T.List[DependencyMethods]
+        ],
+        T.List[DependencyGenerator]
+    ]
+
+    WrappedFactoryFunc = T.Callable[
+        [
+            'Environment',
+            MachineChoice,
+            T.Dict[str, T.Any]
+        ],
+        T.List[DependencyGenerator]
+    ]
+
+class DependencyFactory:
+
+    """Factory to get dependencies from multiple sources.
+
+    This class provides an initializer that takes a set of names and classes
+    for various kinds of dependencies. When the initialized object is called
+    it returns a list of callables that return Dependency objects to try in order.
+
+    :name: The name of the dependency. This will be passed as the name
+        parameter of each dependency unless it is overridden on a per-type
+        basis.
+    :methods: An ordered list of DependencyMethods. This is the order
+        dependencies will be returned in unless they are removed by the
+        _process_method function
+    :*_name: This will overwrite the name passed to the corresponding class.
+        For example, if the name is 'zlib', but cmake calls the dependency
+        'Z', then using `cmake_name='Z'` will pass the name as 'Z' to cmake.
+    :*_class: A *type* or callable that creates a class, and has the
+        signature of an ExternalDependency
+    :system_class: If you pass DependencyMethods.SYSTEM in methods, you must
+        set this argument.
+    """
+
+    def __init__(self, name: str, methods: T.List[DependencyMethods], *,
+                 extra_kwargs: T.Optional[T.Dict[str, T.Any]] = None,
+                 pkgconfig_name: T.Optional[str] = None,
+                 pkgconfig_class: 'T.Type[PkgConfigDependency]' = PkgConfigDependency,
+                 cmake_name: T.Optional[str] = None,
+                 cmake_class: 'T.Type[CMakeDependency]' = CMakeDependency,
+                 configtool_class: 'T.Optional[T.Type[ConfigToolDependency]]' = None,
+                 framework_name: T.Optional[str] = None,
+                 framework_class: 'T.Type[ExtraFrameworkDependency]' = ExtraFrameworkDependency,
+                 builtin_class: 'T.Type[BuiltinDependency]' = BuiltinDependency,
+                 system_class: 'T.Type[SystemDependency]' = SystemDependency):
+
+        if DependencyMethods.CONFIG_TOOL in methods and not configtool_class:
+            raise DependencyException('A configtool must have a custom class')
+
+        self.extra_kwargs = extra_kwargs or {}
+        self.methods = methods
+        self.classes: T.Dict[
+            DependencyMethods,
+            T.Callable[['Environment', T.Dict[str, T.Any]], ExternalDependency]
+        ] = {
+            # Just attach the correct name right now, either the generic name
+            # or the method specific name.
+            DependencyMethods.EXTRAFRAMEWORK: lambda env, kwargs: framework_class(framework_name or name, env, kwargs),
+            DependencyMethods.PKGCONFIG:      lambda env, kwargs: pkgconfig_class(pkgconfig_name or name, env, kwargs),
+            DependencyMethods.CMAKE:          lambda env, kwargs: cmake_class(cmake_name or name, env, kwargs),
+            DependencyMethods.SYSTEM:         lambda env, kwargs: system_class(name, env, kwargs),
+            DependencyMethods.BUILTIN:        lambda env, kwargs: builtin_class(name, env, kwargs),
+            DependencyMethods.CONFIG_TOOL:    None,
+        }
+        if configtool_class is not None:
+            self.classes[DependencyMethods.CONFIG_TOOL] = lambda env, kwargs: configtool_class(name, env, kwargs)
+
+    @staticmethod
+    def _process_method(method: DependencyMethods, env: 'Environment', for_machine: MachineChoice) -> bool:
+        """Report whether a method is valid or not.
+
+        If the method is valid, return true, otherwise return false. This is
+        used in a list comprehension to filter methods that are not possible.
+
+        By default this only removes EXTRAFRAMEWORK dependencies on non-Apple platforms.
+        """
+        # Extra frameworks are only valid for macOS and other apple products
+        if (method is DependencyMethods.EXTRAFRAMEWORK and
+                not env.machines[for_machine].is_darwin()):
+            return False
+        return True
+
+    def __call__(self, env: 'Environment', for_machine: MachineChoice,
+                 kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+        """Return a list of Dependencies with the arguments already attached."""
+        methods = process_method_kw(self.methods, kwargs)
+        nwargs = self.extra_kwargs.copy()
+        nwargs.update(kwargs)
+
+        return [functools.partial(self.classes[m], env, nwargs) for m in methods
+                if self._process_method(m, env, for_machine)]
+
+
+def factory_methods(methods: T.Set[DependencyMethods]) -> T.Callable[['FactoryFunc'], 'WrappedFactoryFunc']:
+    """Decorator for handling methods for dependency factory functions.
+
+    This helps to make factory functions self-documenting.
+    >>> @factory_methods([DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE])
+    >>> def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    >>>     pass
+    """
+
+    def inner(func: 'FactoryFunc') -> 'WrappedFactoryFunc':
+
+        @functools.wraps(func)
+        def wrapped(env: 'Environment', for_machine: MachineChoice, kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+            return func(env, for_machine, kwargs, process_method_kw(methods, kwargs))
+
+        return wrapped
+
+    return inner
diff --git a/meson/mesonbuild/dependencies/framework.py b/meson/mesonbuild/dependencies/framework.py
new file mode 100644
index 000000000..48223987e
--- /dev/null
+++ b/meson/mesonbuild/dependencies/framework.py
@@ -0,0 +1,123 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import DependencyTypeName, ExternalDependency, DependencyException, DependencyMethods
+from ..mesonlib import MesonException, Version, stringlistify
+from .. import mlog
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class ExtraFrameworkDependency(ExternalDependency):
+    system_framework_paths: T.Optional[T.List[str]] = None
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        paths = stringlistify(kwargs.get('paths', []))
+        super().__init__(DependencyTypeName('extraframeworks'), env, kwargs, language=language)
+        self.name = name
+        # Full path to framework directory
+        self.framework_path: T.Optional[str] = None
+        if not self.clib_compiler:
+            raise DependencyException('No C-like compilers are available')
+        if self.system_framework_paths is None:
+            try:
+                self.system_framework_paths = self.clib_compiler.find_framework_paths(self.env)
+            except MesonException as e:
+                if 'non-clang' in str(e):
+                    # Apple frameworks can only be found (and used) with the
+                    # system compiler. It is not available so bail immediately.
+                    self.is_found = False
+                    return
+                raise
+        self.detect(name, paths)
+
+    def detect(self, name: str, paths: T.List[str]) -> None:
+        if not paths:
+            paths = self.system_framework_paths
+        for p in paths:
+            mlog.debug(f'Looking for framework {name} in {p}')
+            # We need to know the exact framework path because it's used by the
+            # Qt5 dependency class, and for setting the include path. We also
+            # want to avoid searching in an invalid framework path which wastes
+            # time and can cause a false positive.
+            framework_path = self._get_framework_path(p, name)
+            if framework_path is None:
+                continue
+            # We want to prefer the specified paths (in order) over the system
+            # paths since these are "extra" frameworks.
+            # For example, Python2's framework is in /System/Library/Frameworks and
+            # Python3's framework is in /Library/Frameworks, but both are called
+            # Python.framework. We need to know for sure that the framework was
+            # found in the path we expect.
+            allow_system = p in self.system_framework_paths
+            args = self.clib_compiler.find_framework(name, self.env, [p], allow_system)
+            if args is None:
+                continue
+            self.link_args = args
+            self.framework_path = framework_path.as_posix()
+            self.compile_args = ['-F' + self.framework_path]
+            # We need to also add -I includes to the framework because all
+            # cross-platform projects such as OpenGL, Python, Qt, GStreamer,
+            # etc do not use "framework includes":
+            # https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Tasks/IncludingFrameworks.html
+            incdir = self._get_framework_include_path(framework_path)
+            if incdir:
+                self.compile_args += ['-I' + incdir]
+            self.is_found = True
+            return
+
+    def _get_framework_path(self, path: str, name: str) -> T.Optional[Path]:
+        p = Path(path)
+        lname = name.lower()
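+        # e.g. a name of 'Python' would match an assumed
+        # /Library/Frameworks/Python.framework (the comparison against the
+        # framework directory name is case-insensitive).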
+        for d in p.glob('*.framework/'):
+            if lname == d.name.rsplit('.', 1)[0].lower():
+                return d
+        return None
+
+    def _get_framework_latest_version(self, path: Path) -> str:
+        versions = []
+        for each in path.glob('Versions/*'):
+            # macOS filesystems are usually case-insensitive
+            if each.name.lower() == 'current':
+                continue
+            versions.append(Version(each.name))
+        if len(versions) == 0:
+            # most system frameworks do not have a 'Versions' directory
+            return 'Headers'
+        return 'Versions/{}/Headers'.format(sorted(versions)[-1]._s)
+
+    def _get_framework_include_path(self, path: Path) -> T.Optional[str]:
+        # According to the spec, 'Headers' must always be a symlink to the
+        # Headers directory inside the currently-selected version of the
+        # framework, but sometimes frameworks are broken. Look in 'Versions'
+        # for the currently-selected version or pick the latest one.
+        trials = ('Headers', 'Versions/Current/Headers',
+                  self._get_framework_latest_version(path))
+        for each in trials:
+            trial = path / each
+            if trial.is_dir():
+                return trial.as_posix()
+        return None
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.EXTRAFRAMEWORK]
+
+    def log_info(self) -> str:
+        return self.framework_path or ''
+
+    def log_tried(self) -> str:
+        return 'framework'
diff --git a/meson/mesonbuild/dependencies/hdf5.py b/meson/mesonbuild/dependencies/hdf5.py
new file mode 100644
index 000000000..c062e713e
--- /dev/null
+++ b/meson/mesonbuild/dependencies/hdf5.py
@@ -0,0 +1,180 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for miscellaneous external dependencies.
+
+import functools
+import os
+import re
+import shutil
+import subprocess
+from pathlib import Path
+
+from ..mesonlib import OrderedSet, join_args
+from .base import DependencyException, DependencyMethods
+from .configtool import ConfigToolDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .base import Dependency
+    from .factory import DependencyGenerator
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+
+
+class HDF5PkgConfigDependency(PkgConfigDependency):
+
+    """Handle brokenness in the HDF5 pkg-config files."""
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        language = language or 'c'
+        if language not in {'c', 'cpp', 'fortran'}:
+            raise DependencyException(f'Language {language} is not supported with HDF5.')
+
+        super().__init__(name, environment, kwargs, language)
+        if not self.is_found:
+            return
+
+        # some broken pkg-config files don't actually list the full path to the needed includes
+        newinc = []  # type: T.List[str]
+        for arg in self.compile_args:
+            if arg.startswith('-I'):
+                stem = 'static' if kwargs.get('static', False) else 'shared'
+                if (Path(arg[2:]) / stem).is_dir():
+                    newinc.append('-I' + str(Path(arg[2:]) / stem))
+        self.compile_args += newinc
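+        # Illustrative (paths are assumed): a '-I/usr/include/hdf5/serial' from
+        # the .pc file gains a companion '-I/usr/include/hdf5/serial/shared'
+        # (or .../static) when that subdirectory exists.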
+
+        link_args = []  # type: T.List[str]
+        for larg in self.get_link_args():
+            lpath = Path(larg)
+            # some pkg-config hdf5.pc files (e.g. Ubuntu's) don't include the commonly-used
+            # HL HDF5 libraries, so let's add them if they exist.
+            # Additionally, some pkg-config HDF5 HL files are malformed, so let's be sure to find HL anyway.
+            if lpath.is_file():
+                hl = []
+                if language == 'cpp':
+                    hl += ['_hl_cpp', '_cpp']
+                elif language == 'fortran':
+                    hl += ['_hl_fortran', 'hl_fortran', '_fortran']
+                hl += ['_hl']  # C HL library, always needed
+
+                suffix = '.' + lpath.name.split('.', 1)[1]  # in case of .dll.a
+                for h in hl:
+                    hlfn = lpath.parent / (lpath.name.split('.', 1)[0] + h + suffix)
+                    if hlfn.is_file():
+                        link_args.append(str(hlfn))
+                # HDF5 C libs are required by other HDF5 languages
+                link_args.append(larg)
+            else:
+                link_args.append(larg)
+
+        self.link_args = link_args
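+        # e.g. for C++ an assumed /usr/lib/libhdf5.so would pull in
+        # libhdf5_hl_cpp.so, libhdf5_cpp.so and libhdf5_hl.so first, when they
+        # exist next to it.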
+
+
+class HDF5ConfigToolDependency(ConfigToolDependency):
+
+    """Wrapper around hdf5 binary config tools."""
+
+    version_arg = '-showconfig'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        language = language or 'c'
+        if language not in {'c', 'cpp', 'fortran'}:
+            raise DependencyException(f'Language {language} is not supported with HDF5.')
+
+        if language == 'c':
+            cenv = 'CC'
+            tools = ['h5cc']
+        elif language == 'cpp':
+            cenv = 'CXX'
+            tools = ['h5c++']
+        elif language == 'fortran':
+            cenv = 'FC'
+            tools = ['h5fc']
+        else:
+            raise DependencyException('How did you get here?')
+
+        # We need this before we call super()
+        for_machine = self.get_for_machine_from_kwargs(kwargs)
+
+        nkwargs = kwargs.copy()
+        nkwargs['tools'] = tools
+
+        # Override the compiler that the config tools are going to use by
+        # setting the environment variables that they use for the compiler and
+        # linkers.
+        compiler = environment.coredata.compilers[for_machine][language]
+        try:
+            os.environ[f'HDF5_{cenv}'] = join_args(compiler.get_exelist())
+            os.environ[f'HDF5_{cenv}LINKER'] = join_args(compiler.get_linker_exelist())
+            super().__init__(name, environment, nkwargs, language)
+        finally:
+            del os.environ[f'HDF5_{cenv}']
+            del os.environ[f'HDF5_{cenv}LINKER']
+        if not self.is_found:
+            return
+
+        # We first need to call the tool with -c to get the compile arguments
+        # and then without -c to get the link arguments.
+        args = self.get_config_value(['-show', '-c'], 'args')[1:]
+        args += self.get_config_value(['-show', '-noshlib' if kwargs.get('static', False) else '-shlib'], 'args')[1:]
+        for arg in args:
+            if arg.startswith(('-I', '-f', '-D')) or arg == '-pthread':
+                self.compile_args.append(arg)
+            elif arg.startswith(('-L', '-l', '-Wl')):
+                self.link_args.append(arg)
+            elif Path(arg).is_file():
+                self.link_args.append(arg)
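+        # e.g. an assumed '-I/opt/hdf5/include' ends up in compile_args,
+        # '-L/opt/hdf5/lib' and '-lhdf5' in link_args, and a bare
+        # '/opt/hdf5/lib/libhdf5.a' is treated as a link argument because the
+        # file exists (all paths illustrative).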
+
+        # If the language is not C we need to add C as a subdependency
+        if language != 'c':
+            nkwargs = kwargs.copy()
+            nkwargs['language'] = 'c'
+            # I'm being too clever for mypy and pylint
+            self.is_found = self._add_sub_dependency(hdf5_factory(environment, for_machine, nkwargs))  # pylint: disable=no-value-for-parameter
+
+    def _sanitize_version(self, ver: str) -> str:
+        v = re.search(r'\s*HDF5 Version: (\d+\.\d+\.\d+)', ver)
+        return v.group(1)
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL})
+def hdf5_factory(env: 'Environment', for_machine: 'MachineChoice',
+                 kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    language = kwargs.get('language')
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        # Use an ordered set so that these remain the first tried pkg-config files
+        pkgconfig_files = OrderedSet(['hdf5', 'hdf5-serial'])
+        # FIXME: This won't honor pkg-config paths, and cross-native files
+        PCEXE = shutil.which('pkg-config')
+        if PCEXE:
+            # some distros put hdf5-1.2.3.pc with version number in .pc filename.
+            ret = subprocess.run([PCEXE, '--list-all'], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
+                                    universal_newlines=True)
+            if ret.returncode == 0:
+                for pkg in ret.stdout.split('\n'):
+                    if pkg.startswith('hdf5'):
+                        pkgconfig_files.add(pkg.split(' ', 1)[0])
+
+        for pkg in pkgconfig_files:
+            candidates.append(functools.partial(HDF5PkgConfigDependency, pkg, env, kwargs, language))
+
+    if DependencyMethods.CONFIG_TOOL in methods:
+        candidates.append(functools.partial(HDF5ConfigToolDependency, 'hdf5', env, kwargs, language))
+
+    return candidates
diff --git a/meson/mesonbuild/dependencies/misc.py b/meson/mesonbuild/dependencies/misc.py
new file mode 100644
index 000000000..483212f96
--- /dev/null
+++ b/meson/mesonbuild/dependencies/misc.py
@@ -0,0 +1,623 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for miscellaneous external dependencies.
+
+from pathlib import Path
+import functools
+import re
+import sysconfig
+import typing as T
+
+from .. import mesonlib
+from .. import mlog
+from ..environment import detect_cpu_family
+from .base import DependencyException, DependencyMethods
+from .base import BuiltinDependency, SystemDependency
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .factory import DependencyFactory, factory_methods
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment, MachineChoice
+    from .factory import DependencyGenerator
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def netcdf_factory(env: 'Environment',
+                   for_machine: 'MachineChoice',
+                   kwargs: T.Dict[str, T.Any],
+                   methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    language = kwargs.get('language', 'c')
+    if language not in ('c', 'cpp', 'fortran'):
+        raise DependencyException(f'Language {language} is not supported with NetCDF.')
+
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        if language == 'fortran':
+            pkg = 'netcdf-fortran'
+        else:
+            pkg = 'netcdf'
+
+        candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs, language=language))
+
+    if DependencyMethods.CMAKE in methods:
+        candidates.append(functools.partial(CMakeDependency, 'NetCDF', env, kwargs, language=language))
+
+    return candidates
+
+
+class OpenMPDependency(SystemDependency):
+    # Map date of specification release (which is the macro value) to a version.
+    VERSIONS = {
+        '201811': '5.0',
+        '201611': '5.0-revision1',  # This is supported by ICC 19.x
+        '201511': '4.5',
+        '201307': '4.0',
+        '201107': '3.1',
+        '200805': '3.0',
+        '200505': '2.5',
+        '200203': '2.0',
+        '199810': '1.0',
+    }
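+    # e.g. a compiler that defines _OPENMP as 201511 (value assumed for
+    # illustration) is reported as supporting OpenMP 4.5 via the table above.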
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        language = kwargs.get('language')
+        super().__init__('openmp', environment, kwargs, language=language)
+        self.is_found = False
+        if self.clib_compiler.get_id() == 'pgi':
+            # through at least PGI 19.4, there is no macro defined for OpenMP, but OpenMP 3.1 is supported.
+            self.version = '3.1'
+            self.is_found = True
+            self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+            return
+        try:
+            openmp_date = self.clib_compiler.get_define(
+                '_OPENMP', '', self.env, self.clib_compiler.openmp_flags(), [self], disable_cache=True)[0]
+        except mesonlib.EnvironmentException as e:
+            mlog.debug('OpenMP support not available in the compiler')
+            mlog.debug(e)
+            openmp_date = None
+
+        if openmp_date:
+            try:
+                self.version = self.VERSIONS[openmp_date]
+            except KeyError:
+                mlog.debug(f'Could not find an OpenMP version matching {openmp_date}')
+                if openmp_date == '_OPENMP':
+                    mlog.debug('This can be caused by flags such as gcc\'s `-fdirectives-only`, which affect preprocessor behavior.')
+                return
+            # Flang has omp_lib.h
+            header_names = ('omp.h', 'omp_lib.h')
+            for name in header_names:
+                if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]:
+                    self.is_found = True
+                    self.compile_args = self.clib_compiler.openmp_flags()
+                    self.link_args = self.clib_compiler.openmp_link_flags()
+                    break
+            if not self.is_found:
+                mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
+
+
+class ThreadDependency(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs)
+        self.is_found = True
+        # Happens if you are using a language that has a threads concept
+        # but no C-like compiler, such as plain Cuda.
+        if self.clib_compiler is None:
+            self.compile_args = []
+            self.link_args = []
+        else:
+            self.compile_args = self.clib_compiler.thread_flags(environment)
+            self.link_args = self.clib_compiler.thread_link_flags(environment)
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.AUTO, DependencyMethods.CMAKE]
+
+
+class BlocksDependency(SystemDependency):
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('blocks', environment, kwargs)
+        self.name = 'blocks'
+        self.is_found = False
+
+        if self.env.machines[self.for_machine].is_darwin():
+            self.compile_args = []
+            self.link_args = []
+        else:
+            self.compile_args = ['-fblocks']
+            self.link_args = ['-lBlocksRuntime']
+
+            if not self.clib_compiler.has_header('Block.h', '', environment, disable_cache=True)[0] or \
+               not self.clib_compiler.find_library('BlocksRuntime', environment, []):
+                mlog.log(mlog.red('ERROR:'), 'BlocksRuntime not found.')
+                return
+
+        source = '''
+            int main(int argc, char **argv)
+            {
+                int (^callback)(void) = ^ int (void) { return 0; };
+                return callback();
+            }'''
+
+        with self.clib_compiler.compile(source, extra_args=self.compile_args + self.link_args) as p:
+            if p.returncode != 0:
+                mlog.log(mlog.red('ERROR:'), 'Compiler does not support blocks extension.')
+                return
+
+            self.is_found = True
+
+
+class Python3DependencySystem(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs)
+
+        if not environment.machines.matches_build_machine(self.for_machine):
+            return
+        if not environment.machines[self.for_machine].is_windows():
+            return
+
+        self.name = 'python3'
+        self.static = kwargs.get('static', False)
+        # We can only be sure that it is Python 3 at this point
+        self.version = '3'
+        self._find_libpy3_windows(environment)
+
+    @staticmethod
+    def get_windows_python_arch() -> T.Optional[str]:
+        pyplat = sysconfig.get_platform()
+        if pyplat == 'mingw':
+            pycc = sysconfig.get_config_var('CC')
+            if pycc.startswith('x86_64'):
+                return '64'
+            elif pycc.startswith(('i686', 'i386')):
+                return '32'
+            else:
+                mlog.log(f'MinGW Python built with unknown CC {pycc!r}, please file a bug')
+                return None
+        elif pyplat == 'win32':
+            return '32'
+        elif pyplat in ('win64', 'win-amd64'):
+            return '64'
+        mlog.log(f'Unknown Windows Python platform {pyplat!r}')
+        return None
+
+    def get_windows_link_args(self) -> T.Optional[T.List[str]]:
+        pyplat = sysconfig.get_platform()
+        if pyplat.startswith('win'):
+            vernum = sysconfig.get_config_var('py_version_nodot')
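+            # e.g. an assumed CPython 3.9 gives a vernum of '39': MSVC-style
+            # builds link against <base>/libs/python39.lib, gcc builds against
+            # python39.dll, and static requests use libs/libpython39.a.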
+            if self.static:
+                libpath = Path('libs') / f'libpython{vernum}.a'
+            else:
+                comp = self.get_compiler()
+                if comp.id == "gcc":
+                    libpath = Path(f'python{vernum}.dll')
+                else:
+                    libpath = Path('libs') / f'python{vernum}.lib'
+            lib = Path(sysconfig.get_config_var('base')) / libpath
+        elif pyplat == 'mingw':
+            if self.static:
+                libname = sysconfig.get_config_var('LIBRARY')
+            else:
+                libname = sysconfig.get_config_var('LDLIBRARY')
+            lib = Path(sysconfig.get_config_var('LIBDIR')) / libname
+        if not lib.exists():
+            mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
+            return None
+        return [str(lib)]
+
+    def _find_libpy3_windows(self, env: 'Environment') -> None:
+        '''
+        Find python3 libraries on Windows and also verify that the arch matches
+        what we are building for.
+        '''
+        pyarch = self.get_windows_python_arch()
+        if pyarch is None:
+            self.is_found = False
+            return
+        arch = detect_cpu_family(env.coredata.compilers.host)
+        if arch == 'x86':
+            arch = '32'
+        elif arch == 'x86_64':
+            arch = '64'
+        else:
+            # We can't cross-compile Python 3 dependencies on Windows yet
+            mlog.log(f'Unknown architecture {arch!r} for',
+                     mlog.bold(self.name))
+            self.is_found = False
+            return
+        # Pyarch ends in '32' or '64'
+        if arch != pyarch:
+            mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but '
+                     'found {}-bit'.format(arch, pyarch))
+            self.is_found = False
+            return
+        # This can fail if the library is not found
+        largs = self.get_windows_link_args()
+        if largs is None:
+            self.is_found = False
+            return
+        self.link_args = largs
+        # Compile args
+        inc = sysconfig.get_path('include')
+        platinc = sysconfig.get_path('platinclude')
+        self.compile_args = ['-I' + inc]
+        if inc != platinc:
+            self.compile_args.append('-I' + platinc)
+        self.version = sysconfig.get_config_var('py_version')
+        self.is_found = True
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        if mesonlib.is_windows():
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
+        elif mesonlib.is_osx():
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
+        else:
+            return [DependencyMethods.PKGCONFIG]
+
+    def log_tried(self) -> str:
+        return 'sysconfig'
+
+class PcapDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['pcap-config']
+    tool_name = 'pcap-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_pcap_lib_version()
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+    def get_pcap_lib_version(self) -> T.Optional[str]:
+        # Since we seem to need to run a program to discover the pcap version,
+        # we can't do that when cross-compiling
+        # FIXME: this should be handled if we have an exe_wrapper
+        if not self.env.machines.matches_build_machine(self.for_machine):
+            return None
+
+        v = self.clib_compiler.get_return_value('pcap_lib_version', 'string',
+                                                '#include <pcap.h>', self.env, [], [self])
+        v = re.sub(r'libpcap version ', '', str(v))
+        v = re.sub(r' -- Apple version.*$', '', v)
+        return v
+
+
+class CupsDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['cups-config']
+    tool_name = 'cups-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        if mesonlib.is_osx():
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE]
+        else:
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.CMAKE]
+
+
+class LibWmfDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['libwmf-config']
+    tool_name = 'libwmf-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class LibGCryptDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['libgcrypt-config']
+    tool_name = 'libgcrypt-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_config_value(['--version'], 'version')[0]
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class GpgmeDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['gpgme-config']
+    tool_name = 'gpgme-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_config_value(['--version'], 'version')[0]
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class ShadercDependency(SystemDependency):
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('shaderc', environment, kwargs)
+
+        static_lib = 'shaderc_combined'
+        shared_lib = 'shaderc_shared'
+
+        libs = [shared_lib, static_lib]
+        if self.static:
+            libs.reverse()
+
+        cc = self.get_compiler()
+
+        for lib in libs:
+            self.link_args = cc.find_library(lib, environment, [])
+            if self.link_args is not None:
+                self.is_found = True
+
+                if self.static and lib != static_lib:
+                    mlog.warning(f'Static library {static_lib!r} not found for dependency '
+                                 f'{self.name!r}, may not be statically linked')
+
+                break
+
+    def log_tried(self) -> str:
+        return 'system'
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.SYSTEM, DependencyMethods.PKGCONFIG]
+
+
+class CursesConfigToolDependency(ConfigToolDependency):
+
+    """Use the curses config tools."""
+
+    tool = 'curses-config'
+    # ncurses5.4-config is for macOS Catalina
+    tools = ['ncursesw6-config', 'ncursesw5-config', 'ncurses6-config', 'ncurses5-config', 'ncurses5.4-config']
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+
+class CursesSystemDependency(SystemDependency):
+
+    """Curses dependency the hard way.
+
+    This replaces hand rolled find_library() and has_header() calls. We
+    provide this for portability reasons, there are a large number of curses
+    implementations, and the differences between them can be very annoying.
+    """
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+
+        candidates = [
+            ('pdcurses', ['pdcurses/curses.h']),
+            ('ncursesw',  ['ncursesw/ncurses.h', 'ncurses.h']),
+            ('ncurses',  ['ncurses/ncurses.h', 'ncurses/curses.h', 'ncurses.h']),
+            ('curses',  ['curses.h']),
+        ]
+
+        # Not sure how else to elegantly break out of both loops
+        for lib, headers in candidates:
+            l = self.clib_compiler.find_library(lib, env, [])
+            if l:
+                for header in headers:
+                    h = self.clib_compiler.has_header(header, '', env)
+                    if h[0]:
+                        self.is_found = True
+                        self.link_args = l
+                        # Not sure how to find version for non-ncurses curses
+                        # implementations. The one in illumos/OpenIndiana
+                        # doesn't seem to have a version defined in the header.
+                        if lib.startswith('ncurses'):
+                            v, _ = self.clib_compiler.get_define('NCURSES_VERSION', f'#include <{header}>', env, [], [self])
+                            self.version = v.strip('"')
+                        if lib.startswith('pdcurses'):
+                            v_major, _ = self.clib_compiler.get_define('PDC_VER_MAJOR', f'#include <{header}>', env, [], [self])
+                            v_minor, _ = self.clib_compiler.get_define('PDC_VER_MINOR', f'#include <{header}>', env, [], [self])
+                            self.version = f'{v_major}.{v_minor}'
+
+                        # Check the version if possible; emit a warning if we can't
+                        req = kwargs.get('version')
+                        if req:
+                            if self.version:
+                                self.is_found = mesonlib.version_compare(self.version, req)
+                            else:
+                                mlog.warning('Cannot determine version of curses to compare against.')
+
+                        if self.is_found:
+                            mlog.debug('Curses library:', l)
+                            mlog.debug('Curses header:', header)
+                            break
+            if self.is_found:
+                break
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.SYSTEM]
+
+
+class IntlBuiltinDependency(BuiltinDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+
+        if self.clib_compiler.has_function('ngettext', '', env)[0]:
+            self.is_found = True
+
+
+class IntlSystemDependency(SystemDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+
+        h = self.clib_compiler.has_header('libintl.h', '', env)
+        self.link_args = self.clib_compiler.find_library('intl', env, [])
+
+        if h and self.link_args:
+            self.is_found = True
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def curses_factory(env: 'Environment',
+                   for_machine: 'MachineChoice',
+                   kwargs: T.Dict[str, T.Any],
+                   methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        pkgconfig_files = ['pdcurses', 'ncursesw', 'ncurses', 'curses']
+        for pkg in pkgconfig_files:
+            candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs))
+
+    # There are path handling problems with these methods on msys, and they
+    # don't apply to windows otherwise (cygwin is handled separately from
+    # windows)
+    if not env.machines[for_machine].is_windows():
+        if DependencyMethods.CONFIG_TOOL in methods:
+            candidates.append(functools.partial(CursesConfigToolDependency, 'curses', env, kwargs))
+
+        if DependencyMethods.SYSTEM in methods:
+            candidates.append(functools.partial(CursesSystemDependency, 'curses', env, kwargs))
+
+    return candidates
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM})
+def shaderc_factory(env: 'Environment',
+                    for_machine: 'MachineChoice',
+                    kwargs: T.Dict[str, T.Any],
+                    methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    """Custom DependencyFactory for ShaderC.
+
+    ShaderC's oddity of providing three different libraries from the same
+    build is easier to represent as a separate function than by twisting
+    DependencyFactory even more.
+    """
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        # ShaderC packages its shared and static libs together
+        # and provides different pkg-config files for each one. We
+        # smooth over this difference by handling the static
+        # keyword before handing off to the pkg-config handler.
+        shared_libs = ['shaderc']
+        static_libs = ['shaderc_combined', 'shaderc_static']
+
+        if kwargs.get('static', False):
+            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+                 for name in static_libs + shared_libs]
+        else:
+            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+                 for name in shared_libs + static_libs]
+        candidates.extend(c)
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(ShadercDependency, env, kwargs))
+
+    return candidates
+
+
+cups_factory = DependencyFactory(
+    'cups',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
+    configtool_class=CupsDependencyConfigTool,
+    cmake_name='Cups',
+)
+
+gpgme_factory = DependencyFactory(
+    'gpgme',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=GpgmeDependencyConfigTool,
+)
+
+libgcrypt_factory = DependencyFactory(
+    'libgcrypt',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=LibGCryptDependencyConfigTool,
+)
+
+libwmf_factory = DependencyFactory(
+    'libwmf',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=LibWmfDependencyConfigTool,
+)
+
+pcap_factory = DependencyFactory(
+    'pcap',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=PcapDependencyConfigTool,
+    pkgconfig_name='libpcap',
+)
+
+python3_factory = DependencyFactory(
+    'python3',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.EXTRAFRAMEWORK],
+    system_class=Python3DependencySystem,
+    # The macOS framework is named just 'Python', with no version number
+    framework_name='Python',
+    # There is a Python in /System/Library/Frameworks, but that's Python 2.x;
+    # Python 3 will always be in /Library
+    extra_kwargs={'paths': ['/Library/Frameworks']},
+)
+
+threads_factory = DependencyFactory(
+    'threads',
+    [DependencyMethods.SYSTEM, DependencyMethods.CMAKE],
+    cmake_name='Threads',
+    system_class=ThreadDependency,
+)
+
+intl_factory = DependencyFactory(
+    'intl',
+    [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM],
+    builtin_class=IntlBuiltinDependency,
+    system_class=IntlSystemDependency,
+)
diff --git a/meson/mesonbuild/dependencies/mpi.py b/meson/mesonbuild/dependencies/mpi.py
new file mode 100644
index 000000000..2354767c2
--- /dev/null
+++ b/meson/mesonbuild/dependencies/mpi.py
@@ -0,0 +1,236 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import typing as T
+import os
+import re
+
+from ..environment import detect_cpu_family
+from .base import DependencyMethods, detect_compiler, SystemDependency
+from .configtool import ConfigToolDependency
+from .factory import factory_methods
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from .factory import DependencyGenerator
+    from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def mpi_factory(env: 'Environment',
+                for_machine: 'MachineChoice',
+                kwargs: T.Dict[str, T.Any],
+                methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    language = kwargs.get('language', 'c')
+    if language not in {'c', 'cpp', 'fortran'}:
+        # MPI is only usable from C, C++ and Fortran; bail out for any other language
+        return []
+
+    candidates: T.List['DependencyGenerator'] = []
+    compiler = detect_compiler('mpi', env, for_machine, language)
+    if compiler is None:
+        return []
+    compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'}
+
+    # Only OpenMPI has pkg-config, and it doesn't work with the intel compilers
+    if DependencyMethods.PKGCONFIG in methods and not compiler_is_intel:
+        pkg_name = None
+        if language == 'c':
+            pkg_name = 'ompi-c'
+        elif language == 'cpp':
+            pkg_name = 'ompi-cxx'
+        elif language == 'fortran':
+            pkg_name = 'ompi-fort'
+        candidates.append(functools.partial(
+            PkgConfigDependency, pkg_name, env, kwargs, language=language))
+
+    if DependencyMethods.CONFIG_TOOL in methods:
+        nwargs = kwargs.copy()
+
+        if compiler_is_intel:
+            if env.machines[for_machine].is_windows():
+                nwargs['version_arg'] = '-v'
+                nwargs['returncode_value'] = 3
+
+            if language == 'c':
+                tool_names = [os.environ.get('I_MPI_CC'), 'mpiicc']
+            elif language == 'cpp':
+                tool_names = [os.environ.get('I_MPI_CXX'), 'mpiicpc']
+            elif language == 'fortran':
+                tool_names = [os.environ.get('I_MPI_F90'), 'mpiifort']
+
+            cls = IntelMPIConfigToolDependency  # type: T.Type[ConfigToolDependency]
+        else:  # OpenMPI, which doesn't work with the Intel compilers
+            #
+            # We try the environment variables for the tools first, but then
+            # fall back to the hardcoded names
+            if language == 'c':
+                tool_names = [os.environ.get('MPICC'), 'mpicc']
+            elif language == 'cpp':
+                tool_names = [os.environ.get('MPICXX'), 'mpic++', 'mpicxx', 'mpiCC']
+            elif language == 'fortran':
+                tool_names = [os.environ.get(e) for e in ['MPIFC', 'MPIF90', 'MPIF77']]
+                tool_names.extend(['mpifort', 'mpif90', 'mpif77'])
+
+            cls = OpenMPIConfigToolDependency
+
+        tool_names = [t for t in tool_names if t]  # remove empty environment variables
+        assert tool_names
+
+        nwargs['tools'] = tool_names
+        candidates.append(functools.partial(
+            cls, tool_names[0], env, nwargs, language=language))
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(
+            MSMPIDependency, 'msmpi', env, kwargs, language=language))
+
+    return candidates
+
+
+class _MPIConfigToolDependency(ConfigToolDependency):
+
+    def _filter_compile_args(self, args: T.Sequence[str]) -> T.List[str]:
+        """
+        MPI wrappers return a bunch of garbage args.
+        Drop -O2 and everything that is not needed.
+        """
+        result = []
+        multi_args: T.Tuple[str, ...] = ('-I', )
+        if self.language == 'fortran':
+            fc = self.env.coredata.compilers[self.for_machine]['fortran']
+            multi_args += fc.get_module_incdir_args()
+
+        include_next = False
+        for f in args:
+            if f.startswith(('-D', '-f') + multi_args) or f == '-pthread' \
+                    or (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')):
+                result.append(f)
+                if f in multi_args:
+                    # Path is a separate argument.
+                    include_next = True
+            elif include_next:
+                include_next = False
+                result.append(f)
+        return result
+
+    def _filter_link_args(self, args: T.Sequence[str]) -> T.List[str]:
+        """
+        MPI wrappers return a bunch of garbage args.
+        Drop -O2 and everything that is not needed.
+        """
+        result = []
+        include_next = False
+        for f in args:
+            if self._is_link_arg(f):
+                result.append(f)
+                if f in ('-L', '-Xlinker'):
+                    include_next = True
+            elif include_next:
+                include_next = False
+                result.append(f)
+        return result
+
+    def _is_link_arg(self, f: str) -> bool:
+        if self.clib_compiler.id == 'intel-cl':
+            return f == '/link' or f.startswith('/LIBPATH') or f.endswith('.lib')   # always .lib whether static or dynamic
+        else:
+            return (f.startswith(('-L', '-l', '-Xlinker')) or
+                    f == '-pthread' or
+                    (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')))
+
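+# Illustrative sketch, not from the original code: for a C-language OpenMPI setup
+# the filters above behave roughly as
+#   >>> dep._filter_compile_args(['-I/usr/include/openmpi', '-O2', '-pthread', '-lmpi'])
+#   ['-I/usr/include/openmpi', '-pthread']
+#   >>> dep._filter_link_args(['-L/usr/lib/openmpi', '-lmpi', '-O2', '-pthread'])
+#   ['-L/usr/lib/openmpi', '-lmpi', '-pthread']
+# where `dep` is an already-constructed _MPIConfigToolDependency instance.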
+
+class IntelMPIConfigToolDependency(_MPIConfigToolDependency):
+
+    """Wrapper around Intel's mpiicc and friends."""
+
+    version_arg = '-v'  # --version is not the same as -v
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        if not self.is_found:
+            return
+
+        args = self.get_config_value(['-show'], 'link and compile args')
+        self.compile_args = self._filter_compile_args(args)
+        self.link_args = self._filter_link_args(args)
+
+    def _sanitize_version(self, out: str) -> str:
+        v = re.search(r'(\d{4}) Update (\d)', out)
+        if v:
+            return '{}.{}'.format(v.group(1), v.group(2))
+        return out
+
+
+class OpenMPIConfigToolDependency(_MPIConfigToolDependency):
+
+    """Wrapper around OpenMPI mpicc and friends."""
+
+    version_arg = '--showme:version'
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        if not self.is_found:
+            return
+
+        c_args = self.get_config_value(['--showme:compile'], 'compile_args')
+        self.compile_args = self._filter_compile_args(c_args)
+
+        l_args = self.get_config_value(['--showme:link'], 'link_args')
+        self.link_args = self._filter_link_args(l_args)
+
+    def _sanitize_version(self, out: str) -> str:
+        v = re.search(r'\d+\.\d+\.\d+', out)
+        if v:
+            return v.group(0)
+        return out
+
+
+class MSMPIDependency(SystemDependency):
+
+    """The Microsoft MPI."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        # MSMPI only supports the C API
+        if language not in {'c', 'fortran', None}:
+            self.is_found = False
+            return
+        # MSMPI is only for windows, obviously
+        if not self.env.machines[self.for_machine].is_windows():
+            return
+
+        incdir = os.environ.get('MSMPI_INC')
+        arch = detect_cpu_family(self.env.coredata.compilers.host)
+        libdir = None
+        if arch == 'x86':
+            libdir = os.environ.get('MSMPI_LIB32')
+            post = 'x86'
+        elif arch == 'x86_64':
+            libdir = os.environ.get('MSMPI_LIB64')
+            post = 'x64'
+
+        if libdir is None or incdir is None:
+            self.is_found = False
+            return
+
+        self.is_found = True
+        self.link_args = ['-l' + os.path.join(libdir, 'msmpi')]
+        self.compile_args = ['-I' + incdir, '-I' + os.path.join(incdir, post)]
+        if self.language == 'fortran':
+            self.link_args.append('-l' + os.path.join(libdir, 'msmpifec'))
diff --git a/meson/mesonbuild/dependencies/pkgconfig.py b/meson/mesonbuild/dependencies/pkgconfig.py
new file mode 100644
index 000000000..f09750467
--- /dev/null
+++ b/meson/mesonbuild/dependencies/pkgconfig.py
@@ -0,0 +1,503 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, sort_libpaths, DependencyTypeName
+from ..mesonlib import LibType, MachineChoice, OptionKey, OrderedSet, PerMachine, Popen_safe
+from ..programs import find_external_program, ExternalProgram
+from .. import mlog
+from pathlib import PurePath
+import re
+import os
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class PkgConfigDependency(ExternalDependency):
+    # The class's copy of the pkg-config path. Avoids having to search for it
+    # multiple times in the same Meson invocation.
+    class_pkgbin: PerMachine[T.Union[None, bool, ExternalProgram]] = PerMachine(None, None)
+    # We cache all pkg-config subprocess invocations to avoid redundant calls
+    pkgbin_cache: T.Dict[
+        T.Tuple[ExternalProgram, T.Tuple[str, ...], T.FrozenSet[T.Tuple[str, str]]],
+        T.Tuple[int, str, str]
+    ] = {}
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        super().__init__(DependencyTypeName('pkgconfig'), environment, kwargs, language=language)
+        self.name = name
+        self.is_libtool = False
+        # Store a copy of the pkg-config path on the object itself so it is
+        # stored in the pickled coredata and recovered.
+        self.pkgbin: T.Union[None, bool, ExternalProgram] = None
+
+        # Only search for pkg-config for each machine the first time and store
+        # the result in the class definition
+        if PkgConfigDependency.class_pkgbin[self.for_machine] is False:
+            mlog.debug('Pkg-config binary for %s is cached as not found.' % self.for_machine)
+        elif PkgConfigDependency.class_pkgbin[self.for_machine] is not None:
+            mlog.debug('Pkg-config binary for %s is cached.' % self.for_machine)
+        else:
+            assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
+            mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
+            for potential_pkgbin in find_external_program(
+                    self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
+                    environment.default_pkgconfig, allow_default_for_cross=False):
+                version_if_ok = self.check_pkgconfig(potential_pkgbin)
+                if not version_if_ok:
+                    continue
+                if not self.silent:
+                    mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
+                             '(%s)' % version_if_ok)
+                PkgConfigDependency.class_pkgbin[self.for_machine] = potential_pkgbin
+                break
+            else:
+                if not self.silent:
+                    mlog.log('Found Pkg-config:', mlog.red('NO'))
+                # Set to False instead of None to signify that we've already
+                # searched for it and not found it
+                PkgConfigDependency.class_pkgbin[self.for_machine] = False
+
+        self.pkgbin = PkgConfigDependency.class_pkgbin[self.for_machine]
+        if self.pkgbin is False:
+            self.pkgbin = None
+            msg = 'Pkg-config binary for machine %s not found. Giving up.' % self.for_machine
+            if self.required:
+                raise DependencyException(msg)
+            else:
+                mlog.debug(msg)
+                return
+
+        assert isinstance(self.pkgbin, ExternalProgram)
+        mlog.debug('Determining dependency {!r} with pkg-config executable '
+                   '{!r}'.format(name, self.pkgbin.get_path()))
+        ret, self.version, _ = self._call_pkgbin(['--modversion', name])
+        if ret != 0:
+            return
+
+        self.is_found = True
+
+        try:
+            # Fetch cargs to be used while using this dependency
+            self._set_cargs()
+            # Fetch the libraries and library paths needed for using this
+            self._set_libs()
+        except DependencyException as e:
+            mlog.debug(f"pkg-config error with '{name}': {e}")
+            if self.required:
+                raise
+            else:
+                self.compile_args = []
+                self.link_args = []
+                self.is_found = False
+                self.reason = e
+
+    def __repr__(self) -> str:
+        s = '<{0} {1}: {2} {3}>'
+        return s.format(self.__class__.__name__, self.name, self.is_found,
+                        self.version_reqs)
+
+    def _call_pkgbin_real(self, args: T.List[str], env: T.Dict[str, str]) -> T.Tuple[int, str, str]:
+        assert isinstance(self.pkgbin, ExternalProgram)
+        cmd = self.pkgbin.get_command() + args
+        p, out, err = Popen_safe(cmd, env=env)
+        rc, out, err = p.returncode, out.strip(), err.strip()
+        call = ' '.join(cmd)
+        mlog.debug(f"Called `{call}` -> {rc}\n{out}")
+        return rc, out, err
+
+    @staticmethod
+    def setup_env(env: T.MutableMapping[str, str], environment: 'Environment', for_machine: MachineChoice,
+                  extra_path: T.Optional[str] = None) -> None:
+        extra_paths: T.List[str] = environment.coredata.options[OptionKey('pkg_config_path', machine=for_machine)].value[:]
+        if extra_path and extra_path not in extra_paths:
+            extra_paths.append(extra_path)
+        sysroot = environment.properties[for_machine].get_sys_root()
+        if sysroot:
+            env['PKG_CONFIG_SYSROOT_DIR'] = sysroot
+        new_pkg_config_path = ':'.join(extra_paths)
+        env['PKG_CONFIG_PATH'] = new_pkg_config_path
+
+        pkg_config_libdir_prop = environment.properties[for_machine].get_pkg_config_libdir()
+        if pkg_config_libdir_prop:
+            new_pkg_config_libdir = ':'.join(pkg_config_libdir_prop)
+            env['PKG_CONFIG_LIBDIR'] = new_pkg_config_libdir
+        # Dump all PKG_CONFIG environment variables
+        for key, value in env.items():
+            if key.startswith('PKG_'):
+                mlog.debug(f'env[{key}]: {value}')
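+
+        # Rough usage sketch, not from the original code: with
+        # -Dpkg_config_path=/opt/foo/lib/pkgconfig and a cross file that sets
+        # sys_root = '/sysroot', a call like
+        #   env = os.environ.copy()
+        #   PkgConfigDependency.setup_env(env, environment, MachineChoice.HOST)
+        # leaves env['PKG_CONFIG_PATH'] == '/opt/foo/lib/pkgconfig' and
+        # env['PKG_CONFIG_SYSROOT_DIR'] == '/sysroot'.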
+
+    def _call_pkgbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
+        # Always copy the environment since we're going to modify it
+        # with pkg-config variables
+        if env is None:
+            env = os.environ.copy()
+        else:
+            env = env.copy()
+
+        assert isinstance(self.pkgbin, ExternalProgram)
+        PkgConfigDependency.setup_env(env, self.env, self.for_machine)
+
+        fenv = frozenset(env.items())
+        targs = tuple(args)
+        cache = PkgConfigDependency.pkgbin_cache
+        if (self.pkgbin, targs, fenv) not in cache:
+            cache[(self.pkgbin, targs, fenv)] = self._call_pkgbin_real(args, env)
+        return cache[(self.pkgbin, targs, fenv)]
+
+    def _convert_mingw_paths(self, args: T.List[str]) -> T.List[str]:
+        '''
+        Both MSVC and native Python on Windows cannot handle MinGW-esque /c/foo
+        paths so convert them to C:/foo. We cannot resolve other paths starting
+        with / like /home/foo so leave them as-is so that the user gets an
+        error/warning from the compiler/linker.
+        '''
+        if not self.env.machines.build.is_windows():
+            return args
+        converted = []
+        for arg in args:
+            pargs: T.Tuple[str, ...] = tuple()
+            # Library search path
+            if arg.startswith('-L/'):
+                pargs = PurePath(arg[2:]).parts
+                tmpl = '-L{}:/{}'
+            elif arg.startswith('-I/'):
+                pargs = PurePath(arg[2:]).parts
+                tmpl = '-I{}:/{}'
+            # Full path to library or .la file
+            elif arg.startswith('/'):
+                pargs = PurePath(arg).parts
+                tmpl = '{}:/{}'
+            elif arg.startswith(('-L', '-I')) or (len(arg) > 2 and arg[1] == ':'):
+                # clean out improper '\\ ' escapes that come from some Windows pkg-config files
+                arg = arg.replace('\\ ', ' ')
+            if len(pargs) > 1 and len(pargs[1]) == 1:
+                arg = tmpl.format(pargs[1], '/'.join(pargs[2:]))
+            converted.append(arg)
+        return converted
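+
+    # Illustrative sketch, not from the original code, assuming an MSYS/MinGW
+    # build machine:
+    #   >>> self._convert_mingw_paths(['-L/c/msys64/mingw64/lib', '/c/foo/libbar.la'])
+    #   ['-Lc:/msys64/mingw64/lib', 'c:/foo/libbar.la']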
+
+    def _split_args(self, cmd: str) -> T.List[str]:
+        # pkg-config paths follow Unix conventions, even on Windows; split the
+        # output using shlex.split rather than mesonlib.split_args
+        return shlex.split(cmd)
+
+    def _set_cargs(self) -> None:
+        env = None
+        if self.language == 'fortran':
+            # gfortran doesn't appear to look in system paths for INCLUDE files,
+            # so don't allow pkg-config to suppress -I flags for system paths
+            env = os.environ.copy()
+            env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+        ret, out, err = self._call_pkgbin(['--cflags', self.name], env=env)
+        if ret != 0:
+            raise DependencyException('Could not generate cargs for %s:\n%s\n' %
+                                      (self.name, err))
+        self.compile_args = self._convert_mingw_paths(self._split_args(out))
+
+    def _search_libs(self, out: str, out_raw: str) -> T.Tuple[T.List[str], T.List[str]]:
+        '''
+        @out: PKG_CONFIG_ALLOW_SYSTEM_LIBS=1 pkg-config --libs
+        @out_raw: pkg-config --libs
+
+        We always look for the file ourselves instead of depending on the
+        compiler to find it with -lfoo or foo.lib (if possible) because:
+        1. We want to be able to select static or shared
+        2. We need the full path of the library to calculate RPATH values
+        3. De-dup of libraries is easier when we have absolute paths
+
+        Libraries that are provided by the toolchain or are not found by
+        find_library() will be added with -L -l pairs.
+        '''
+        # Library paths should be safe to de-dup
+        #
+        # First, figure out what library paths to use. Originally, we were
+        # doing this as part of the loop, but due to differences in the order
+        # of -L values between pkg-config and pkgconf, we need to do that as
+        # a separate step. See:
+        # https://github.com/mesonbuild/meson/issues/3951
+        # https://github.com/mesonbuild/meson/issues/4023
+        #
+        # Separate system and prefix paths, and ensure that prefix paths are
+        # always searched first.
+        prefix_libpaths: OrderedSet[str] = OrderedSet()
+        # We also store this raw_link_args on the object later
+        raw_link_args = self._convert_mingw_paths(self._split_args(out_raw))
+        for arg in raw_link_args:
+            if arg.startswith('-L') and not arg.startswith(('-L-l', '-L-L')):
+                path = arg[2:]
+                if not os.path.isabs(path):
+                    # Resolve the path as a compiler in the build directory would
+                    path = os.path.join(self.env.get_build_dir(), path)
+                prefix_libpaths.add(path)
+        # Library paths are not always ordered in a meaningful way
+        #
+        # Instead of relying on pkg-config or pkgconf to provide -L flags in a
+        # specific order, we reorder library paths ourselves, according to the
+        # order specified in PKG_CONFIG_PATH. See:
+        # https://github.com/mesonbuild/meson/issues/4271
+        #
+        # Only prefix_libpaths are reordered here because there should not be
+        # too many system_libpaths to cause library version issues.
+        pkg_config_path: T.List[str] = self.env.coredata.options[OptionKey('pkg_config_path', machine=self.for_machine)].value
+        pkg_config_path = self._convert_mingw_paths(pkg_config_path)
+        prefix_libpaths = OrderedSet(sort_libpaths(list(prefix_libpaths), pkg_config_path))
+        system_libpaths: OrderedSet[str] = OrderedSet()
+        full_args = self._convert_mingw_paths(self._split_args(out))
+        for arg in full_args:
+            if arg.startswith(('-L-l', '-L-L')):
+                # These are D language arguments, not library paths
+                continue
+            if arg.startswith('-L') and arg[2:] not in prefix_libpaths:
+                system_libpaths.add(arg[2:])
+        # Use this re-ordered path list for library resolution
+        libpaths = list(prefix_libpaths) + list(system_libpaths)
+        # Track -lfoo libraries to avoid duplicate work
+        libs_found: OrderedSet[str] = OrderedSet()
+        # Track not-found libraries to know whether to add library paths
+        libs_notfound = []
+        libtype = LibType.STATIC if self.static else LibType.PREFER_SHARED
+        # Generate link arguments for this library
+        link_args = []
+        for lib in full_args:
+            if lib.startswith(('-L-l', '-L-L')):
+                # These are D language arguments, add them as-is
+                pass
+            elif lib.startswith('-L'):
+                # We already handled library paths above
+                continue
+            elif lib.startswith('-l:'):
+                # see: https://stackoverflow.com/questions/48532868/gcc-library-option-with-a-colon-llibevent-a
+                # also: the documentation of -lnamespec | --library=namespec in the
+                # linker manual, https://sourceware.org/binutils/docs-2.18/ld/Options.html
+
+                # Don't resolve the same -l:libfoo.a argument again
+                if lib in libs_found:
+                    continue
+                libfilename = lib[3:]
+                foundname = None
+                for libdir in libpaths:
+                    target = os.path.join(libdir, libfilename)
+                    if os.path.exists(target):
+                        foundname = target
+                        break
+                if foundname is None:
+                    if lib in libs_notfound:
+                        continue
+                    else:
+                        mlog.warning('Library {!r} not found for dependency {!r}, may '
+                                    'not be successfully linked'.format(libfilename, self.name))
+                    libs_notfound.append(lib)
+                else:
+                    lib = foundname
+            elif lib.startswith('-l'):
+                # Don't resolve the same -lfoo argument again
+                if lib in libs_found:
+                    continue
+                if self.clib_compiler:
+                    args = self.clib_compiler.find_library(lib[2:], self.env,
+                                                           libpaths, libtype)
+                # If the project only uses a non-clib language such as D, Rust,
+                # C#, Python, etc, all we can do is limp along by adding the
+                # arguments as-is and then adding the libpaths at the end.
+                else:
+                    args = None
+                if args is not None:
+                    libs_found.add(lib)
+                    # Replace -l arg with full path to library if available
+                    # else, library is either to be ignored, or is provided by
+                    # the compiler, can't be resolved, and should be used as-is
+                    if args:
+                        if not args[0].startswith('-l'):
+                            lib = args[0]
+                    else:
+                        continue
+                else:
+                    # Library wasn't found, maybe we're looking in the wrong
+                    # places or the library will be provided with LDFLAGS or
+                    # LIBRARY_PATH from the environment (on macOS), and many
+                    # other edge cases that we can't account for.
+                    #
+                    # Add all -L paths and use it as -lfoo
+                    if lib in libs_notfound:
+                        continue
+                    if self.static:
+                        mlog.warning('Static library {!r} not found for dependency {!r}, may '
+                                     'not be statically linked'.format(lib[2:], self.name))
+                    libs_notfound.append(lib)
+            elif lib.endswith(".la"):
+                shared_libname = self.extract_libtool_shlib(lib)
+                shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
+                if not os.path.exists(shared_lib):
+                    shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)
+
+                if not os.path.exists(shared_lib):
+                    raise DependencyException('Got a libtool specific "%s" dependency, '
+                                              'but we could not compute the actual shared '
+                                              'library path' % lib)
+                self.is_libtool = True
+                lib = shared_lib
+                if lib in link_args:
+                    continue
+            link_args.append(lib)
+        # Add all -Lbar args if we have -lfoo args in link_args
+        if libs_notfound:
+            # Order of -L flags doesn't matter with ld, but it might with other
+            # linkers such as MSVC, so prepend them.
+            link_args = ['-L' + lp for lp in prefix_libpaths] + link_args
+        return link_args, raw_link_args
+
+    def _set_libs(self) -> None:
+        env = None
+        libcmd = ['--libs']
+
+        if self.static:
+            libcmd.append('--static')
+
+        libcmd.append(self.name)
+
+        # Force pkg-config to output -L fields even if they are system
+        # paths so we can do manual searching with cc.find_library() later.
+        env = os.environ.copy()
+        env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1'
+        ret, out, err = self._call_pkgbin(libcmd, env=env)
+        if ret != 0:
+            raise DependencyException('Could not generate libs for %s:\n%s\n' %
+                                      (self.name, err))
+        # Also get the 'raw' output without -Lfoo system paths for adding -L
+        # args with -lfoo when a library can't be found, and also in
+        # gnome.generate_gir + gnome.gtkdoc which need -L -l arguments.
+        ret, out_raw, err_raw = self._call_pkgbin(libcmd)
+        if ret != 0:
+            raise DependencyException('Could not generate libs for %s:\n\n%s' %
+                                      (self.name, out_raw))
+        self.link_args, self.raw_link_args = self._search_libs(out, out_raw)
+
+    def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Union[str, T.List[str]]]) -> str:
+        options = ['--variable=' + variable_name, self.name]
+
+        if 'define_variable' in kwargs:
+            definition = kwargs.get('define_variable', [])
+            if not isinstance(definition, list):
+                raise DependencyException('define_variable takes a list')
+
+            if len(definition) != 2 or not all(isinstance(i, str) for i in definition):
+                raise DependencyException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')
+
+            options = ['--define-variable=' + '='.join(definition)] + options
+
+        ret, out, err = self._call_pkgbin(options)
+        variable = ''
+        if ret != 0:
+            if self.required:
+                raise DependencyException('dependency %s not found:\n%s\n' %
+                                          (self.name, err))
+        else:
+            variable = out.strip()
+
+            # pkg-config doesn't distinguish between empty and non-existent variables
+            # use the variable list to check for variable existence
+            if not variable:
+                ret, out, _ = self._call_pkgbin(['--print-variables', self.name])
+                if not re.search(r'^' + variable_name + r'$', out, re.MULTILINE):
+                    if 'default' in kwargs:
+                        assert isinstance(kwargs['default'], str)
+                        variable = kwargs['default']
+                    else:
+                        mlog.warning(f"pkgconfig variable '{variable_name}' not defined for dependency {self.name}.")
+
+        mlog.debug(f'Got pkgconfig variable {variable_name} : {variable}')
+        return variable
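+
+    # Rough usage sketch, not from the original code: for a found dependency this
+    # mirrors `pkg-config --variable=<name> <pkg>`, e.g.
+    #   >>> dep.get_pkgconfig_variable('prefix', {})
+    #   '/usr'
+    #   >>> dep.get_pkgconfig_variable('plugindir', {'default': '/usr/lib/plugins'})
+    # where the hypothetical 'plugindir' variable falls back to the supplied
+    # default if the .pc file does not define it.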
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.PKGCONFIG]
+
+    def check_pkgconfig(self, pkgbin: ExternalProgram) -> T.Optional[str]:
+        if not pkgbin.found():
+            mlog.log(f'Did not find pkg-config by name {pkgbin.name!r}')
+            return None
+        try:
+            p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
+            if p.returncode != 0:
+                mlog.warning('Found pkg-config {!r} but it failed when run'
+                             ''.format(' '.join(pkgbin.get_command())))
+                return None
+        except FileNotFoundError:
+            mlog.warning('We thought we found pkg-config {!r} but now it\'s not there. How odd!'
+                         ''.format(' '.join(pkgbin.get_command())))
+            return None
+        except PermissionError:
+            msg = 'Found pkg-config {!r} but didn\'t have permissions to run it.'.format(' '.join(pkgbin.get_command()))
+            if not self.env.machines.build.is_windows():
+                msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+            mlog.warning(msg)
+            return None
+        return out.strip()
+
+    def extract_field(self, la_file: str, fieldname: str) -> T.Optional[str]:
+        with open(la_file, encoding='utf-8') as f:
+            for line in f:
+                arr = line.strip().split('=')
+                if arr[0] == fieldname:
+                    return arr[1][1:-1]
+        return None
+
+    def extract_dlname_field(self, la_file: str) -> T.Optional[str]:
+        return self.extract_field(la_file, 'dlname')
+
+    def extract_libdir_field(self, la_file: str) -> T.Optional[str]:
+        return self.extract_field(la_file, 'libdir')
+
+    def extract_libtool_shlib(self, la_file: str) -> T.Optional[str]:
+        '''
+        Returns the path to the shared library
+        corresponding to this .la file
+        '''
+        dlname = self.extract_dlname_field(la_file)
+        if dlname is None:
+            return None
+
+        # Darwin uses absolute paths where possible; since the libtool files never
+        # contain absolute paths, use the libdir field
+        if self.env.machines[self.for_machine].is_darwin():
+            dlbasename = os.path.basename(dlname)
+            libdir = self.extract_libdir_field(la_file)
+            if libdir is None:
+                return dlbasename
+            return os.path.join(libdir, dlbasename)
+        # From the comments in extract_libtool(), older libtools had
+        # a path rather than the raw dlname
+        return os.path.basename(dlname)
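+
+    # Illustrative sketch, not from the original code: for a .la file whose
+    # dlname field is 'libfoo.so.1' this returns 'libfoo.so.1', while on Darwin
+    # the basename is joined with the libdir field, giving something like
+    # '/usr/local/lib/libfoo.1.dylib'.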
+
+    def log_tried(self) -> str:
+        return self.type_name
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+        if pkgconfig:
+            kwargs: T.Dict[str, T.Union[str, T.List[str]]] = {}
+            if default_value is not None:
+                kwargs['default'] = default_value
+            if pkgconfig_define is not None:
+                kwargs['define_variable'] = pkgconfig_define
+            try:
+                return self.get_pkgconfig_variable(pkgconfig, kwargs)
+            except DependencyException:
+                pass
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'Could not get pkg-config variable and no default provided for {self!r}')
diff --git a/meson/mesonbuild/dependencies/platform.py b/meson/mesonbuild/dependencies/platform.py
new file mode 100644
index 000000000..7759b0f93
--- /dev/null
+++ b/meson/mesonbuild/dependencies/platform.py
@@ -0,0 +1,58 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that are
+# platform-specific (generally speaking).
+
+from .base import DependencyTypeName, ExternalDependency, DependencyException
+from ..mesonlib import MesonException
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class AppleFrameworks(ExternalDependency):
+    def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(DependencyTypeName('appleframeworks'), env, kwargs)
+        modules = kwargs.get('modules', [])
+        if isinstance(modules, str):
+            modules = [modules]
+        if not modules:
+            raise DependencyException("AppleFrameworks dependency requires at least one module.")
+        self.frameworks = modules
+        if not self.clib_compiler:
+            raise DependencyException('No C-like compilers are available, cannot find the framework')
+        self.is_found = True
+        for f in self.frameworks:
+            try:
+                args = self.clib_compiler.find_framework(f, env, [])
+            except MesonException as e:
+                if 'non-clang' in str(e):
+                    self.is_found = False
+                    self.link_args = []
+                    self.compile_args = []
+                    return
+                raise
+
+            if args is not None:
+                # No compile args are needed for system frameworks
+                self.link_args += args
+            else:
+                self.is_found = False
+
+    def log_info(self) -> str:
+        return ', '.join(self.frameworks)
+
+    def log_tried(self) -> str:
+        return 'framework'
diff --git a/meson/mesonbuild/dependencies/qt.py b/meson/mesonbuild/dependencies/qt.py
new file mode 100644
index 000000000..4eef71e33
--- /dev/null
+++ b/meson/mesonbuild/dependencies/qt.py
@@ -0,0 +1,438 @@
+# Copyright 2013-2017 The Meson development team
+# Copyright © 2021 Intel Corporation
+# SPDX-license-identifier: Apache-2.0
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Dependency finders for the Qt framework."""
+
+import abc
+import re
+import os
+import typing as T
+
+from .base import DependencyException, DependencyMethods
+from .configtool import ConfigToolDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import DependencyFactory
+from .. import mlog
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+    from ..compilers import Compiler
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+
+
+def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) -> T.List[str]:
+    # Usually Qt5 puts private headers in /QT_INSTALL_HEADERS/module/VERSION/module/private,
+    # except for at least QtWebkit and Enginio, where the module version doesn't match the Qt version.
+    # As an example, with Qt 5.10.1 on Linux you would get:
+    # /usr/include/qt5/QtCore/5.10.1/QtCore/private/
+    # /usr/include/qt5/QtWidgets/5.10.1/QtWidgets/private/
+    # /usr/include/qt5/QtWebKit/5.212.0/QtWebKit/private/
+
+    # On Qt4, when available, the private folder is directly inside the module
+    # folder, e.g. /usr/include/QtCore/private/
+    if int(mod_version.split('.')[0]) < 5:
+        return []
+
+    private_dir = os.path.join(mod_inc_dir, mod_version)
+    # fallback, let's try to find a directory with the latest version
+    if not os.path.exists(private_dir):
+        dirs = [filename for filename in os.listdir(mod_inc_dir)
+                if os.path.isdir(os.path.join(mod_inc_dir, filename))]
+
+        for dirname in sorted(dirs, reverse=True):
+            if len(dirname.split('.')) == 3:
+                private_dir = dirname
+                break
+    return [private_dir, os.path.join(private_dir, 'Qt' + module)]
+
+
+def get_qmake_host_bins(qvars: T.Dict[str, str]) -> str:
+    # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling)
+    # but fall back to QT_INSTALL_BINS (qt4)
+    if 'QT_HOST_BINS' in qvars:
+        return qvars['QT_HOST_BINS']
+    return qvars['QT_INSTALL_BINS']
+
+
+def _get_modules_lib_suffix(version: str, info: 'MachineInfo', is_debug: bool) -> str:
+    """Get the module suffix based on platform and debug type."""
+    suffix = ''
+    if info.is_windows():
+        if is_debug:
+            suffix += 'd'
+        if version.startswith('4'):
+            suffix += '4'
+    if info.is_darwin():
+        if is_debug:
+            suffix += '_debug'
+    if mesonlib.version_compare(version, '>= 5.14.0'):
+        if info.is_android():
+            if info.cpu_family == 'x86':
+                suffix += '_x86'
+            elif info.cpu_family == 'x86_64':
+                suffix += '_x86_64'
+            elif info.cpu_family == 'arm':
+                suffix += '_armeabi-v7a'
+            elif info.cpu_family == 'aarch64':
+                suffix += '_arm64-v8a'
+            else:
+                mlog.warning(f'Android target arch "{info.cpu_family}" for Qt5 is unknown, '
+                             'module detection may not work')
+    return suffix
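+
+# Illustrative sketch, not from the original code: a Windows debug build of Qt4
+# yields the suffix 'd4' (e.g. QtCored4), while a Qt >= 5.14 Android build for
+# cpu_family 'arm' yields '_armeabi-v7a'.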
+
+
+class QtExtraFrameworkDependency(ExtraFrameworkDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        self.mod_name = name[2:]
+
+    def get_compile_args(self, with_private_headers: bool = False, qt_version: str = "0") -> T.List[str]:
+        if self.found():
+            mod_inc_dir = os.path.join(self.framework_path, 'Headers')
+            args = ['-I' + mod_inc_dir]
+            if with_private_headers:
+                args += ['-I' + dirname for dirname in _qt_get_private_includes(mod_inc_dir, self.mod_name, qt_version)]
+            return args
+        return []
+
+
+class _QtBase:
+
+    """Mixin class for shared componenets between PkgConfig and Qmake."""
+
+    link_args: T.List[str]
+    clib_compiler: 'Compiler'
+    env: 'Environment'
+
+    def __init__(self, name: str, kwargs: T.Dict[str, T.Any]):
+        self.qtname = name.capitalize()
+        self.qtver = name[-1]
+        if self.qtver == "4":
+            self.qtpkgname = 'Qt'
+        else:
+            self.qtpkgname = self.qtname
+
+        self.private_headers = T.cast(bool, kwargs.get('private_headers', False))
+
+        self.requested_modules = mesonlib.stringlistify(mesonlib.extract_as_list(kwargs, 'modules'))
+        if not self.requested_modules:
+            raise DependencyException('No ' + self.qtname + ' modules specified.')
+
+        self.qtmain = T.cast(bool, kwargs.get('main', False))
+        if not isinstance(self.qtmain, bool):
+            raise DependencyException('"main" argument must be a boolean')
+
+    def _link_with_qtmain(self, is_debug: bool, libdir: T.Union[str, T.List[str]]) -> bool:
+        libdir = mesonlib.listify(libdir)  # TODO: shouldn't be necessary
+        base_name = 'qtmaind' if is_debug else 'qtmain'
+        qtmain = self.clib_compiler.find_library(base_name, self.env, libdir)
+        if qtmain:
+            self.link_args.append(qtmain[0])
+            return True
+        return False
+
+    def get_exe_args(self, compiler: 'Compiler') -> T.List[str]:
+        # Originally this was -fPIE but nowadays the default
+        # for upstream and distros seems to be -reduce-relocations
+        # which requires -fPIC. This may cause a performance
+        # penalty when using self-built Qt or on platforms
+        # where -fPIC is not required. If this is an issue
+        # for you, patches are welcome.
+        return compiler.get_pic_args()
+
+    def log_details(self) -> str:
+        return f'modules: {", ".join(sorted(self.requested_modules))}'
+
+
+class QtPkgConfigDependency(_QtBase, PkgConfigDependency, metaclass=abc.ABCMeta):
+
+    """Specialization of the PkgConfigDependency for Qt."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        _QtBase.__init__(self, name, kwargs)
+
+        # Always use QtCore as the "main" dependency, since it has the extra
+        # pkg-config variables that a user would expect to get. If "Core" is
+        # not a requested module, delete the compile and link arguments to
+        # avoid linking with something they didn't ask for
+        PkgConfigDependency.__init__(self, self.qtpkgname + 'Core', env, kwargs)
+        if 'Core' not in self.requested_modules:
+            self.compile_args = []
+            self.link_args = []
+
+        for m in self.requested_modules:
+            mod = PkgConfigDependency(self.qtpkgname + m, self.env, kwargs, language=self.language)
+            if not mod.found():
+                self.is_found = False
+                return
+            if self.private_headers:
+                qt_inc_dir = mod.get_pkgconfig_variable('includedir', {})
+                mod_private_dir = os.path.join(qt_inc_dir, 'Qt' + m)
+                if not os.path.isdir(mod_private_dir):
+                    # At least some versions of homebrew don't seem to set this
+                    # up correctly. /usr/local/opt/qt/include/Qt + m_name is a
+                    # symlink to /usr/local/opt/qt/include, but the pkg-config
+                    # file points to /usr/local/Cellar/qt/x.y.z/Headers/, and
+                    # the Qt + m_name there is not a symlink, it's a file
+                    mod_private_dir = qt_inc_dir
+                mod_private_inc = _qt_get_private_includes(mod_private_dir, m, mod.version)
+                for directory in mod_private_inc:
+                    mod.compile_args.append('-I' + directory)
+            self._add_sub_dependency([lambda: mod])
+
+        if self.env.machines[self.for_machine].is_windows() and self.qtmain:
+            # Check if we link with debug binaries
+            debug_lib_name = self.qtpkgname + 'Core' + _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], True)
+            is_debug = False
+            for arg in self.get_link_args():
+                if arg == f'-l{debug_lib_name}' or arg.endswith(f'{debug_lib_name}.lib') or arg.endswith(f'{debug_lib_name}.a'):
+                    is_debug = True
+                    break
+            libdir = self.get_pkgconfig_variable('libdir', {})
+            if not self._link_with_qtmain(is_debug, libdir):
+                self.is_found = False
+                return
+
+        self.bindir = self.get_pkgconfig_host_bins(self)
+        if not self.bindir:
+            # If exec_prefix is not defined, the pkg-config file is broken
+            prefix = self.get_pkgconfig_variable('exec_prefix', {})
+            if prefix:
+                self.bindir = os.path.join(prefix, 'bin')
+
+    @staticmethod
+    @abc.abstractmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]:
+        pass
+
+    @abc.abstractmethod
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        pass
+
+    def log_info(self) -> str:
+        return 'pkg-config'
+
+
+class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta):
+
+    """Find Qt using Qmake as a config-tool."""
+
+    tool_name = 'qmake'
+    version_arg = '-v'
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        _QtBase.__init__(self, name, kwargs)
+        self.tools = [f'qmake-{self.qtname}', 'qmake']
+
+        # Add additional constraints so that the Qt version requirement is
+        # met, but preserve any version requirements the user has set as
+        # well. For example, if Qt5 is requested, add ">= 5, < 6", but if the
+        # user has ">= 5.6", don't lose that.
+        kwargs = kwargs.copy()
+        _vers = mesonlib.listify(kwargs.get('version', []))
+        _vers.extend([f'>= {self.qtver}', f'< {int(self.qtver) + 1}'])
+        kwargs['version'] = _vers
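+        # Illustrative example (hypothetical values): with self.qtver == '5'
+        # and a user request of version: '>= 5.6', kwargs['version'] becomes
+        # ['>= 5.6', '>= 5', '< 6'], so both constraints are checked.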
+
+        ConfigToolDependency.__init__(self, name, env, kwargs)
+        if not self.found():
+            return
+
+        # Query library path, header path, and binary path
+        stdo = self.get_config_value(['-query'], 'args')
+        qvars: T.Dict[str, str] = {}
+        for line in stdo:
+            line = line.strip()
+            if line == '':
+                continue
+            k, v = line.split(':', 1)
+            qvars[k] = v
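+        # Example of the assumed '-query' output format: a line such as
+        # 'QT_INSTALL_LIBS:/usr/lib/x86_64-linux-gnu' is stored as
+        # qvars['QT_INSTALL_LIBS'] = '/usr/lib/x86_64-linux-gnu'.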
+        # Qt on macOS uses a framework, but Qt for iOS/tvOS does not
+        xspec = qvars.get('QMAKE_XSPEC', '')
+        if self.env.machines.host.is_darwin() and not any(s in xspec for s in ['ios', 'tvos']):
+            mlog.debug("Building for macOS, looking for framework")
+            self._framework_detect(qvars, self.requested_modules, kwargs)
+            # Sometimes Qt is not built as a framework (for instance, when
+            # using the Conan package manager); in that case skip this and
+            # fall back to the normal library search below.
+            if self.is_found:
+                return
+            else:
+                mlog.debug("Building for macOS, couldn't find framework, falling back to library search")
+        incdir = qvars['QT_INSTALL_HEADERS']
+        self.compile_args.append('-I' + incdir)
+        libdir = qvars['QT_INSTALL_LIBS']
+        # Used by qt.compilers_detect()
+        self.bindir = get_qmake_host_bins(qvars)
+
+        # Use the buildtype by default, but look at the b_vscrt option if the
+        # compiler supports it.
+        is_debug = self.env.coredata.get_option(mesonlib.OptionKey('buildtype')) == 'debug'
+        if mesonlib.OptionKey('b_vscrt') in self.env.coredata.options:
+            if self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value in {'mdd', 'mtd'}:
+                is_debug = True
+        modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug)
+
+        for module in self.requested_modules:
+            mincdir = os.path.join(incdir, 'Qt' + module)
+            self.compile_args.append('-I' + mincdir)
+
+            if module == 'QuickTest':
+                define_base = 'QMLTEST'
+            elif module == 'Test':
+                define_base = 'TESTLIB'
+            else:
+                define_base = module.upper()
+            self.compile_args.append(f'-DQT_{define_base}_LIB')
+
+            if self.private_headers:
+                priv_inc = self.get_private_includes(mincdir, module)
+                for directory in priv_inc:
+                    self.compile_args.append('-I' + directory)
+            libfiles = self.clib_compiler.find_library(
+                self.qtpkgname + module + modules_lib_suffix, self.env,
+                mesonlib.listify(libdir)) # TODO: shouldn't be necessary
+            if libfiles:
+                libfile = libfiles[0]
+            else:
+                mlog.log("Could not find:", module,
+                         self.qtpkgname + module + modules_lib_suffix,
+                         'in', libdir)
+                self.is_found = False
+                break
+            self.link_args.append(libfile)
+
+        if self.env.machines[self.for_machine].is_windows() and self.qtmain:
+            if not self._link_with_qtmain(is_debug, libdir):
+                self.is_found = False
+
+    def _sanitize_version(self, version: str) -> str:
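+        # Illustrative example (assumed qmake output wording): a detected
+        # version string such as 'Using Qt version 5.15.2 in /usr/lib' is
+        # reduced to '5.15.2' when self.qtver is '5'.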
+        m = re.search(rf'({self.qtver}(\.\d+)+)', version)
+        if m:
+            return m.group(0).rstrip('.')
+        return version
+
+    @abc.abstractmethod
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        pass
+
+    def _framework_detect(self, qvars: T.Dict[str, str], modules: T.List[str], kwargs: T.Dict[str, T.Any]) -> None:
+        libdir = qvars['QT_INSTALL_LIBS']
+
+        # ExtraFrameworkDependency doesn't support any methods
+        fw_kwargs = kwargs.copy()
+        fw_kwargs.pop('method', None)
+        fw_kwargs['paths'] = [libdir]
+
+        for m in modules:
+            fname = 'Qt' + m
+            mlog.debug('Looking for qt framework ' + fname)
+            fwdep = QtExtraFrameworkDependency(fname, self.env, fw_kwargs, language=self.language)
+            if fwdep.found():
+                self.compile_args.append('-F' + libdir)
+                self.compile_args += fwdep.get_compile_args(with_private_headers=self.private_headers,
+                                                            qt_version=self.version)
+                self.link_args += fwdep.get_link_args()
+            else:
+                self.is_found = False
+                break
+        else:
+            self.is_found = True
+            # Used by self.compilers_detect()
+            self.bindir = get_qmake_host_bins(qvars)
+
+    def log_info(self) -> str:
+        return 'qmake'
+
+
+class Qt4ConfigToolDependency(QmakeQtDependency):
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return []
+
+
+class Qt5ConfigToolDependency(QmakeQtDependency):
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt6ConfigToolDependency(QmakeQtDependency):
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt4PkgConfigDependency(QtPkgConfigDependency):
+
+    @staticmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]:
+        # Only return one bins dir, because the tools are generally all in
+        # one directory for Qt4; in Qt5, they must all be in one directory.
+        # Return the first one found among the bin variables, in case one
+        # tool is not configured to be built.
+        applications = ['moc', 'uic', 'rcc', 'lupdate', 'lrelease']
+        for application in applications:
+            try:
+                return os.path.dirname(core.get_pkgconfig_variable('%s_location' % application, {}))
+            except mesonlib.MesonException:
+                pass
+        return None
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return []
+
+
+class Qt5PkgConfigDependency(QtPkgConfigDependency):
+
+    @staticmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str:
+        return core.get_pkgconfig_variable('host_bins', {})
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt6PkgConfigDependency(QtPkgConfigDependency):
+
+    @staticmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str:
+        return core.get_pkgconfig_variable('host_bins', {})
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+qt4_factory = DependencyFactory(
+    'qt4',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    pkgconfig_class=Qt4PkgConfigDependency,
+    configtool_class=Qt4ConfigToolDependency,
+)
+
+qt5_factory = DependencyFactory(
+    'qt5',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    pkgconfig_class=Qt5PkgConfigDependency,
+    configtool_class=Qt5ConfigToolDependency,
+)
+
+qt6_factory = DependencyFactory(
+    'qt6',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    pkgconfig_class=Qt6PkgConfigDependency,
+    configtool_class=Qt6ConfigToolDependency,
+)
diff --git a/meson/mesonbuild/dependencies/scalapack.py b/meson/mesonbuild/dependencies/scalapack.py
new file mode 100644
index 000000000..707e69850
--- /dev/null
+++ b/meson/mesonbuild/dependencies/scalapack.py
@@ -0,0 +1,153 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+import functools
+import os
+import typing as T
+
+from .base import DependencyMethods
+from .base import DependencyException
+from .cmake import CMakeDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment, MachineChoice
+    from .factory import DependencyGenerator
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def scalapack_factory(env: 'Environment', for_machine: 'MachineChoice',
+                      kwargs: T.Dict[str, T.Any],
+                      methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        mkl = 'mkl-static-lp64-iomp' if kwargs.get('static', False) else 'mkl-dynamic-lp64-iomp'
+        candidates.append(functools.partial(
+            MKLPkgConfigDependency, mkl, env, kwargs))
+
+        for pkg in ['scalapack-openmpi', 'scalapack']:
+            candidates.append(functools.partial(
+                PkgConfigDependency, pkg, env, kwargs))
+
+    if DependencyMethods.CMAKE in methods:
+        candidates.append(functools.partial(
+            CMakeDependency, 'Scalapack', env, kwargs))
+
+    return candidates
+
+
+class MKLPkgConfigDependency(PkgConfigDependency):
+
+    """PkgConfigDependency for Intel MKL.
+
+    MKL's pkg-config is pretty much borked in every way. We need to apply a
+    bunch of fixups to make it work correctly.
+    """
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        _m = os.environ.get('MKLROOT')
+        self.__mklroot = Path(_m).resolve() if _m else None
+
+        # We need to call down into the normal super() method even if we don't
+        # find mklroot, otherwise we won't have all of the instance variables
+        # initialized that meson expects.
+        super().__init__(name, env, kwargs, language=language)
+
+        # Doesn't work with gcc on windows, but does on Linux
+        if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
+                                   and self.clib_compiler.id == 'gcc')):
+            self.is_found = False
+
+        # This can happen either because we're using GCC, we couldn't find the
+        # mklroot, or the pkg-config couldn't find it.
+        if not self.is_found:
+            return
+
+        assert self.version != '', 'This should not happen if we didn\'t return above'
+
+        if self.version == 'unknown':
+            # At least by 2020 the version is in the pkg-config, just not with
+            # the correct name
+            v = self.get_variable(pkgconfig='Version', default_value='')
+
+            if not v and self.__mklroot:
+                try:
+                    v = (
+                        self.__mklroot.as_posix()
+                        .split('compilers_and_libraries_')[1]
+                        .split('/', 1)[0]
+                    )
+                except IndexError:
+                    pass
+
+            if v:
+                assert isinstance(v, str)
+                self.version = v
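+        # Illustrative example (hypothetical path): an MKLROOT of
+        # /opt/intel/compilers_and_libraries_2020.1.217/linux/mkl yields
+        # v == '2020.1.217' via the path-splitting fallback above.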
+
+    def _set_libs(self) -> None:
+        super()._set_libs()
+
+        if self.env.machines[self.for_machine].is_windows():
+            suffix = '.lib'
+        elif self.static:
+            suffix = '.a'
+        else:
+            suffix = ''
+        libdir = self.__mklroot / 'lib/intel64'
+
+        if self.clib_compiler.id == 'gcc':
+            for i, a in enumerate(self.link_args):
+                # only replace in filename, not in directory names
+                dirname, basename = os.path.split(a)
+                if 'mkl_intel_lp64' in basename:
+                    basename = basename.replace('intel', 'gf')
+                    self.link_args[i] = '/' + os.path.join(dirname, basename)
+        # MKL pkg-config omits scalapack
+        # be sure "-L" and "-Wl" are first if present
+        i = 0
+        for j, a in enumerate(self.link_args):
+            if a.startswith(('-L', '-Wl')):
+                i = j + 1
+            elif j > 3:
+                break
+        if self.env.machines[self.for_machine].is_windows() or self.static:
+            self.link_args.insert(
+                i, str(libdir / ('mkl_scalapack_lp64' + suffix))
+            )
+            self.link_args.insert(
+                i + 1, str(libdir / ('mkl_blacs_intelmpi_lp64' + suffix))
+            )
+        else:
+            self.link_args.insert(i, '-lmkl_scalapack_lp64')
+            self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
+
+    def _set_cargs(self) -> None:
+        env = None
+        if self.language == 'fortran':
+            # gfortran doesn't appear to look in system paths for INCLUDE files,
+            # so don't allow pkg-config to suppress -I flags for system paths
+            env = os.environ.copy()
+            env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+        ret, out, err = self._call_pkgbin([
+            '--cflags', self.name,
+            '--define-variable=prefix=' + self.__mklroot.as_posix()],
+            env=env)
+        if ret != 0:
+            raise DependencyException('Could not generate cargs for %s:\n%s\n' %
+                                      (self.name, err))
+        self.compile_args = self._convert_mingw_paths(self._split_args(out))
diff --git a/meson/mesonbuild/dependencies/ui.py b/meson/mesonbuild/dependencies/ui.py
new file mode 100644
index 000000000..f256a370c
--- /dev/null
+++ b/meson/mesonbuild/dependencies/ui.py
@@ -0,0 +1,277 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that
+# are UI-related.
+import os
+import subprocess
+import typing as T
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import (
+    Popen_safe, extract_as_list, version_compare_many
+)
+from ..environment import detect_cpu_family
+
+from .base import DependencyException, DependencyMethods, DependencyTypeName, SystemDependency
+from .configtool import ConfigToolDependency
+from .factory import DependencyFactory
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+
+class GLDependencySystem(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs)
+
+        if self.env.machines[self.for_machine].is_darwin():
+            self.is_found = True
+            # FIXME: Use AppleFrameworks dependency
+            self.link_args = ['-framework', 'OpenGL']
+            # FIXME: Detect version using self.clib_compiler
+            return
+        if self.env.machines[self.for_machine].is_windows():
+            self.is_found = True
+            # FIXME: Use self.clib_compiler.find_library()
+            self.link_args = ['-lopengl32']
+            # FIXME: Detect version using self.clib_compiler
+            return
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        if mesonlib.is_osx() or mesonlib.is_windows():
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+        else:
+            return [DependencyMethods.PKGCONFIG]
+
+    def log_tried(self) -> str:
+        return 'system'
+
+class GnuStepDependency(ConfigToolDependency):
+
+    tools = ['gnustep-config']
+    tool_name = 'gnustep-config'
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('gnustep', environment, kwargs, language='objc')
+        if not self.is_found:
+            return
+        self.modules = kwargs.get('modules', [])
+        self.compile_args = self.filter_args(
+            self.get_config_value(['--objc-flags'], 'compile_args'))
+        self.link_args = self.weird_filter(self.get_config_value(
+            ['--gui-libs' if 'gui' in self.modules else '--base-libs'],
+            'link_args'))
+
+    def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
+        tool = [self.tools[0]]
+        try:
+            p, out = Popen_safe(tool + ['--help'])[:2]
+        except (FileNotFoundError, PermissionError):
+            return (None, None)
+        if p.returncode != returncode:
+            return (None, None)
+        self.config = tool
+        found_version = self.detect_version()
+        if versions and not version_compare_many(found_version, versions)[0]:
+            return (None, found_version)
+
+        return (tool, found_version)
+
+    @staticmethod
+    def weird_filter(elems: T.List[str]) -> T.List[str]:
+        """When building packages, the output of the enclosing Make is
+        sometimes mixed among the subprocess output. I have no idea why. As a
+        hack, filter out everything that is not a flag.
+        """
+        return [e for e in elems if e.startswith('-')]
+
+    @staticmethod
+    def filter_args(args: T.List[str]) -> T.List[str]:
+        """gnustep-config returns a bunch of garbage args such as -O2 and so
+        on. Drop everything that is not needed.
+        """
+        result = []
+        for f in args:
+            if f.startswith('-D') \
+                    or f.startswith('-f') \
+                    or f.startswith('-I') \
+                    or f == '-pthread' \
+                    or (f.startswith('-W') and not f == '-Wall'):
+                result.append(f)
+        return result
+
+    def detect_version(self) -> str:
+        gmake = self.get_config_value(['--variable=GNUMAKE'], 'variable')[0]
+        makefile_dir = self.get_config_value(['--variable=GNUSTEP_MAKEFILES'], 'variable')[0]
+        # This Makefile has the GNUStep version set
+        base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
+        # Print the Makefile variable passed as the argument. For instance, if
+        # you run the make target `print-SOME_VARIABLE`, this will print the
+        # value of the variable `SOME_VARIABLE`.
+        printver = "print-%:\n\t@echo '$($*)'"
+        env = os.environ.copy()
+        # See base.make to understand why this is set
+        env['FOUNDATION_LIB'] = 'gnu'
+        p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
+                              'print-GNUSTEP_BASE_VERSION'],
+                             env=env, write=printver, stdin=subprocess.PIPE)
+        version = o.strip()
+        if not version:
+            mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
+            # Fallback to setting some 1.x version
+            version = '1'
+        return version
+
+
+class SDL2DependencyConfigTool(ConfigToolDependency):
+
+    tools = ['sdl2-config']
+    tool_name = 'sdl2-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        if mesonlib.is_osx():
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK]
+        else:
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class WxDependency(ConfigToolDependency):
+
+    tools = ['wx-config-3.0', 'wx-config', 'wx-config-gtk3']
+    tool_name = 'wx-config'
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('WxWidgets', environment, kwargs, language='cpp')
+        if not self.is_found:
+            return
+        self.requested_modules = self.get_requested(kwargs)
+
+        extra_args = []
+        if self.static:
+            extra_args.append('--static=yes')
+
+            # Check to make sure static is going to work
+            err = Popen_safe(self.config + extra_args)[2]
+            if 'No config found to match' in err:
+                mlog.debug('WxWidgets is missing static libraries.')
+                self.is_found = False
+                return
+
+        # wx-config seems to have a cflags as well but since it requires C++,
+        # this should be good, at least for now.
+        self.compile_args = self.get_config_value(['--cxxflags'] + extra_args + self.requested_modules, 'compile_args')
+        self.link_args = self.get_config_value(['--libs'] + extra_args + self.requested_modules, 'link_args')
+
+    @staticmethod
+    def get_requested(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        if 'modules' not in kwargs:
+            return []
+        candidates = extract_as_list(kwargs, 'modules')
+        for c in candidates:
+            if not isinstance(c, str):
+                raise DependencyException('wxwidgets module argument is not a string')
+        return candidates
+
+
+class VulkanDependencySystem(SystemDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        super().__init__(name, environment, kwargs, language=language)
+
+        try:
+            self.vulkan_sdk = os.environ['VULKAN_SDK']
+            if not os.path.isabs(self.vulkan_sdk):
+                raise DependencyException('VULKAN_SDK must be an absolute path.')
+        except KeyError:
+            self.vulkan_sdk = None
+
+        if self.vulkan_sdk:
+            # TODO: this config might not work on some platforms, fix bugs as reported
+            # we should at least detect other 64-bit platforms (e.g. armv8)
+            lib_name = 'vulkan'
+            lib_dir = 'lib'
+            inc_dir = 'include'
+            if mesonlib.is_windows():
+                lib_name = 'vulkan-1'
+                lib_dir = 'Lib32'
+                inc_dir = 'Include'
+                if detect_cpu_family(self.env.coredata.compilers.host) == 'x86_64':
+                    lib_dir = 'Lib'
+
+            # make sure header and lib are valid
+            inc_path = os.path.join(self.vulkan_sdk, inc_dir)
+            header = os.path.join(inc_path, 'vulkan', 'vulkan.h')
+            lib_path = os.path.join(self.vulkan_sdk, lib_dir)
+            find_lib = self.clib_compiler.find_library(lib_name, environment, [lib_path])
+
+            if not find_lib:
+                raise DependencyException('VULKAN_SDK points to an invalid directory (no lib)')
+
+            if not os.path.isfile(header):
+                raise DependencyException('VULKAN_SDK points to an invalid directory (no include)')
+
+            # XXX: this is very odd, and may deserve being removed
+            self.type_name = DependencyTypeName('vulkan_sdk')
+            self.is_found = True
+            self.compile_args.append('-I' + inc_path)
+            self.link_args.append('-L' + lib_path)
+            self.link_args.append('-l' + lib_name)
+
+            # TODO: find a way to retrieve the version from the sdk?
+            # Usually it is a part of the path to it (but does not have to be)
+            return
+        else:
+            # simply try to guess it, usually works on linux
+            libs = self.clib_compiler.find_library('vulkan', environment, [])
+            if libs is not None and self.clib_compiler.has_header('vulkan/vulkan.h', '', environment, disable_cache=True)[0]:
+                self.is_found = True
+                for lib in libs:
+                    self.link_args.append(lib)
+                return
+
+    @staticmethod
+    def get_methods() -> T.List[DependencyMethods]:
+        return [DependencyMethods.SYSTEM]
+
+    def log_tried(self) -> str:
+        return 'system'
+
+gl_factory = DependencyFactory(
+    'gl',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=GLDependencySystem,
+)
+
+sdl2_factory = DependencyFactory(
+    'sdl2',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK],
+    configtool_class=SDL2DependencyConfigTool,
+)
+
+vulkan_factory = DependencyFactory(
+    'vulkan',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=VulkanDependencySystem,
+)
diff --git a/meson/mesonbuild/depfile.py b/meson/mesonbuild/depfile.py
new file mode 100644
index 000000000..62cbe8125
--- /dev/null
+++ b/meson/mesonbuild/depfile.py
@@ -0,0 +1,85 @@
+# Copyright 2019 Red Hat, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+
+def parse(lines):
+    rules = []
+    targets = []
+    deps = []
+    in_deps = False
+    out = ''
+    for line in lines:
+        if not line.endswith('\n'):
+            line += '\n'
+        escape = None
+        for c in line:
+            if escape:
+                if escape == '$' and c != '$':
+                    out += '$'
+                if escape == '\\' and c == '\n':
+                    continue
+                out += c
+                escape = None
+                continue
+            if c == '\\' or c == '$':
+                escape = c
+                continue
+            elif c in (' ', '\n'):
+                if out != '':
+                    if in_deps:
+                        deps.append(out)
+                    else:
+                        targets.append(out)
+                out = ''
+                if c == '\n':
+                    rules.append((targets, deps))
+                    targets = []
+                    deps = []
+                    in_deps = False
+                continue
+            elif c == ':':
+                targets.append(out)
+                out = ''
+                in_deps = True
+                continue
+            out += c
+    return rules
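+# Illustrative example (not part of the original module): parsing the single
+# depfile line 'out.o: foo.c foo.h\n' yields [(['out.o'], ['foo.c', 'foo.h'])].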
+
+Target = collections.namedtuple('Target', ['deps'])
+
+class DepFile:
+    def __init__(self, lines):
+        rules = parse(lines)
+        depfile = {}
+        for (targets, deps) in rules:
+            for target in targets:
+                t = depfile.setdefault(target, Target(deps=set()))
+                for dep in deps:
+                    t.deps.add(dep)
+        self.depfile = depfile
+
+    def get_all_dependencies(self, target, visited=None):
+        deps = set()
+        if not visited:
+            visited = set()
+        if target in visited:
+            return set()
+        visited.add(target)
+        target = self.depfile.get(target)
+        if not target:
+            return set()
+        deps.update(target.deps)
+        for dep in target.deps:
+            deps.update(self.get_all_dependencies(dep, visited))
+        return sorted(deps)
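+    # Illustrative example (hypothetical depfile content): for the rules
+    # 'a: b' and 'b: c', DepFile(['a: b\n', 'b: c\n']).get_all_dependencies('a')
+    # returns ['b', 'c'], i.e. the transitive closure in sorted order.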
diff --git a/meson/mesonbuild/envconfig.py b/meson/mesonbuild/envconfig.py
new file mode 100644
index 000000000..307aac30e
--- /dev/null
+++ b/meson/mesonbuild/envconfig.py
@@ -0,0 +1,425 @@
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import typing as T
+from enum import Enum
+
+from . import mesonlib
+from .mesonlib import EnvironmentException, HoldableObject
+from . import mlog
+from pathlib import Path
+
+
+# These classes contain all the data pulled from configuration files (native
+# and cross files currently), and also assist with reading environment
+# variables.
+#
+# At this time there isn't an ironclad difference between this and other
+# sources of state like `coredata`. But one rough guide is that much of what
+# is in `coredata` is the *output* of the configuration process: the final
+# decisions after tests. This, on the other hand, has *inputs*. The config
+# files are parsed, but otherwise minimally transformed. When more complex
+# fallbacks (environment detection) exist, they are defined elsewhere as
+# functions that construct instances of these classes.
+
+
+known_cpu_families = (
+    'aarch64',
+    'alpha',
+    'arc',
+    'arm',
+    'avr',
+    'c2000',
+    'csky',
+    'dspic',
+    'e2k',
+    'ia64',
+    'loongarch64',
+    'm68k',
+    'microblaze',
+    'mips',
+    'mips64',
+    'parisc',
+    'pic24',
+    'ppc',
+    'ppc64',
+    'riscv32',
+    'riscv64',
+    'rl78',
+    'rx',
+    's390',
+    's390x',
+    'sh4',
+    'sparc',
+    'sparc64',
+    'wasm32',
+    'wasm64',
+    'x86',
+    'x86_64',
+)
+
+# It would feel more natural to call this "64_BIT_CPU_FAMILIES", but
+# python identifiers cannot start with numbers
+CPU_FAMILIES_64_BIT = [
+    'aarch64',
+    'alpha',
+    'ia64',
+    'loongarch64',
+    'mips64',
+    'ppc64',
+    'riscv64',
+    's390x',
+    'sparc64',
+    'wasm64',
+    'x86_64',
+]
+
+# Map from language identifiers to environment variables.
+ENV_VAR_PROG_MAP: T.Mapping[str, str] = {
+    # Compilers
+    'c': 'CC',
+    'cpp': 'CXX',
+    'cs': 'CSC',
+    'd': 'DC',
+    'fortran': 'FC',
+    'objc': 'OBJC',
+    'objcpp': 'OBJCXX',
+    'rust': 'RUSTC',
+    'vala': 'VALAC',
+
+    # Linkers
+    'c_ld': 'CC_LD',
+    'cpp_ld': 'CXX_LD',
+    'd_ld': 'DC_LD',
+    'fortran_ld': 'FC_LD',
+    'objc_ld': 'OBJC_LD',
+    'objcpp_ld': 'OBJCXX_LD',
+    'rust_ld': 'RUSTC_LD',
+
+    # Binutils
+    'strip': 'STRIP',
+    'ar': 'AR',
+    'windres': 'WINDRES',
+
+    # Other tools
+    'cmake': 'CMAKE',
+    'qmake': 'QMAKE',
+    'pkgconfig': 'PKG_CONFIG',
+    'make': 'MAKE',
+}
+
+# Deprecated environment variables mapped from the new variable to the old one
+# Deprecated in 0.54.0
+DEPRECATED_ENV_PROG_MAP: T.Mapping[str, str] = {
+    'd_ld': 'D_LD',
+    'fortran_ld': 'F_LD',
+    'rust_ld': 'RUST_LD',
+    'objcpp_ld': 'OBJCPP_LD',
+}
+
+class CMakeSkipCompilerTest(Enum):
+    ALWAYS = 'always'
+    NEVER = 'never'
+    DEP_ONLY = 'dep_only'
+
+class Properties:
+    def __init__(
+            self,
+            properties: T.Optional[T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]] = None,
+    ):
+        self.properties = properties or {}  # type: T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]
+
+    def has_stdlib(self, language: str) -> bool:
+        return language + '_stdlib' in self.properties
+
+    # Some of get_stdlib, get_root, get_sys_root are wider than is actually
+    # true, but without heterogeneous dict annotations it's not practical to
+    # narrow them
+    def get_stdlib(self, language: str) -> T.Union[str, T.List[str]]:
+        stdlib = self.properties[language + '_stdlib']
+        if isinstance(stdlib, str):
+            return stdlib
+        assert isinstance(stdlib, list)
+        for i in stdlib:
+            assert isinstance(i, str)
+        return stdlib
+
+    def get_root(self) -> T.Optional[str]:
+        root = self.properties.get('root', None)
+        assert root is None or isinstance(root, str)
+        return root
+
+    def get_sys_root(self) -> T.Optional[str]:
+        sys_root = self.properties.get('sys_root', None)
+        assert sys_root is None or isinstance(sys_root, str)
+        return sys_root
+
+    def get_pkg_config_libdir(self) -> T.Optional[T.List[str]]:
+        p = self.properties.get('pkg_config_libdir', None)
+        if p is None:
+            return p
+        res = mesonlib.listify(p)
+        for i in res:
+            assert isinstance(i, str)
+        return res
+
+    def get_cmake_defaults(self) -> bool:
+        if 'cmake_defaults' not in self.properties:
+            return True
+        res = self.properties['cmake_defaults']
+        assert isinstance(res, bool)
+        return res
+
+    def get_cmake_toolchain_file(self) -> T.Optional[Path]:
+        if 'cmake_toolchain_file' not in self.properties:
+            return None
+        raw = self.properties['cmake_toolchain_file']
+        assert isinstance(raw, str)
+        cmake_toolchain_file = Path(raw)
+        if not cmake_toolchain_file.is_absolute():
+            raise EnvironmentException(f'cmake_toolchain_file ({raw}) is not absolute')
+        return cmake_toolchain_file
+
+    def get_cmake_skip_compiler_test(self) -> CMakeSkipCompilerTest:
+        if 'cmake_skip_compiler_test' not in self.properties:
+            return CMakeSkipCompilerTest.DEP_ONLY
+        raw = self.properties['cmake_skip_compiler_test']
+        assert isinstance(raw, str)
+        try:
+            return CMakeSkipCompilerTest(raw)
+        except ValueError:
+            raise EnvironmentException(
+                '"{}" is not a valid value for cmake_skip_compiler_test. Supported values are {}'
+                .format(raw, [e.value for e in CMakeSkipCompilerTest]))
+
+    def get_cmake_use_exe_wrapper(self) -> bool:
+        if 'cmake_use_exe_wrapper' not in self.properties:
+            return True
+        res = self.properties['cmake_use_exe_wrapper']
+        assert isinstance(res, bool)
+        return res
+
+    def get_java_home(self) -> T.Optional[Path]:
+        value = T.cast(T.Optional[str], self.properties.get('java_home'))
+        return Path(value) if value else None
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, type(self)):
+            return self.properties == other.properties
+        return NotImplemented
+
+    # TODO consider removing so Properties is less freeform
+    def __getitem__(self, key: str) -> T.Optional[T.Union[str, bool, int, T.List[str]]]:
+        return self.properties[key]
+
+    # TODO consider removing so Properties is less freeform
+    def __contains__(self, item: T.Union[str, bool, int, T.List[str]]) -> bool:
+        return item in self.properties
+
+    # TODO consider removing, for same reasons as above
+    def get(self, key: str, default: T.Optional[T.Union[str, bool, int, T.List[str]]] = None) -> T.Optional[T.Union[str, bool, int, T.List[str]]]:
+        return self.properties.get(key, default)
+
+class MachineInfo(HoldableObject):
+    def __init__(self, system: str, cpu_family: str, cpu: str, endian: str):
+        self.system = system
+        self.cpu_family = cpu_family
+        self.cpu = cpu
+        self.endian = endian
+        self.is_64_bit = cpu_family in CPU_FAMILIES_64_BIT  # type: bool
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, MachineInfo):
+            return NotImplemented
+        return \
+            self.system == other.system and \
+            self.cpu_family == other.cpu_family and \
+            self.cpu == other.cpu and \
+            self.endian == other.endian
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, MachineInfo):
+            return NotImplemented
+        return not self.__eq__(other)
+
+    def __repr__(self) -> str:
+        return f'<MachineInfo: {self.system} {self.cpu_family} ({self.cpu})>'
+
+    @classmethod
+    def from_literal(cls, literal: T.Dict[str, str]) -> 'MachineInfo':
+        minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
+        if set(literal) < minimum_literal:
+            raise EnvironmentException(
+                f'Machine info is currently {literal}\n' +
+                'but is missing {}.'.format(minimum_literal - set(literal)))
+
+        cpu_family = literal['cpu_family']
+        if cpu_family not in known_cpu_families:
+            mlog.warning(f'Unknown CPU family {cpu_family}, please report this at https://github.com/mesonbuild/meson/issues/new')
+
+        endian = literal['endian']
+        if endian not in ('little', 'big'):
+            mlog.warning(f'Unknown endian {endian}')
+
+        return cls(literal['system'], cpu_family, literal['cpu'], endian)
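+        # Illustrative example (hypothetical cross-file values): from_literal(
+        # {'system': 'linux', 'cpu_family': 'arm', 'cpu': 'cortex-a9',
+        #  'endian': 'little'}) builds a MachineInfo without warnings, since
+        # 'arm' is a known CPU family and 'little' is a valid endianness.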
+
+    def is_windows(self) -> bool:
+        """
+        Machine is windows?
+        """
+        return self.system == 'windows'
+
+    def is_cygwin(self) -> bool:
+        """
+        Machine is cygwin?
+        """
+        return self.system == 'cygwin'
+
+    def is_linux(self) -> bool:
+        """
+        Machine is linux?
+        """
+        return self.system == 'linux'
+
+    def is_darwin(self) -> bool:
+        """
+        Machine is Darwin (iOS/tvOS/OS X)?
+        """
+        return self.system in {'darwin', 'ios', 'tvos'}
+
+    def is_android(self) -> bool:
+        """
+        Machine is Android?
+        """
+        return self.system == 'android'
+
+    def is_haiku(self) -> bool:
+        """
+        Machine is Haiku?
+        """
+        return self.system == 'haiku'
+
+    def is_netbsd(self) -> bool:
+        """
+        Machine is NetBSD?
+        """
+        return self.system == 'netbsd'
+
+    def is_openbsd(self) -> bool:
+        """
+        Machine is OpenBSD?
+        """
+        return self.system == 'openbsd'
+
+    def is_dragonflybsd(self) -> bool:
+        """Machine is DragonflyBSD?"""
+        return self.system == 'dragonfly'
+
+    def is_freebsd(self) -> bool:
+        """Machine is FreeBSD?"""
+        return self.system == 'freebsd'
+
+    def is_sunos(self) -> bool:
+        """Machine is illumos or Solaris?"""
+        return self.system == 'sunos'
+
+    def is_hurd(self) -> bool:
+        """
+        Machine is GNU/Hurd?
+        """
+        return self.system == 'gnu'
+
+    def is_irix(self) -> bool:
+        """Machine is IRIX?"""
+        return self.system.startswith('irix')
+
+    # Various prefixes and suffixes for import libraries, shared libraries,
+    # static libraries, and executables.
+    # Versioning is added to these names in the backends as-needed.
+    def get_exe_suffix(self) -> str:
+        if self.is_windows() or self.is_cygwin():
+            return 'exe'
+        else:
+            return ''
+
+    def get_object_suffix(self) -> str:
+        if self.is_windows():
+            return 'obj'
+        else:
+            return 'o'
+
+    def libdir_layout_is_win(self) -> bool:
+        return self.is_windows() or self.is_cygwin()
+
+class BinaryTable:
+
+    def __init__(
+            self,
+            binaries: T.Optional[T.Dict[str, T.Union[str, T.List[str]]]] = None,
+    ):
+        self.binaries: T.Dict[str, T.List[str]] = {}
+        if binaries:
+            for name, command in binaries.items():
+                if not isinstance(command, (list, str)):
+                    raise mesonlib.MesonException(
+                        f'Invalid type {command!r} for entry {name!r} in cross file')
+                self.binaries[name] = mesonlib.listify(command)
+
+    @staticmethod
+    def detect_ccache() -> T.List[str]:
+        try:
+            subprocess.check_call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        except (OSError, subprocess.CalledProcessError):
+            return []
+        return ['ccache']
+
+    @classmethod
+    def parse_entry(cls, entry: T.Union[str, T.List[str]]) -> T.Tuple[T.List[str], T.List[str]]:
+        compiler = mesonlib.stringlistify(entry)
+        # Ensure ccache exists and remove it if it doesn't
+        if compiler[0] == 'ccache':
+            compiler = compiler[1:]
+            ccache = cls.detect_ccache()
+        else:
+            ccache = []
+        # Return value has to be a list of compiler 'choices'
+        return compiler, ccache
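+        # Illustrative example (hypothetical cross-file entry): parse_entry(
+        # ['ccache', 'arm-none-eabi-gcc']) returns (['arm-none-eabi-gcc'],
+        # ['ccache']) when ccache is installed, and (['arm-none-eabi-gcc'], [])
+        # otherwise.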
+
+    def lookup_entry(self, name: str) -> T.Optional[T.List[str]]:
+        """Lookup binary in cross/native file and fallback to environment.
+
+        Returns command with args as list if found, Returns `None` if nothing is
+        found.
+        """
+        command = self.binaries.get(name)
+        if not command:
+            return None
+        elif not command[0].strip():
+            return None
+        return command
+
+class CMakeVariables:
+    def __init__(self, variables: T.Optional[T.Dict[str, T.Any]] = None) -> None:
+        variables = variables or {}
+        self.variables = {}  # type: T.Dict[str, T.List[str]]
+
+        for key, value in variables.items():
+            value = mesonlib.listify(value)
+            for i in value:
+                assert isinstance(i, str)
+            self.variables[key] = value
+
+    def get_variables(self) -> T.Dict[str, T.List[str]]:
+        return self.variables
diff --git a/meson/mesonbuild/environment.py b/meson/mesonbuild/environment.py
new file mode 100644
index 000000000..c7c53c645
--- /dev/null
+++ b/meson/mesonbuild/environment.py
@@ -0,0 +1,867 @@
+# Copyright 2012-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import os, platform, re, sys, shutil
+import typing as T
+import collections
+
+from . import coredata
+from . import mesonlib
+from .mesonlib import (
+    MesonException, EnvironmentException, MachineChoice, Popen_safe, PerMachine,
+    PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg, OptionKey,
+    search_version
+)
+from . import mlog
+from .programs import (
+    ExternalProgram, EmptyExternalProgram
+)
+
+from .envconfig import (
+    BinaryTable, MachineInfo, Properties, known_cpu_families, CMakeVariables,
+)
+from . import compilers
+from .compilers import (
+    Compiler,
+    is_assembly,
+    is_header,
+    is_library,
+    is_llvm_ir,
+    is_object,
+    is_source,
+)
+
+from functools import lru_cache
+from mesonbuild import envconfig
+
+if T.TYPE_CHECKING:
+    from configparser import ConfigParser
+
+    from .dependencies import ExternalProgram
+
+build_filename = 'meson.build'
+
+CompilersDict = T.Dict[str, Compiler]
+
+if T.TYPE_CHECKING:
+    import argparse
+
+
+def _get_env_var(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T.Optional[str]:
+    """
+    Returns the exact env var and the value.
+    """
+    candidates = PerMachine(
+        # The prefixed build version takes priority, but if we are native
+        # compiling we fall back on the unprefixed host version. This
+        # allows native builds to never need to worry about the 'BUILD_*'
+        # ones.
+        ([var_name + '_FOR_BUILD'] if is_cross else [var_name]),
+        # Always just the unprefixed host versions
+        [var_name]
+    )[for_machine]
+    for var in candidates:
+        value = os.environ.get(var)
+        if value is not None:
+            break
+    else:
+        formatted = ', '.join([f'{var!r}' for var in candidates])
+        mlog.debug(f'None of {formatted} are defined in the environment, not changing global flags.')
+        return None
+    mlog.debug(f'Using {var!r} from environment with value: {value!r}')
+    return value
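+    # Illustrative example: in a cross build, looking up 'PKG_CONFIG' for the
+    # build machine checks PKG_CONFIG_FOR_BUILD, while the host machine (and
+    # any native build) only ever checks PKG_CONFIG.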
+
+
+def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
+    gcovr_exe = 'gcovr'
+    try:
+        p, found = Popen_safe([gcovr_exe, '--version'])[0:2]
+    except (FileNotFoundError, PermissionError):
+        # Doesn't exist in PATH or isn't executable
+        return None, None
+    found = search_version(found)
+    if p.returncode == 0 and mesonlib.version_compare(found, '>=' + min_version):
+        if log:
+            mlog.log('Found gcovr-{} at {}'.format(found, quote_arg(shutil.which(gcovr_exe))))
+        return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version)
+    return None, None
+
+def detect_llvm_cov():
+    tools = get_llvm_tool_names('llvm-cov')
+    for tool in tools:
+        if mesonlib.exe_exists([tool, '--version']):
+            return tool
+    return None
+
+def find_coverage_tools() -> T.Tuple[T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str]]:
+    gcovr_exe, gcovr_new_rootdir = detect_gcovr()
+
+    llvm_cov_exe = detect_llvm_cov()
+
+    lcov_exe = 'lcov'
+    genhtml_exe = 'genhtml'
+
+    if not mesonlib.exe_exists([lcov_exe, '--version']):
+        lcov_exe = None
+    if not mesonlib.exe_exists([genhtml_exe, '--version']):
+        genhtml_exe = None
+
+    return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe
+
+def detect_ninja(version: str = '1.8.2', log: bool = False) -> T.List[str]:
+    r = detect_ninja_command_and_version(version, log)
+    return r[0] if r else None
+
+def detect_ninja_command_and_version(version: str = '1.8.2', log: bool = False) -> T.Tuple[T.List[str], str]:
+    env_ninja = os.environ.get('NINJA', None)
+    for n in [env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']:
+        prog = ExternalProgram(n, silent=True)
+        if not prog.found():
+            continue
+        try:
+            p, found = Popen_safe(prog.command + ['--version'])[0:2]
+        except (FileNotFoundError, PermissionError):
+            # Doesn't exist in PATH or isn't executable
+            continue
+        found = found.strip()
+        # Perhaps we should add a way for the caller to know the failure mode
+        # (not found or too old)
+        if p.returncode == 0 and mesonlib.version_compare(found, '>=' + version):
+            if log:
+                name = os.path.basename(n)
+                if name.endswith('-' + found):
+                    name = name[0:-1 - len(found)]
+                if name == 'ninja-build':
+                    name = 'ninja'
+                if name == 'samu':
+                    name = 'samurai'
+                mlog.log('Found {}-{} at {}'.format(name, found,
+                         ' '.join([quote_arg(x) for x in prog.command])))
+            return (prog.command, found)
+
+def get_llvm_tool_names(tool: str) -> T.List[str]:
+    # Ordered list of possible suffixes of LLVM executables to try. Start with
+    # base, then try newest back to oldest (3.5 is arbitrary), and finally the
+    # devel version. Please note that the development snapshot in Debian does
+    # not have a distinct name. Do not move it to the beginning of the list
+    # unless it becomes a stable release.
+    suffixes = [
+        '', # base (no suffix)
+        '-12',  '12',
+        '-11',  '11',
+        '-10',  '10',
+        '-9',   '90',
+        '-8',   '80',
+        '-7',   '70',
+        '-6.0', '60',
+        '-5.0', '50',
+        '-4.0', '40',
+        '-3.9', '39',
+        '-3.8', '38',
+        '-3.7', '37',
+        '-3.6', '36',
+        '-3.5', '35',
+        '-13',    # Debian development snapshot
+        '-devel', # FreeBSD development snapshot
+    ]
+    names = []
+    for suffix in suffixes:
+        names.append(tool + suffix)
+    return names
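+    # Illustrative example: get_llvm_tool_names('llvm-cov') returns
+    # ['llvm-cov', 'llvm-cov-12', 'llvm-cov12', 'llvm-cov-11', ...,
+    # 'llvm-cov-13', 'llvm-cov-devel'], which callers then probe in order.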
+
+def detect_scanbuild() -> T.List[str]:
+    """ Look for scan-build binary on build platform
+
+    First, if a SCANBUILD env variable has been provided, give it precedence
+    on all platforms.
+
+    For most platforms, scan-build is found if the PATH contains a binary
+    named "scan-build". However, some distributions' package managers (e.g.
+    FreeBSD's) don't install it under that name. For those, loop through a
+    list of candidates to see if one is available.
+
+    Return: a single-element list of the found scan-build binary ready to be
+        passed to Popen()
+    """
+    exelist = []
+    if 'SCANBUILD' in os.environ:
+        exelist = split_args(os.environ['SCANBUILD'])
+
+    else:
+        tools = get_llvm_tool_names('scan-build')
+        for tool in tools:
+            if shutil.which(tool) is not None:
+                exelist = [shutil.which(tool)]
+                break
+
+    if exelist:
+        tool = exelist[0]
+        if os.path.isfile(tool) and os.access(tool, os.X_OK):
+            return [tool]
+    return []
+
+def detect_clangformat() -> T.List[str]:
+    """ Look for clang-format binary on build platform
+
+    Do the same thing as detect_scanbuild to find clang-format except it
+    currently does not check the environment variable.
+
+    Return: a single-element list of the found clang-format binary ready to be
+        passed to Popen()
+    """
+    tools = get_llvm_tool_names('clang-format')
+    for tool in tools:
+        path = shutil.which(tool)
+        if path is not None:
+            return [path]
+    return []
+
+def detect_native_windows_arch():
+    """
+    The architecture of Windows itself: x86, amd64 or arm64
+    """
+    # These env variables are always available. See:
+    # https://msdn.microsoft.com/en-us/library/aa384274(VS.85).aspx
+    # https://blogs.msdn.microsoft.com/david.wang/2006/03/27/howto-detect-process-bitness/
+    arch = os.environ.get('PROCESSOR_ARCHITEW6432', '').lower()
+    if not arch:
+        try:
+            # If this doesn't exist, something is messing with the environment
+            arch = os.environ['PROCESSOR_ARCHITECTURE'].lower()
+        except KeyError:
+            raise EnvironmentException('Unable to detect native OS architecture')
+    return arch
+
+def detect_windows_arch(compilers: CompilersDict) -> str:
+    """
+    Detecting the 'native' architecture of Windows is not a trivial task. We
+    cannot trust that the architecture that Python is built for is the 'native'
+    one because you can run 32-bit apps on 64-bit Windows using WOW64 and
+    people sometimes install 32-bit Python on 64-bit Windows.
+
+    We also can't rely on the architecture of the OS itself, since it's
+    perfectly normal to compile and run 32-bit applications on Windows as if
+    they were native applications. It's a terrible experience to require the
+    user to supply a cross-info file to compile 32-bit applications on 64-bit
+    Windows. Thankfully, the only way to compile things with Visual Studio on
+    Windows is by entering the 'msvc toolchain' environment, which can be
+    easily detected.
+
+    In the end, the sanest method is as follows:
+    1. Check environment variables that are set by Windows and WOW64 to find out
+       if this is x86 (possibly in WOW64), if so use that as our 'native'
+       architecture.
+    2. If the compiler toolchain target architecture is x86, use that as our
+       'native' architecture.
+    3. Otherwise, use the actual Windows architecture
+
+    """
+    os_arch = detect_native_windows_arch()
+    if os_arch == 'x86':
+        return os_arch
+    # If we're on 64-bit Windows, 32-bit apps can be compiled without
+    # cross-compilation. So if we're doing that, just set the native arch as
+    # 32-bit and pretend like we're running under WOW64. Else, return the
+    # actual Windows architecture that we deduced above.
+    for compiler in compilers.values():
+        if compiler.id == 'msvc' and (compiler.target == 'x86' or compiler.target == '80x86'):
+            return 'x86'
+        if compiler.id == 'clang-cl' and compiler.target == 'x86':
+            return 'x86'
+        if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'):
+            return 'x86'
+    return os_arch
+
+def any_compiler_has_define(compilers: CompilersDict, define):
+    for c in compilers.values():
+        try:
+            if c.has_builtin_define(define):
+                return True
+        except mesonlib.MesonException:
+            # Ignore compilers that do not support has_builtin_define.
+            pass
+    return False
+
+def detect_cpu_family(compilers: CompilersDict) -> str:
+    """
+    Python is inconsistent in its platform module.
+    It returns different values for the same cpu.
+    For x86 it might return 'x86', 'i686' or somesuch.
+    Do some canonicalization.
+    """
+    if mesonlib.is_windows():
+        trial = detect_windows_arch(compilers)
+    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_qnx() or mesonlib.is_aix():
+        trial = platform.processor().lower()
+    else:
+        trial = platform.machine().lower()
+    if trial.startswith('i') and trial.endswith('86'):
+        trial = 'x86'
+    elif trial == 'bepc':
+        trial = 'x86'
+    elif trial == 'arm64':
+        trial = 'aarch64'
+    elif trial.startswith('aarch64'):
+        # This can be `aarch64_be`
+        trial = 'aarch64'
+    elif trial.startswith('arm') or trial.startswith('earm'):
+        trial = 'arm'
+    elif trial.startswith(('powerpc64', 'ppc64')):
+        trial = 'ppc64'
+    elif trial.startswith(('powerpc', 'ppc')) or trial in {'macppc', 'power macintosh'}:
+        trial = 'ppc'
+    elif trial in ('amd64', 'x64', 'i86pc'):
+        trial = 'x86_64'
+    elif trial in {'sun4u', 'sun4v'}:
+        trial = 'sparc64'
+    elif trial.startswith('mips'):
+        if '64' not in trial:
+            trial = 'mips'
+        else:
+            trial = 'mips64'
+    elif trial in {'ip30', 'ip35'}:
+        trial = 'mips64'
+
+    # On Linux (and maybe others) there can be any mixture of 32/64 bit code in
+    # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
+    # reliable way to know is to check the compiler defines.
+    if trial == 'x86_64':
+        if any_compiler_has_define(compilers, '__i386__'):
+            trial = 'x86'
+    elif trial == 'aarch64':
+        if any_compiler_has_define(compilers, '__arm__'):
+            trial = 'arm'
+    # Add more quirks here as bugs are reported. Keep in sync with detect_cpu()
+    # below.
+    elif trial == 'parisc64':
+        # ATM there is no 64 bit userland for PA-RISC. Thus always
+        # report it as 32 bit for simplicity.
+        trial = 'parisc'
+    elif trial == 'ppc':
+        # AIX always returns powerpc, check here for 64-bit
+        if any_compiler_has_define(compilers, '__64BIT__'):
+            trial = 'ppc64'
+
+    if trial not in known_cpu_families:
+        mlog.warning(f'Unknown CPU family {trial!r}, please report this at '
+                     'https://github.com/mesonbuild/meson/issues/new with the '
+                     'output of `uname -a` and `cat /proc/cpuinfo`')
+
+    return trial
+
+def detect_cpu(compilers: CompilersDict) -> str:
+    if mesonlib.is_windows():
+        trial = detect_windows_arch(compilers)
+    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_aix():
+        trial = platform.processor().lower()
+    else:
+        trial = platform.machine().lower()
+
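+    # Unlike detect_cpu_family(), this keeps the more specific name where
+    # possible (e.g. 'i686' or 'armv7l' are returned as-is); only a few
+    # ambiguous values are normalized below.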
+    if trial in ('amd64', 'x64', 'i86pc'):
+        trial = 'x86_64'
+    if trial == 'x86_64':
+        # Same check as above for cpu_family
+        if any_compiler_has_define(compilers, '__i386__'):
+            trial = 'i686' # All 64 bit cpus have at least this level of x86 support.
+    elif trial.startswith('aarch64'):
+        # Same check as above for cpu_family
+        if any_compiler_has_define(compilers, '__arm__'):
+            trial = 'arm'
+        else:
+            # for aarch64_be
+            trial = 'aarch64'
+    elif trial.startswith('earm'):
+        trial = 'arm'
+    elif trial == 'e2k':
+        # Make more precise CPU detection for Elbrus platform.
+        trial = platform.processor().lower()
+    elif trial.startswith('mips'):
+        if '64' not in trial:
+            trial = 'mips'
+        else:
+            trial = 'mips64'
+    elif trial == 'ppc':
+        # AIX always returns powerpc, check here for 64-bit
+        if any_compiler_has_define(compilers, '__64BIT__'):
+            trial = 'ppc64'
+
+    # Add more quirks here as bugs are reported. Keep in sync with
+    # detect_cpu_family() above.
+    return trial
+
+def detect_system() -> str:
+    if sys.platform == 'cygwin':
+        return 'cygwin'
+    return platform.system().lower()
+
+def detect_msys2_arch() -> T.Optional[str]:
+    return os.environ.get('MSYSTEM_CARCH', None)
+
+def detect_machine_info(compilers: T.Optional[CompilersDict] = None) -> MachineInfo:
+    """Detect the machine we're running on
+
+    If compilers are not provided, we cannot know as much. None out those
+    fields to avoid accidentally depending on partial knowledge. The
+    underlying ``detect_*`` methods can be called to explicitly use the
+    partial information.
+    """
+    return MachineInfo(
+        detect_system(),
+        detect_cpu_family(compilers) if compilers is not None else None,
+        detect_cpu(compilers) if compilers is not None else None,
+        sys.byteorder)
+
+# TODO make this compare two `MachineInfo`s purely. How important is the
+# `detect_cpu_family({})` distinction? It is the one impediment to that.
+def machine_info_can_run(machine_info: MachineInfo):
+    """Whether we can run binaries for this machine on the current machine.
+
+    Can almost always run 32-bit binaries on 64-bit natively if the host
+    and build systems are the same. We don't pass any compilers to
+    detect_cpu_family() here because we always want to know the OS
+    architecture, not what the compiler environment tells us.
+    """
+    if machine_info.system != detect_system():
+        return False
+    true_build_cpu_family = detect_cpu_family({})
+    return \
+        (machine_info.cpu_family == true_build_cpu_family) or \
+        ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
+        ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))
+
+class Environment:
+    private_dir = 'meson-private'
+    log_dir = 'meson-logs'
+    info_dir = 'meson-info'
+
+    def __init__(self, source_dir: T.Optional[str], build_dir: T.Optional[str], options: 'argparse.Namespace') -> None:
+        self.source_dir = source_dir
+        self.build_dir = build_dir
+        # Do not try to create build directories when build_dir is none.
+        # This reduced mode is used by the --buildoptions introspector
+        if build_dir is not None:
+            self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
+            self.log_dir = os.path.join(build_dir, Environment.log_dir)
+            self.info_dir = os.path.join(build_dir, Environment.info_dir)
+            os.makedirs(self.scratch_dir, exist_ok=True)
+            os.makedirs(self.log_dir, exist_ok=True)
+            os.makedirs(self.info_dir, exist_ok=True)
+            try:
+                self.coredata = coredata.load(self.get_build_dir())  # type: coredata.CoreData
+                self.first_invocation = False
+            except FileNotFoundError:
+                self.create_new_coredata(options)
+            except coredata.MesonVersionMismatchException as e:
+                # This is routine, but tell the user the update happened
+                mlog.log('Regenerating configuration from scratch:', str(e))
+                coredata.read_cmd_line_file(self.build_dir, options)
+                self.create_new_coredata(options)
+            except MesonException as e:
+                # If we stored previous command line options, we can recover from
+                # a broken/outdated coredata.
+                if os.path.isfile(coredata.get_cmd_line_file(self.build_dir)):
+                    mlog.warning('Regenerating configuration from scratch.')
+                    mlog.log('Reason:', mlog.red(str(e)))
+                    coredata.read_cmd_line_file(self.build_dir, options)
+                    self.create_new_coredata(options)
+                else:
+                    raise e
+        else:
+            # Just create a fresh coredata in this case
+            self.scratch_dir = ''
+            self.create_new_coredata(options)
+
+        ## locally bind some unfrozen configuration
+
+        # Stores machine infos. This is the only *three*-machine table
+        # because we also keep a target machine info for the user's benefit
+        # (Meson itself never cares about the target machine).
+        machines: PerThreeMachineDefaultable[MachineInfo] = PerThreeMachineDefaultable()
+
+        # Similar to coredata.compilers, but lower level in that there is no
+        # meta data, only names/paths.
+        binaries = PerMachineDefaultable()  # type: PerMachineDefaultable[BinaryTable]
+
+        # Misc other properties about each machine.
+        properties = PerMachineDefaultable()  # type: PerMachineDefaultable[Properties]
+
+        # CMake toolchain variables
+        cmakevars = PerMachineDefaultable()  # type: PerMachineDefaultable[CMakeVariables]
+
+        ## Setup build machine defaults
+
+        # Will be fully initialized later, once the compilers are known.
+        machines.build = detect_machine_info()
+
+        # Just uses hard-coded defaults and environment variables. Might be
+        # overwritten by a native file.
+        binaries.build = BinaryTable()
+        properties.build = Properties()
+
+        # Options with the key parsed into an OptionKey type.
+        #
+        # Note that order matters because of 'buildtype': if it comes after
+        # the 'optimization' and 'debug' keys, it overrides them.
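+        # For instance, 'buildtype=debug' implies 'debug=true' and
+        # 'optimization=0', so whichever is set last wins.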
+        self.options: T.MutableMapping[OptionKey, T.Union[str, T.List[str]]] = collections.OrderedDict()
+
+        ## Read in native file(s) to override build machine configuration
+
+        if self.coredata.config_files is not None:
+            config = coredata.parse_machine_files(self.coredata.config_files)
+            binaries.build = BinaryTable(config.get('binaries', {}))
+            properties.build = Properties(config.get('properties', {}))
+            cmakevars.build = CMakeVariables(config.get('cmake', {}))
+            self._load_machine_file_options(
+                config, properties.build,
+                MachineChoice.BUILD if self.coredata.cross_files else MachineChoice.HOST)
+
+        ## Read in cross file(s) to override host machine configuration
+
+        if self.coredata.cross_files:
+            config = coredata.parse_machine_files(self.coredata.cross_files)
+            properties.host = Properties(config.get('properties', {}))
+            binaries.host = BinaryTable(config.get('binaries', {}))
+            cmakevars.host = CMakeVariables(config.get('cmake', {}))
+            if 'host_machine' in config:
+                machines.host = MachineInfo.from_literal(config['host_machine'])
+            if 'target_machine' in config:
+                machines.target = MachineInfo.from_literal(config['target_machine'])
+            # Keep only per machine options from the native file. The cross
+            # file takes precedence over all other options.
+            for key, value in list(self.options.items()):
+                if self.coredata.is_per_machine_option(key):
+                    self.options[key.as_build()] = value
+            self._load_machine_file_options(config, properties.host, MachineChoice.HOST)
+
+
+        ## "freeze" now initialized configuration, and "save" to the class.
+
+        self.machines = machines.default_missing()
+        self.binaries = binaries.default_missing()
+        self.properties = properties.default_missing()
+        self.cmakevars = cmakevars.default_missing()
+
+        # Command line options override those from cross/native files
+        self.options.update(options.cmd_line_options)
+
+        # Take default value from env if not set in cross/native files or command line.
+        self._set_default_options_from_env()
+        self._set_default_binaries_from_env()
+        self._set_default_properties_from_env()
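+        # Net precedence: command-line -D options win over cross/native
+        # files, which win over environment variables; the environment is
+        # only consulted for values that are still unset.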
+
+        # Warn if the user is using two different ways of setting build-type
+        # options that override each other
+        bt = OptionKey('buildtype')
+        db = OptionKey('debug')
+        op = OptionKey('optimization')
+        if bt in self.options and (db in self.options or op in self.options):
+            mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
+                         'Using both is redundant since they override each other. '
+                         'See: https://mesonbuild.com/Builtin-options.html#build-type-options')
+
+        exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
+        if exe_wrapper is not None:
+            self.exe_wrapper = ExternalProgram.from_bin_list(self, MachineChoice.HOST, 'exe_wrapper')
+        else:
+            self.exe_wrapper = None
+
+        self.default_cmake = ['cmake']
+        self.default_pkgconfig = ['pkg-config']
+        self.wrap_resolver = None
+
+    def _load_machine_file_options(self, config: 'ConfigParser', properties: Properties, machine: MachineChoice) -> None:
+        """Read the contents of a Machine file and put it in the options store."""
+
+        # Look for any options in the deprecated paths section, warn about
+        # those, then assign them. They will be overwritten by the ones in the
+        # "built-in options" section if they're in both sections.
+        paths = config.get('paths')
+        if paths:
+            mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+            for k, v in paths.items():
+                self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
+
+        # Next look for compiler options in the "properties" section, this is
+        # also deprecated, and these will also be overwritten by the "built-in
+        # options" section. We need to remove these from this section, as well.
+        deprecated_properties: T.Set[str] = set()
+        for lang in compilers.all_languages:
+            deprecated_properties.add(lang + '_args')
+            deprecated_properties.add(lang + '_link_args')
+        for k, v in properties.properties.copy().items():
+            if k in deprecated_properties:
+                mlog.deprecation(f'{k} in the [properties] section of the machine file is deprecated, use the [built-in options] section.')
+                self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
+                del properties.properties[k]
+
+        for section, values in config.items():
+            if ':' in section:
+                subproject, section = section.split(':')
+            else:
+                subproject = ''
+            if section == 'built-in options':
+                for k, v in values.items():
+                    key = OptionKey.from_string(k)
+                    # If we're in the cross file, and there is a `build.foo` warn about that. Later we'll remove it.
+                    if machine is MachineChoice.HOST and key.machine is not machine:
+                        mlog.deprecation('Setting build machine options in cross files, please use a native file instead, this will be removed in meson 0.60', once=True)
+                    if key.subproject:
+                        raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
+                    self.options[key.evolve(subproject=subproject, machine=machine)] = v
+            elif section == 'project options' and machine is MachineChoice.HOST:
+                # Project options are only for the host machine, we don't want
+                # to read these from the native file
+                for k, v in values.items():
+                    # Project options are always for the host machine
+                    key = OptionKey.from_string(k)
+                    if key.subproject:
+                        raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
+                    self.options[key.evolve(subproject=subproject)] = v
+
+    def _set_default_options_from_env(self) -> None:
+        opts: T.List[T.Tuple[str, str]] = (
+            [(v, f'{k}_args') for k, v in compilers.compilers.CFLAGS_MAPPING.items()] +
+            [
+                ('PKG_CONFIG_PATH', 'pkg_config_path'),
+                ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
+                ('LDFLAGS', 'ldflags'),
+                ('CPPFLAGS', 'cppflags'),
+            ]
+        )
+
+        env_opts: T.DefaultDict[OptionKey, T.List[str]] = collections.defaultdict(list)
+
+        for (evar, keyname), for_machine in itertools.product(opts, MachineChoice):
+            p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+            if p_env is not None:
+                # these may contain duplicates, which must be removed, else
+                # a duplicates-in-array-option warning arises.
+                if keyname == 'cmake_prefix_path':
+                    if self.machines[for_machine].is_windows():
+                        # Cannot split on ':' on Windows because it's part of the drive letter
+                        _p_env = p_env.split(os.pathsep)
+                    else:
+                        # https://github.com/mesonbuild/meson/issues/7294
+                        _p_env = re.split(r':|;', p_env)
+                    p_list = list(mesonlib.OrderedSet(_p_env))
+                elif keyname == 'pkg_config_path':
+                    p_list = list(mesonlib.OrderedSet(p_env.split(':')))
+                else:
+                    p_list = split_args(p_env)
+                p_list = [e for e in p_list if e]  # filter out any empty elements
+
+                # Take env vars only on the first invocation; if the env
+                # changes when reconfiguring, it gets ignored.
+                # FIXME: We should remember if we took the value from env to warn
+                # if it changes on future invocations.
+                if self.first_invocation:
+                    if keyname == 'ldflags':
+                        key = OptionKey('link_args', machine=for_machine, lang='c')  # needs a language to initialize properly
+                        for lang in compilers.compilers.LANGUAGES_USING_LDFLAGS:
+                            key = key.evolve(lang=lang)
+                            env_opts[key].extend(p_list)
+                    elif keyname == 'cppflags':
+                        key = OptionKey('env_args', machine=for_machine, lang='c')
+                        for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS:
+                            key = key.evolve(lang=lang)
+                            env_opts[key].extend(p_list)
+                    else:
+                        key = OptionKey.from_string(keyname).evolve(machine=for_machine)
+                        if evar in compilers.compilers.CFLAGS_MAPPING.values():
+                            # If this is an environment variable, we have to
+                            # store it separately until the compiler is
+                            # instantiated, as we don't know whether the
+                            # compiler will want to use these arguments at link
+                            # time and compile time (instead of just at compile
+                            # time) until we're instantiating that `Compiler`
+                            # object. This is required so that passing
+                            # `-Dc_args=` on the command line and `$CFLAGS`
+                            # have subtly different behavior. `$CFLAGS` will be
+                            # added to the linker command line if the compiler
+                            # acts as a linker driver, `-Dc_args` will not.
+                            #
+                            # We still use the original key as the base here, as
+                            # we want to inherit the machine and the compiler
+                            # language
+                            key = key.evolve('env_args')
+                        env_opts[key].extend(p_list)
+
+        # Only store options that are not already in self.options,
+        # otherwise we'd override the machine files
+        for k, v in env_opts.items():
+            if k not in self.options:
+                self.options[k] = v
+
+    def _set_default_binaries_from_env(self) -> None:
+        """Set default binaries from the environment.
+
+        For example, pkg-config can be set via PKG_CONFIG, or in the machine
+        file. We want to set the default to the env variable.
+        """
+        opts = itertools.chain(envconfig.DEPRECATED_ENV_PROG_MAP.items(),
+                               envconfig.ENV_VAR_PROG_MAP.items())
+
+        for (name, evar), for_machine in itertools.product(opts, MachineChoice):
+            p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+            if p_env is not None:
+                self.binaries[for_machine].binaries.setdefault(name, mesonlib.split_args(p_env))
+
+    def _set_default_properties_from_env(self) -> None:
+        """Properties which can also be set from the environment."""
+        # name, evar, split
+        opts: T.List[T.Tuple[str, T.List[str], bool]] = [
+            ('boost_includedir', ['BOOST_INCLUDEDIR'], False),
+            ('boost_librarydir', ['BOOST_LIBRARYDIR'], False),
+            ('boost_root', ['BOOST_ROOT', 'BOOSTROOT'], True),
+            ('java_home', ['JAVA_HOME'], False),
+        ]
+
+        for (name, evars, split), for_machine in itertools.product(opts, MachineChoice):
+            for evar in evars:
+                p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+                if p_env is not None:
+                    if split:
+                        self.properties[for_machine].properties.setdefault(name, p_env.split(os.pathsep))
+                    else:
+                        self.properties[for_machine].properties.setdefault(name, p_env)
+                    break
+
+    def create_new_coredata(self, options: 'argparse.Namespace') -> None:
+        # WARNING: Don't use any values from coredata in __init__. It gets
+        # re-initialized with project options by the interpreter during
+        # build file parsing.
+        # meson_command is used by the regenchecker script, which runs meson
+        self.coredata = coredata.CoreData(options, self.scratch_dir, mesonlib.get_meson_command())
+        self.first_invocation = True
+
+    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+        return self.coredata.is_cross_build(when_building_for)
+
+    def dump_coredata(self) -> str:
+        return coredata.save(self.coredata, self.get_build_dir())
+
+    def get_log_dir(self) -> str:
+        return self.log_dir
+
+    def get_coredata(self) -> coredata.CoreData:
+        return self.coredata
+
+    def get_build_command(self, unbuffered=False):
+        cmd = mesonlib.get_meson_command().copy()
+        if unbuffered and 'python' in os.path.basename(cmd[0]):
+            cmd.insert(1, '-u')
+        return cmd
+
+    def is_header(self, fname):
+        return is_header(fname)
+
+    def is_source(self, fname):
+        return is_source(fname)
+
+    def is_assembly(self, fname):
+        return is_assembly(fname)
+
+    def is_llvm_ir(self, fname):
+        return is_llvm_ir(fname)
+
+    def is_object(self, fname):
+        return is_object(fname)
+
+    @lru_cache(maxsize=None)
+    def is_library(self, fname):
+        return is_library(fname)
+
+    def lookup_binary_entry(self, for_machine: MachineChoice, name: str) -> T.Optional[T.List[str]]:
+        return self.binaries[for_machine].lookup_entry(name)
+
+    def get_scratch_dir(self) -> str:
+        return self.scratch_dir
+
+    def get_source_dir(self) -> str:
+        return self.source_dir
+
+    def get_build_dir(self) -> str:
+        return self.build_dir
+
+    def get_import_lib_dir(self) -> str:
+        "Install dir for the import library (library used for linking)"
+        return self.get_libdir()
+
+    def get_shared_module_dir(self) -> str:
+        "Install dir for shared modules that are loaded at runtime"
+        return self.get_libdir()
+
+    def get_shared_lib_dir(self) -> str:
+        "Install dir for the shared library"
+        m = self.machines.host
+        # Windows has no RPATH or similar, so DLLs must be next to EXEs.
+        if m.is_windows() or m.is_cygwin():
+            return self.get_bindir()
+        return self.get_libdir()
+
+    def get_static_lib_dir(self) -> str:
+        "Install dir for the static library"
+        return self.get_libdir()
+
+    def get_prefix(self) -> str:
+        return self.coredata.get_option(OptionKey('prefix'))
+
+    def get_libdir(self) -> str:
+        return self.coredata.get_option(OptionKey('libdir'))
+
+    def get_libexecdir(self) -> str:
+        return self.coredata.get_option(OptionKey('libexecdir'))
+
+    def get_bindir(self) -> str:
+        return self.coredata.get_option(OptionKey('bindir'))
+
+    def get_includedir(self) -> str:
+        return self.coredata.get_option(OptionKey('includedir'))
+
+    def get_mandir(self) -> str:
+        return self.coredata.get_option(OptionKey('mandir'))
+
+    def get_datadir(self) -> str:
+        return self.coredata.get_option(OptionKey('datadir'))
+
+    def get_compiler_system_dirs(self, for_machine: MachineChoice):
+        for comp in self.coredata.compilers[for_machine].values():
+            if isinstance(comp, compilers.ClangCompiler):
+                index = 1
+                break
+            elif isinstance(comp, compilers.GnuCompiler):
+                index = 2
+                break
+        else:
+            # This option is only supported by GCC and Clang. If we don't get a
+            # GCC or Clang compiler, return an empty list.
+            return []
+
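+        # Typical '-print-search-dirs' output (which explains the 'index'
+        # values above): GCC prints 'install:', 'programs:' and 'libraries:'
+        # lines, while Clang prints only 'programs:' and 'libraries:', so the
+        # 'libraries:' line is at index 2 for GCC and index 1 for Clang.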
+        p, out, _ = Popen_safe(comp.get_exelist() + ['-print-search-dirs'])
+        if p.returncode != 0:
+            raise mesonlib.MesonException('Could not calculate system search dirs')
+        out = out.split('\n')[index].lstrip('libraries: =').split(':')
+        return [os.path.normpath(p) for p in out]
+
+    def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST):
+        value = self.properties[for_machine].get('needs_exe_wrapper', None)
+        if value is not None:
+            return value
+        return not machine_info_can_run(self.machines[for_machine])
+
+    def get_exe_wrapper(self) -> ExternalProgram:
+        if not self.need_exe_wrapper():
+            return EmptyExternalProgram()
+        return self.exe_wrapper
diff --git a/meson/mesonbuild/interpreter/__init__.py b/meson/mesonbuild/interpreter/__init__.py
new file mode 100644
index 000000000..62b09bf37
--- /dev/null
+++ b/meson/mesonbuild/interpreter/__init__.py
@@ -0,0 +1,25 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Meson interpreter."""
+
+from .interpreter import Interpreter, permitted_dependency_kwargs
+from .compiler import CompilerHolder
+from .interpreterobjects import (ExecutableHolder, BuildTargetHolder, CustomTargetHolder,
+                                 CustomTargetIndexHolder, MachineHolder, Test,
+                                 ConfigurationDataObject, SubprojectHolder, DependencyHolder,
+                                 GeneratedListHolder, ExternalProgramHolder,
+                                 extract_required_kwarg)
diff --git a/meson/mesonbuild/interpreter/compiler.py b/meson/mesonbuild/interpreter/compiler.py
new file mode 100644
index 000000000..b1eef2fe5
--- /dev/null
+++ b/meson/mesonbuild/interpreter/compiler.py
@@ -0,0 +1,785 @@
+import functools
+
+from ..interpreterbase.decorators import typed_kwargs, KwargInfo
+
+from .interpreterobjects import (extract_required_kwarg, extract_search_dirs)
+
+from .. import mesonlib
+from .. import mlog
+from .. import dependencies
+from ..interpreterbase import (ObjectHolder, noPosargs, noKwargs, permittedKwargs,
+                               FeatureNew, FeatureNewKwargs, disablerIfNotFound,
+                               check_stringlist, InterpreterException, InvalidArguments)
+
+import typing as T
+import os
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+    from ..compilers import Compiler, RunResult
+
+class TryRunResultHolder(ObjectHolder['RunResult']):
+    def __init__(self, res: 'RunResult', interpreter: 'Interpreter'):
+        super().__init__(res, interpreter)
+        self.methods.update({'returncode': self.returncode_method,
+                             'compiled': self.compiled_method,
+                             'stdout': self.stdout_method,
+                             'stderr': self.stderr_method,
+                             })
+
+    @noPosargs
+    @permittedKwargs({})
+    def returncode_method(self, args, kwargs):
+        return self.held_object.returncode
+
+    @noPosargs
+    @permittedKwargs({})
+    def compiled_method(self, args, kwargs):
+        return self.held_object.compiled
+
+    @noPosargs
+    @permittedKwargs({})
+    def stdout_method(self, args, kwargs):
+        return self.held_object.stdout
+
+    @noPosargs
+    @permittedKwargs({})
+    def stderr_method(self, args, kwargs):
+        return self.held_object.stderr
+
+header_permitted_kwargs = {
+    'required',
+    'prefix',
+    'no_builtin_args',
+    'include_directories',
+    'args',
+    'dependencies',
+}
+
+find_library_permitted_kwargs = {
+    'has_headers',
+    'required',
+    'dirs',
+    'static',
+}
+
+find_library_permitted_kwargs |= {'header_' + k for k in header_permitted_kwargs}
+
+class CompilerHolder(ObjectHolder['Compiler']):
+    def __init__(self, compiler: 'Compiler', interpreter: 'Interpreter'):
+        super().__init__(compiler, interpreter)
+        self.environment = self.env
+        self.methods.update({'compiles': self.compiles_method,
+                             'links': self.links_method,
+                             'get_id': self.get_id_method,
+                             'get_linker_id': self.get_linker_id_method,
+                             'compute_int': self.compute_int_method,
+                             'sizeof': self.sizeof_method,
+                             'get_define': self.get_define_method,
+                             'check_header': self.check_header_method,
+                             'has_header': self.has_header_method,
+                             'has_header_symbol': self.has_header_symbol_method,
+                             'run': self.run_method,
+                             'has_function': self.has_function_method,
+                             'has_member': self.has_member_method,
+                             'has_members': self.has_members_method,
+                             'has_type': self.has_type_method,
+                             'alignment': self.alignment_method,
+                             'version': self.version_method,
+                             'cmd_array': self.cmd_array_method,
+                             'find_library': self.find_library_method,
+                             'has_argument': self.has_argument_method,
+                             'has_function_attribute': self.has_func_attribute_method,
+                             'get_supported_function_attributes': self.get_supported_function_attributes_method,
+                             'has_multi_arguments': self.has_multi_arguments_method,
+                             'get_supported_arguments': self.get_supported_arguments_method,
+                             'first_supported_argument': self.first_supported_argument_method,
+                             'has_link_argument': self.has_link_argument_method,
+                             'has_multi_link_arguments': self.has_multi_link_arguments_method,
+                             'get_supported_link_arguments': self.get_supported_link_arguments_method,
+                             'first_supported_link_argument': self.first_supported_link_argument_method,
+                             'unittest_args': self.unittest_args_method,
+                             'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
+                             'get_argument_syntax': self.get_argument_syntax_method,
+                             })
+
+    @property
+    def compiler(self) -> 'Compiler':
+        return self.held_object
+
+    def _dep_msg(self, deps, endl):
+        msg_single = 'with dependency {}'
+        msg_many = 'with dependencies {}'
+        if not deps:
+            return endl
+        if endl is None:
+            endl = ''
+        names = []
+        for d in deps:
+            if isinstance(d, dependencies.InternalDependency):
+                continue
+            if isinstance(d, dependencies.ExternalLibrary):
+                name = '-l' + d.name
+            else:
+                name = d.name
+            names.append(name)
+        if not names:
+            return None
+        tpl = msg_many if len(names) > 1 else msg_single
+        return tpl.format(', '.join(names)) + endl
+
+    @noPosargs
+    @permittedKwargs({})
+    def version_method(self, args, kwargs):
+        return self.compiler.version
+
+    @noPosargs
+    @permittedKwargs({})
+    def cmd_array_method(self, args, kwargs):
+        return self.compiler.exelist
+
+    def determine_args(self, kwargs, mode='link'):
+        nobuiltins = kwargs.get('no_builtin_args', False)
+        if not isinstance(nobuiltins, bool):
+            raise InterpreterException('Type of no_builtin_args not a boolean.')
+        args = []
+        incdirs = mesonlib.extract_as_list(kwargs, 'include_directories')
+        for i in incdirs:
+            from ..build import IncludeDirs
+            if not isinstance(i, IncludeDirs):
+                raise InterpreterException('Include directories argument must be an include_directories object.')
+            for idir in i.to_string_list(self.environment.get_source_dir()):
+                args += self.compiler.get_include_args(idir, False)
+        if not nobuiltins:
+            opts = self.environment.coredata.options
+            args += self.compiler.get_option_compile_args(opts)
+            if mode == 'link':
+                args += self.compiler.get_option_link_args(opts)
+        args += mesonlib.stringlistify(kwargs.get('args', []))
+        return args
+
+    def determine_dependencies(self, kwargs, endl=':'):
+        deps = kwargs.get('dependencies', None)
+        if deps is not None:
+            final_deps = []
+            while deps:
+                next_deps = []
+                for d in mesonlib.listify(deps):
+                    if not isinstance(d, dependencies.Dependency) or d.is_built():
+                        raise InterpreterException('Dependencies must be external dependencies')
+                    final_deps.append(d)
+                    next_deps.extend(d.ext_deps)
+                deps = next_deps
+            deps = final_deps
+        return deps, self._dep_msg(deps, endl)
+
+    @permittedKwargs({
+        'prefix',
+        'args',
+        'dependencies',
+    })
+    def alignment_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Alignment method takes exactly one positional argument.')
+        check_stringlist(args)
+        typename = args[0]
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of alignment must be a string.')
+        extra_args = mesonlib.stringlistify(kwargs.get('args', []))
+        deps, msg = self.determine_dependencies(kwargs)
+        result = self.compiler.alignment(typename, prefix, self.environment,
+                                         extra_args=extra_args,
+                                         dependencies=deps)
+        mlog.log('Checking for alignment of', mlog.bold(typename, True), msg, result)
+        return result
+
+    @permittedKwargs({
+        'name',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def run_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Run method takes exactly one positional argument.')
+        code = args[0]
+        if isinstance(code, mesonlib.File):
+            code = mesonlib.File.from_absolute_file(
+                code.rel_to_builddir(self.environment.source_dir))
+        elif not isinstance(code, str):
+            raise InvalidArguments('Argument must be string or file.')
+        testname = kwargs.get('name', '')
+        if not isinstance(testname, str):
+            raise InterpreterException('Testname argument must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs, endl=None)
+        result = self.compiler.run(code, self.environment, extra_args=extra_args,
+                                   dependencies=deps)
+        if len(testname) > 0:
+            if not result.compiled:
+                h = mlog.red('DID NOT COMPILE')
+            elif result.returncode == 0:
+                h = mlog.green('YES')
+            else:
+                h = mlog.red('NO (%d)' % result.returncode)
+            mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h)
+        return result
+
+    @noPosargs
+    @permittedKwargs({})
+    def get_id_method(self, args, kwargs):
+        return self.compiler.get_id()
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureNew('compiler.get_linker_id', '0.53.0')
+    def get_linker_id_method(self, args, kwargs):
+        return self.compiler.get_linker_id()
+
+    @noPosargs
+    @permittedKwargs({})
+    def symbols_have_underscore_prefix_method(self, args, kwargs):
+        '''
+        Check if the compiler prefixes _ (underscore) to global C symbols
+        See: https://en.wikipedia.org/wiki/Name_mangling#C
+        '''
+        return self.compiler.symbols_have_underscore_prefix(self.environment)
+
+    @noPosargs
+    @permittedKwargs({})
+    def unittest_args_method(self, args, kwargs):
+        '''
+        This function is deprecated and should not be used.
+        It can be removed in a future version of Meson.
+        '''
+        if not hasattr(self.compiler, 'get_feature_args'):
+            raise InterpreterException(f'This {self.compiler.get_display_language()} compiler has no feature arguments.')
+        build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir())
+        return self.compiler.get_feature_args({'unittest': 'true'}, build_to_src)
+
+    @permittedKwargs({
+        'prefix',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def has_member_method(self, args, kwargs):
+        if len(args) != 2:
+            raise InterpreterException('Has_member takes exactly two arguments.')
+        check_stringlist(args)
+        typename, membername = args
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of has_member must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        had, cached = self.compiler.has_members(typename, [membername], prefix,
+                                                self.environment,
+                                                extra_args=extra_args,
+                                                dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        mlog.log('Checking whether type', mlog.bold(typename, True),
+                 'has member', mlog.bold(membername, True), msg, hadtxt, cached)
+        return had
+
+    @permittedKwargs({
+        'prefix',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def has_members_method(self, args, kwargs):
+        if len(args) < 2:
+            raise InterpreterException('Has_members needs at least two arguments.')
+        check_stringlist(args)
+        typename, *membernames = args
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of has_members must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        had, cached = self.compiler.has_members(typename, membernames, prefix,
+                                                self.environment,
+                                                extra_args=extra_args,
+                                                dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        members = mlog.bold(', '.join([f'"{m}"' for m in membernames]))
+        mlog.log('Checking whether type', mlog.bold(typename, True),
+                 'has members', members, msg, hadtxt, cached)
+        return had
+
+    @permittedKwargs({
+        'prefix',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def has_function_method(self, args, kwargs):
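+        # Typical meson.build usage (illustrative):
+        #   cc.has_function('clock_gettime', prefix: '#include <time.h>')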
+        if len(args) != 1:
+            raise InterpreterException('Has_function takes exactly one argument.')
+        check_stringlist(args)
+        funcname = args[0]
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of has_function must be a string.')
+        extra_args = self.determine_args(kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        had, cached = self.compiler.has_function(funcname, prefix, self.environment,
+                                                 extra_args=extra_args,
+                                                 dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt, cached)
+        return had
+
+    @permittedKwargs({
+        'prefix',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def has_type_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Has_type takes exactly one argument.')
+        check_stringlist(args)
+        typename = args[0]
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of has_type must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        had, cached = self.compiler.has_type(typename, prefix, self.environment,
+                                             extra_args=extra_args, dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt, cached)
+        return had
+
+    @FeatureNew('compiler.compute_int', '0.40.0')
+    @permittedKwargs({
+        'prefix',
+        'low',
+        'high',
+        'guess',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def compute_int_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Compute_int takes exactly one argument.')
+        check_stringlist(args)
+        expression = args[0]
+        prefix = kwargs.get('prefix', '')
+        low = kwargs.get('low', None)
+        high = kwargs.get('high', None)
+        guess = kwargs.get('guess', None)
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of compute_int must be a string.')
+        if low is not None and not isinstance(low, int):
+            raise InterpreterException('Low argument of compute_int must be an int.')
+        if high is not None and not isinstance(high, int):
+            raise InterpreterException('High argument of compute_int must be an int.')
+        if guess is not None and not isinstance(guess, int):
+            raise InterpreterException('Guess argument of compute_int must be an int.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        res = self.compiler.compute_int(expression, low, high, guess, prefix,
+                                        self.environment, extra_args=extra_args,
+                                        dependencies=deps)
+        mlog.log('Computing int of', mlog.bold(expression, True), msg, res)
+        return res
+
+    @permittedKwargs({
+        'prefix',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def sizeof_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Sizeof takes exactly one argument.')
+        check_stringlist(args)
+        element = args[0]
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of sizeof must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        esize = self.compiler.sizeof(element, prefix, self.environment,
+                                     extra_args=extra_args, dependencies=deps)
+        mlog.log('Checking for size of', mlog.bold(element, True), msg, esize)
+        return esize
+
+    @FeatureNew('compiler.get_define', '0.40.0')
+    @permittedKwargs({
+        'prefix',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def get_define_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('get_define() takes exactly one argument.')
+        check_stringlist(args)
+        element = args[0]
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of get_define() must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        value, cached = self.compiler.get_define(element, prefix, self.environment,
+                                                 extra_args=extra_args,
+                                                 dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        mlog.log('Fetching value of define', mlog.bold(element, True), msg, value, cached)
+        return value
+
+    @permittedKwargs({
+        'name',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def compiles_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('compiles method takes exactly one argument.')
+        code = args[0]
+        if isinstance(code, mesonlib.File):
+            code = mesonlib.File.from_absolute_file(
+                code.rel_to_builddir(self.environment.source_dir))
+        elif not isinstance(code, str):
+            raise InvalidArguments('Argument must be string or file.')
+        testname = kwargs.get('name', '')
+        if not isinstance(testname, str):
+            raise InterpreterException('Testname argument must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs, endl=None)
+        result, cached = self.compiler.compiles(code, self.environment,
+                                                extra_args=extra_args,
+                                                dependencies=deps)
+        if len(testname) > 0:
+            if result:
+                h = mlog.green('YES')
+            else:
+                h = mlog.red('NO')
+            cached = mlog.blue('(cached)') if cached else ''
+            mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h, cached)
+        return result
+
+    @permittedKwargs({
+        'name',
+        'no_builtin_args',
+        'include_directories',
+        'args',
+        'dependencies',
+    })
+    def links_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('links method takes exactly one argument.')
+        code = args[0]
+        if isinstance(code, mesonlib.File):
+            code = mesonlib.File.from_absolute_file(
+                code.rel_to_builddir(self.environment.source_dir))
+        elif not isinstance(code, str):
+            raise InvalidArguments('Argument must be string or file.')
+        testname = kwargs.get('name', '')
+        if not isinstance(testname, str):
+            raise InterpreterException('Testname argument must be a string.')
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs, endl=None)
+        result, cached = self.compiler.links(code, self.environment,
+                                             extra_args=extra_args,
+                                             dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        if len(testname) > 0:
+            if result:
+                h = mlog.green('YES')
+            else:
+                h = mlog.red('NO')
+            mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h, cached)
+        return result
+
+    @FeatureNew('compiler.check_header', '0.47.0')
+    @FeatureNewKwargs('compiler.check_header', '0.50.0', ['required'])
+    @permittedKwargs(header_permitted_kwargs)
+    def check_header_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('check_header method takes exactly one argument.')
+        check_stringlist(args)
+        hname = args[0]
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of has_header must be a string.')
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+        if disabled:
+            mlog.log('Check usable header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        haz, cached = self.compiler.check_header(hname, prefix, self.environment,
+                                                 extra_args=extra_args,
+                                                 dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        if required and not haz:
+            raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not usable')
+        elif haz:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        mlog.log('Check usable header', mlog.bold(hname, True), msg, h, cached)
+        return haz
+
+    @FeatureNewKwargs('compiler.has_header', '0.50.0', ['required'])
+    @permittedKwargs(header_permitted_kwargs)
+    def has_header_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('has_header method takes exactly one argument.')
+        check_stringlist(args)
+        hname = args[0]
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of has_header must be a string.')
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+        if disabled:
+            mlog.log('Has header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        haz, cached = self.compiler.has_header(hname, prefix, self.environment,
+                                               extra_args=extra_args, dependencies=deps)
+        cached = mlog.blue('(cached)') if cached else ''
+        if required and not haz:
+            raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not found')
+        elif haz:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        mlog.log('Has header', mlog.bold(hname, True), msg, h, cached)
+        return haz
+
+    @FeatureNewKwargs('compiler.has_header_symbol', '0.50.0', ['required'])
+    @permittedKwargs(header_permitted_kwargs)
+    def has_header_symbol_method(self, args, kwargs):
+        if len(args) != 2:
+            raise InterpreterException('has_header_symbol method takes exactly two arguments.')
+        check_stringlist(args)
+        hname, symbol = args
+        prefix = kwargs.get('prefix', '')
+        if not isinstance(prefix, str):
+            raise InterpreterException('Prefix argument of has_header_symbol must be a string.')
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+        if disabled:
+            mlog.log(f'Header <{hname}> has symbol', mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        extra_args = functools.partial(self.determine_args, kwargs)
+        deps, msg = self.determine_dependencies(kwargs)
+        haz, cached = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment,
+                                                      extra_args=extra_args,
+                                                      dependencies=deps)
+        if required and not haz:
+            raise InterpreterException(f'{self.compiler.get_display_language()} symbol {symbol} not found in header {hname}')
+        elif haz:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        cached = mlog.blue('(cached)') if cached else ''
+        mlog.log(f'Header <{hname}> has symbol', mlog.bold(symbol, True), msg, h, cached)
+        return haz
+
+    def notfound_library(self, libname):
+        lib = dependencies.ExternalLibrary(libname, None,
+                                           self.environment,
+                                           self.compiler.language,
+                                           silent=True)
+        return lib
+
+    @FeatureNewKwargs('compiler.find_library', '0.51.0', ['static'])
+    @FeatureNewKwargs('compiler.find_library', '0.50.0', ['has_headers'])
+    @FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler'])
+    @disablerIfNotFound
+    @permittedKwargs(find_library_permitted_kwargs)
+    def find_library_method(self, args, kwargs):
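+        # Typical meson.build usage (illustrative):
+        #   m_dep = cc.find_library('m', required: false)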
+        # TODO add dependencies support?
+        if len(args) != 1:
+            raise InterpreterException('find_library method takes one argument.')
+        libname = args[0]
+        if not isinstance(libname, str):
+            raise InterpreterException('Library name not a string.')
+
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Library', mlog.bold(libname), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self.notfound_library(libname)
+
+        has_header_kwargs = {k[7:]: v for k, v in kwargs.items() if k.startswith('header_')}
+        has_header_kwargs['required'] = required
+        headers = mesonlib.stringlistify(kwargs.get('has_headers', []))
+        for h in headers:
+            if not self.has_header_method([h], has_header_kwargs):
+                return self.notfound_library(libname)
+
+        search_dirs = extract_search_dirs(kwargs)
+
+        libtype = mesonlib.LibType.PREFER_SHARED
+        if 'static' in kwargs:
+            if not isinstance(kwargs['static'], bool):
+                raise InterpreterException('static must be a boolean')
+            libtype = mesonlib.LibType.STATIC if kwargs['static'] else mesonlib.LibType.SHARED
+        linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype)
+        if required and not linkargs:
+            if libtype == mesonlib.LibType.PREFER_SHARED:
+                libtype = 'shared or static'
+            else:
+                libtype = libtype.name.lower()
+            raise InterpreterException('{} {} library {!r} not found'
+                                       .format(self.compiler.get_display_language(),
+                                               libtype, libname))
+        lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
+                                           self.compiler.language)
+        return lib
+
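+    # Illustrative meson.build usage of the find_library() lookup above
+    # (library and header names are hypothetical):
+    #   zlib = cc.find_library('z', has_headers: ['zlib.h'], required: false)
+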
+    @permittedKwargs({})
+    def has_argument_method(self, args: T.Sequence[str], kwargs) -> bool:
+        args = mesonlib.stringlistify(args)
+        if len(args) != 1:
+            raise InterpreterException('has_argument takes exactly one argument.')
+        return self.has_multi_arguments_method(args, kwargs)
+
+    @permittedKwargs({})
+    def has_multi_arguments_method(self, args: T.Sequence[str], kwargs: dict):
+        args = mesonlib.stringlistify(args)
+        result, cached = self.compiler.has_multi_arguments(args, self.environment)
+        if result:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        cached = mlog.blue('(cached)') if cached else ''
+        mlog.log(
+            'Compiler for {} supports arguments {}:'.format(
+                self.compiler.get_display_language(), ' '.join(args)),
+            h, cached)
+        return result
+
+    @FeatureNew('compiler.get_supported_arguments', '0.43.0')
+    @typed_kwargs(
+        'compiler.get_supported_arguments',
+        KwargInfo('checked', str, default='off', since='0.59.0',
+                  validator=lambda s: 'must be one of "warn", "require" or "off"' if s not in ['warn', 'require', 'off'] else None)
+    )
+    def get_supported_arguments_method(self, args: T.Sequence[str], kwargs: T.Dict[str, T.Any]):
+        args = mesonlib.stringlistify(args)
+        supported_args = []
+        checked = kwargs.pop('checked')
+
+        for arg in args:
+            if not self.has_argument_method(arg, kwargs):
+                msg = f'Compiler for {self.compiler.get_display_language()} does not support "{arg}"'
+                if checked == 'warn':
+                    mlog.warning(msg)
+                elif checked == 'require':
+                    raise mesonlib.MesonException(msg)
+            else:
+                supported_args.append(arg)
+        return supported_args
+
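+    # Illustrative meson.build usage of get_supported_arguments() above
+    # (the flags shown are just examples):
+    #   add_project_arguments(
+    #     cc.get_supported_arguments('-Wall', '-Wno-unknown-flag', checked: 'warn'),
+    #     language: 'c')
+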
+    @permittedKwargs({})
+    def first_supported_argument_method(self, args: T.Sequence[str], kwargs: dict) -> T.List[str]:
+        for arg in mesonlib.stringlistify(args):
+            if self.has_argument_method(arg, kwargs):
+                mlog.log('First supported argument:', mlog.bold(arg))
+                return [arg]
+        mlog.log('First supported argument:', mlog.red('None'))
+        return []
+
+    @FeatureNew('compiler.has_link_argument', '0.46.0')
+    @permittedKwargs({})
+    def has_link_argument_method(self, args, kwargs):
+        args = mesonlib.stringlistify(args)
+        if len(args) != 1:
+            raise InterpreterException('has_link_argument takes exactly one argument.')
+        return self.has_multi_link_arguments_method(args, kwargs)
+
+    @FeatureNew('compiler.has_multi_link_argument', '0.46.0')
+    @permittedKwargs({})
+    def has_multi_link_arguments_method(self, args, kwargs):
+        args = mesonlib.stringlistify(args)
+        result, cached = self.compiler.has_multi_link_arguments(args, self.environment)
+        cached = mlog.blue('(cached)') if cached else ''
+        if result:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        mlog.log(
+            'Compiler for {} supports link arguments {}:'.format(
+                self.compiler.get_display_language(), ' '.join(args)),
+            h, cached)
+        return result
+
+    @FeatureNew('compiler.get_supported_link_arguments_method', '0.46.0')
+    @permittedKwargs({})
+    def get_supported_link_arguments_method(self, args, kwargs):
+        args = mesonlib.stringlistify(args)
+        supported_args = []
+        for arg in args:
+            if self.has_link_argument_method(arg, kwargs):
+                supported_args.append(arg)
+        return supported_args
+
+    @FeatureNew('compiler.first_supported_link_argument_method', '0.46.0')
+    @permittedKwargs({})
+    def first_supported_link_argument_method(self, args, kwargs):
+        for i in mesonlib.stringlistify(args):
+            if self.has_link_argument_method(i, kwargs):
+                mlog.log('First supported link argument:', mlog.bold(i))
+                return [i]
+        mlog.log('First supported link argument:', mlog.red('None'))
+        return []
+
+    @FeatureNew('compiler.has_function_attribute', '0.48.0')
+    @permittedKwargs({})
+    def has_func_attribute_method(self, args, kwargs):
+        args = mesonlib.stringlistify(args)
+        if len(args) != 1:
+            raise InterpreterException('has_func_attribute takes exactly one argument.')
+        result, cached = self.compiler.has_func_attribute(args[0], self.environment)
+        cached = mlog.blue('(cached)') if cached else ''
+        h = mlog.green('YES') if result else mlog.red('NO')
+        mlog.log('Compiler for {} supports function attribute {}:'.format(self.compiler.get_display_language(), args[0]), h, cached)
+        return result
+
+    @FeatureNew('compiler.get_supported_function_attributes', '0.48.0')
+    @permittedKwargs({})
+    def get_supported_function_attributes_method(self, args, kwargs):
+        args = mesonlib.stringlistify(args)
+        return [a for a in args if self.has_func_attribute_method(a, kwargs)]
+
+    @FeatureNew('compiler.get_argument_syntax_method', '0.49.0')
+    @noPosargs
+    @noKwargs
+    def get_argument_syntax_method(self, args, kwargs):
+        return self.compiler.get_argument_syntax()
diff --git a/meson/mesonbuild/interpreter/dependencyfallbacks.py b/meson/mesonbuild/interpreter/dependencyfallbacks.py
new file mode 100644
index 000000000..180209aa1
--- /dev/null
+++ b/meson/mesonbuild/interpreter/dependencyfallbacks.py
@@ -0,0 +1,351 @@
+from .interpreterobjects import SubprojectHolder, extract_required_kwarg
+
+from .. import mlog
+from .. import dependencies
+from .. import build
+from ..wrap import WrapMode
+from ..mesonlib import OptionKey, extract_as_list, stringlistify, version_compare_many
+from ..dependencies import Dependency, DependencyException, NotFoundDependency
+from ..interpreterbase import (MesonInterpreterObject, FeatureNew,
+                               InterpreterException, InvalidArguments,
+                               TYPE_nkwargs, TYPE_nvar)
+
+import typing as T
+if T.TYPE_CHECKING:
+    from .interpreter import Interpreter
+
+
+class DependencyFallbacksHolder(MesonInterpreterObject):
+    def __init__(self, interpreter: 'Interpreter', names: T.List[str], allow_fallback: T.Optional[bool] = None,
+                 default_options: T.Optional[T.List[str]] = None) -> None:
+        super().__init__(subproject=interpreter.subproject)
+        self.interpreter = interpreter
+        self.subproject = interpreter.subproject
+        self.coredata = interpreter.coredata
+        self.build = interpreter.build
+        self.environment = interpreter.environment
+        self.wrap_resolver = interpreter.environment.wrap_resolver
+        self.allow_fallback = allow_fallback
+        self.subproject_name = None
+        self.subproject_varname = None
+        self.subproject_kwargs = {'default_options': default_options or []}
+        self.names: T.List[str] = []
+        for name in names:
+            if not name:
+                raise InterpreterException('dependency_fallbacks empty name \'\' is not allowed')
+            if '<' in name or '>' in name or '=' in name:
+                raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify\n'
+                                       'version requirements use the \'version\' keyword argument instead.')
+            if name in self.names:
+                raise InterpreterException(f'dependency_fallbacks name {name!r} is duplicated')
+            self.names.append(name)
+
+    def set_fallback(self, fbinfo: T.Optional[T.Union[T.List[str], str]]) -> None:
+        # Legacy: This converts dependency()'s fallback kwargs.
+        if fbinfo is None:
+            return
+        if self.allow_fallback is not None:
+            raise InvalidArguments('"fallback" and "allow_fallback" arguments are mutually exclusive')
+        fbinfo = stringlistify(fbinfo)
+        if len(fbinfo) == 0:
+            # dependency('foo', fallback: []) is the same as dependency('foo', allow_fallback: false)
+            self.allow_fallback = False
+            return
+        if len(fbinfo) == 1:
+            FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject)
+            subp_name, varname = fbinfo[0], None
+        elif len(fbinfo) == 2:
+            subp_name, varname = fbinfo
+        else:
+            raise InterpreterException('Fallback info must have one or two items.')
+        self._subproject_impl(subp_name, varname)
+
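+    # For reference, the legacy fallback forms handled above (values are
+    # illustrative):
+    #   dependency('foo', fallback: ['foo', 'foo_dep'])  # subproject + variable name
+    #   dependency('foo', fallback: 'foo')               # variable from wrap file (0.53.0+)
+    #   dependency('foo', fallback: [])                  # same as allow_fallback: false
+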
+    def _subproject_impl(self, subp_name: str, varname: str) -> None:
+        if not varname:
+            # If no variable name is specified, check if the wrap file has one.
+            # If the wrap file has a variable name, prefer it, because the
+            # subproject most probably is not using meson.override_dependency().
+            for name in self.names:
+                varname = self.wrap_resolver.get_varname(subp_name, name)
+                if varname:
+                    break
+        assert self.subproject_name is None
+        self.subproject_name = subp_name
+        self.subproject_varname = varname
+
+    def _do_dependency_cache(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        name = func_args[0]
+        cached_dep = self._get_cached_dep(name, kwargs)
+        if cached_dep:
+            self._verify_fallback_consistency(cached_dep)
+        return cached_dep
+
+    def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Note that there is no df.dependency() method; this is called for names
+        # given as positional arguments to dependency_fallbacks(name1, ...).
+        # We use kwargs from the dependency() function, for things like version,
+        # module, etc.
+        name = func_args[0]
+        self._handle_featurenew_dependencies(name)
+        dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+        if dep.found():
+            for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+            identifier = dependencies.get_dep_identifier(name, kwargs)
+            self.coredata.deps[for_machine].put(identifier, dep)
+            return dep
+        return None
+
+    def _do_existing_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        subp_name = func_args[0]
+        varname = self.subproject_varname
+        if subp_name and self._get_subproject(subp_name):
+            return self._get_subproject_dep(subp_name, varname, kwargs)
+        return None
+
+    def _do_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        if self.forcefallback:
+            mlog.log('Looking for a fallback subproject for the dependency',
+                     mlog.bold(self.display_name), 'because:\nUse of fallback dependencies is forced.')
+        elif self.nofallback:
+            mlog.log('Not looking for a fallback subproject for the dependency',
+                     mlog.bold(self.display_name), 'because:\nUse of fallback dependencies is disabled.')
+            return None
+        else:
+            mlog.log('Looking for a fallback subproject for the dependency',
+                     mlog.bold(self.display_name))
+
+        # Configure the subproject
+        subp_name = self.subproject_name
+        varname = self.subproject_varname
+        self.interpreter.do_subproject(subp_name, 'meson', func_kwargs)
+        return self._get_subproject_dep(subp_name, varname, kwargs)
+
+    def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
+        sub = self.interpreter.subprojects.get(subp_name)
+        if sub and sub.found():
+            return sub
+        return None
+
+    def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Verify the subproject is found
+        subproject = self._get_subproject(subp_name)
+        if not subproject:
+            mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+                     mlog.bold(subp_name), 'found:', mlog.red('NO'),
+                     mlog.blue('(subproject failed to configure)'))
+            return None
+
+        # The subproject has been configured. If for any reason the dependency
+        # cannot be found in this subproject we have to return a not-found
+        # object instead of None, because we don't want to continue the lookup
+        # on the system.
+
+        # Check if the subproject has overridden at least one of the names we got.
+        cached_dep = None
+        for name in self.names:
+            cached_dep = self._get_cached_dep(name, kwargs)
+            if cached_dep:
+                break
+
+        # If we have cached_dep we did all the checks and logging already in
+        # self._get_cached_dep().
+        if cached_dep:
+            self._verify_fallback_consistency(cached_dep)
+            return cached_dep
+
+        # Legacy: Use the variable name if provided instead of relying on the
+        # subproject to override one of our dependency names
+        if not varname:
+            mlog.warning(f'Subproject {subp_name!r} did not override {self.display_name!r} dependency and no variable name specified')
+            mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+            return self._notfound_dependency()
+
+        var_dep = self._get_subproject_variable(subproject, varname) or self._notfound_dependency()
+        if not var_dep.found():
+            mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+            return var_dep
+
+        wanted = stringlistify(kwargs.get('version', []))
+        found = var_dep.get_version()
+        if not self._check_version(wanted, found):
+            mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'),
+                     'found', mlog.normal_cyan(found), 'but need:',
+                     mlog.bold(', '.join([f"'{e}'" for e in wanted])))
+            return self._notfound_dependency()
+
+        mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+                 mlog.bold(subproject.subdir), 'found:', mlog.green('YES'),
+                 mlog.normal_cyan(found) if found else None)
+        return var_dep
+
+    def _get_cached_dep(self, name: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Unlike other methods, this one returns a not-found dependency instead
+        # of None when the dependency is cached as not-found, or when the cached
+        # version does not match. In that case we don't want to continue with
+        # other candidates.
+        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        identifier = dependencies.get_dep_identifier(name, kwargs)
+        wanted_vers = stringlistify(kwargs.get('version', []))
+
+        override = self.build.dependency_overrides[for_machine].get(identifier)
+        if override:
+            info = [mlog.blue('(overridden)' if override.explicit else '(cached)')]
+            cached_dep = override.dep
+            # We don't implicitly override not-found dependencies, but the user
+            # could have explicitly called meson.override_dependency() with a
+            # not-found dep.
+            if not cached_dep.found():
+                mlog.log('Dependency', mlog.bold(self.display_name),
+                         'found:', mlog.red('NO'), *info)
+                return cached_dep
+        else:
+            info = [mlog.blue('(cached)')]
+            cached_dep = self.coredata.deps[for_machine].get(identifier)
+
+        if cached_dep:
+            found_vers = cached_dep.get_version()
+            if not self._check_version(wanted_vers, found_vers):
+                mlog.log('Dependency', mlog.bold(name),
+                         'found:', mlog.red('NO'),
+                         'found', mlog.normal_cyan(found_vers), 'but need:',
+                         mlog.bold(', '.join([f"'{e}'" for e in wanted_vers])),
+                         *info)
+                return self._notfound_dependency()
+            if found_vers:
+                info = [mlog.normal_cyan(found_vers), *info]
+            mlog.log('Dependency', mlog.bold(self.display_name),
+                     'found:', mlog.green('YES'), *info)
+            return cached_dep
+        return None
+
+    def _get_subproject_variable(self, subproject: SubprojectHolder, varname: str) -> T.Optional[Dependency]:
+        try:
+            var_dep = subproject.get_variable_method([varname], {})
+        except InvalidArguments:
+            var_dep = None
+        if not isinstance(var_dep, Dependency):
+            mlog.warning(f'Variable {varname!r} in the subproject {subproject.subdir!r} is',
+                         'not found' if var_dep is None else 'not a dependency object')
+            return None
+        return var_dep
+
+    def _verify_fallback_consistency(self, cached_dep: Dependency):
+        subp_name = self.subproject_name
+        varname = self.subproject_varname
+        subproject = self._get_subproject(subp_name)
+        if subproject and varname:
+            var_dep = self._get_subproject_variable(subproject, varname)
+            if var_dep and cached_dep.found() and var_dep != cached_dep:
+                mlog.warning(f'Inconsistency: Subproject has overridden the dependency with another variable than {varname!r}')
+
+    def _handle_featurenew_dependencies(self, name: str) -> None:
+        'Do a feature check on dependencies used by this subproject'
+        if name == 'mpi':
+            FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject)
+        elif name == 'pcap':
+            FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject)
+        elif name == 'vulkan':
+            FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject)
+        elif name == 'libwmf':
+            FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject)
+        elif name == 'openmp':
+            FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject)
+
+    def _notfound_dependency(self) -> NotFoundDependency:
+        return NotFoundDependency(self.environment)
+
+    @staticmethod
+    def _check_version(wanted: T.Optional[T.List[str]], found: str) -> bool:
+        if not wanted:
+            return True
+        if found == 'undefined' or not version_compare_many(found, wanted)[0]:
+            return False
+        return True
+
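+    # For reference, a sketch of how _check_version() above behaves
+    # (illustrative values only):
+    #   _check_version([], '1.2.3')            -> True   (no constraint given)
+    #   _check_version(['>=1.0'], '1.2.3')     -> True
+    #   _check_version(['>=2.0'], '1.2.3')     -> False
+    #   _check_version(['>=1.0'], 'undefined') -> False
+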
+    def _get_candidates(self) -> T.List[T.Tuple[T.Callable[[TYPE_nkwargs, TYPE_nvar, TYPE_nkwargs], T.Optional[Dependency]], TYPE_nvar, TYPE_nkwargs]]:
+        candidates = []
+        # 1. check if any of the names is cached already.
+        for name in self.names:
+            candidates.append((self._do_dependency_cache, [name], {}))
+        # 2. check if the subproject fallback has already been configured.
+        if self.subproject_name:
+            candidates.append((self._do_existing_subproject, [self.subproject_name], self.subproject_kwargs))
+        # 3. check external dependency if we are not forced to use subproject
+        if not self.forcefallback or not self.subproject_name:
+            for name in self.names:
+                candidates.append((self._do_dependency, [name], {}))
+        # 4. configure the subproject
+        if self.subproject_name:
+            candidates.append((self._do_subproject, [self.subproject_name], self.subproject_kwargs))
+        return candidates
+
+    def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependency:
+        self.display_name = self.names[0] if self.names else '(anonymous)'
+        mods = extract_as_list(kwargs, 'modules')
+        if mods:
+            self.display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods))
+
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Dependency', mlog.bold(self.display_name), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self._notfound_dependency()
+
+        # Check if usage of the subproject fallback is forced
+        wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+        force_fallback_for = self.coredata.get_option(OptionKey('force_fallback_for'))
+        self.nofallback = wrap_mode == WrapMode.nofallback
+        self.forcefallback = (force_fallback or
+                              wrap_mode == WrapMode.forcefallback or
+                              any(name in force_fallback_for for name in self.names) or
+                              self.subproject_name in force_fallback_for)
+
+        # Add an implicit subproject fallback if none has been set explicitly,
+        # unless implicit fallback is not allowed.
+        # Legacy: self.allow_fallback can be None when that kwarg is not defined
+        # in dependency('name'). In that case we don't want to use implicit
+        # fallback when required is false because the user will typically fall
+        # back manually using, for example, cc.find_library().
+        if not self.subproject_name and self.allow_fallback is not False:
+            for name in self.names:
+                subp_name, varname = self.wrap_resolver.find_dep_provider(name)
+                if subp_name:
+                    self.forcefallback |= subp_name in force_fallback_for
+                    if self.forcefallback or self.allow_fallback is True or required or self._get_subproject(subp_name):
+                        self._subproject_impl(subp_name, varname)
+                    break
+
+        candidates = self._get_candidates()
+
+        # writing just "dependency('')" is an error, because it can only fail
+        if not candidates and required:
+            raise InvalidArguments('Dependency is required but has no candidates.')
+
+        # Try all candidates, only the last one is really required.
+        last = len(candidates) - 1
+        for i, item in enumerate(candidates):
+            func, func_args, func_kwargs = item
+            func_kwargs['required'] = required and (i == last)
+            kwargs['required'] = required and (i == last)
+            dep = func(kwargs, func_args, func_kwargs)
+            if dep and dep.found():
+                # Override this dependency to have consistent results in subsequent
+                # dependency lookups.
+                for name in self.names:
+                    for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+                    identifier = dependencies.get_dep_identifier(name, kwargs)
+                    if identifier not in self.build.dependency_overrides[for_machine]:
+                        self.build.dependency_overrides[for_machine][identifier] = \
+                            build.DependencyOverride(dep, self.interpreter.current_node, explicit=False)
+                return dep
+            elif required and (dep or i == last):
+                # This was the last candidate or the dependency has been cached
+                # as not-found, or cached dependency version does not match,
+                # otherwise func() would have returned None instead.
+                raise DependencyException(f'Dependency {self.display_name!r} is required but not found.')
+            elif dep:
+                # Same as above, but the dependency is not required.
+                return dep
+        return self._notfound_dependency()
diff --git a/meson/mesonbuild/interpreter/interpreter.py b/meson/mesonbuild/interpreter/interpreter.py
new file mode 100644
index 000000000..4a60ff485
--- /dev/null
+++ b/meson/mesonbuild/interpreter/interpreter.py
@@ -0,0 +1,2794 @@
+# Copyright 2012-2021 The Meson development team
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mparser
+from .. import environment
+from .. import coredata
+from .. import dependencies
+from .. import mlog
+from .. import build
+from .. import optinterpreter
+from .. import compilers
+from ..wrap import wrap, WrapMode
+from .. import mesonlib
+from ..mesonlib import HoldableObject, FileMode, MachineChoice, OptionKey, listify, extract_as_list, has_path_sep
+from ..programs import ExternalProgram, NonExistingExternalProgram
+from ..dependencies import Dependency
+from ..depfile import DepFile
+from ..interpreterbase import ContainerTypeInfo, InterpreterBase, KwargInfo, typed_kwargs, typed_pos_args
+from ..interpreterbase import noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening, noSecondLevelHolderResolving, permissive_unholder_return
+from ..interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
+from ..interpreterbase import Disabler, disablerIfNotFound
+from ..interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
+from ..interpreterbase import ObjectHolder, RangeHolder
+from ..interpreterbase import TYPE_nkwargs, TYPE_nvar, TYPE_var
+from ..modules import ExtensionModule, ModuleObject, MutableModuleObject, NewExtensionModule, NotFoundExtensionModule
+from ..cmake import CMakeInterpreter
+from ..backend.backends import Backend, ExecutableSerialisation
+
+from . import interpreterobjects as OBJ
+from . import compiler as compilerOBJ
+from .mesonmain import MesonMain
+from .dependencyfallbacks import DependencyFallbacksHolder
+from .interpreterobjects import (
+    SubprojectHolder,
+    EnvironmentVariablesObject,
+    ConfigurationDataObject,
+    Test,
+    RunProcess,
+    extract_required_kwarg,
+    extract_search_dirs,
+    NullSubprojectInterpreter,
+)
+
+from pathlib import Path
+import os
+import shutil
+import uuid
+import re
+import stat
+import collections
+import typing as T
+import textwrap
+import importlib
+
+if T.TYPE_CHECKING:
+    from . import kwargs
+
+    # Input source types passed to Targets
+    SourceInputs = T.Union[mesonlib.File, build.GeneratedList, build.BuildTarget, build.BothLibraries,
+                           build.CustomTargetIndex, build.CustomTarget, build.GeneratedList, str]
+    # Input source types passed to the build.Target classes
+    SourceOutputs = T.Union[mesonlib.File, build.GeneratedList,
+                            build.BuildTarget, build.CustomTargetIndex, build.CustomTarget,
+                            build.GeneratedList]
+
+
+def _language_validator(l: T.List[str]) -> T.Optional[str]:
+    """Validate language keyword argument.
+
+    Particularly for functions like `add_compiler()`, and `add_*_args()`
+    """
+    diff = {a.lower() for a in l}.difference(compilers.all_languages)
+    if diff:
+        return f'unknown languages: {", ".join(diff)}'
+    return None
+
+
+def _install_mode_validator(mode: T.List[T.Union[str, bool, int]]) -> T.Optional[str]:
+    """Validate the `install_mode` keyword argument.
+
+    This is a rather odd thing: it's either a scalar or an array of up to 3
+    values in the form:
+    [(str | False), (str | int | False) = False, (str | int | False) = False]
+    where the second and third elements are optional and default to False.
+    """
+    if not mode:
+        return None
+    if True in mode:
+        return 'can only be a string or false, not true'
+    if len(mode) > 3:
+        return 'may have at most 3 elements'
+
+    perms = mode[0]
+    if not isinstance(perms, (str, bool)):
+        return 'permissions part must be a string or false'
+
+    if isinstance(perms, str):
+        if not len(perms) == 9:
+            return (f'permissions string must be exactly 9 characters, got "{len(perms)}" '
+                   'in the form rwxr-xr-x')
+        for i in [0, 3, 6]:
+            if perms[i] not in {'-', 'r'}:
+                return f'bit {i} must be "-" or "r", not {perms[i]}'
+        for i in [1, 4, 7]:
+            if perms[i] not in {'-', 'w'}:
+                return f'bit {i} must be "-" or "w", not {perms[i]}'
+        for i in [2, 5]:
+            if perms[i] not in {'-', 'x', 's', 'S'}:
+                return f'bit {i} must be "-", "s", "S", or "x", not {perms[i]}'
+        if perms[8] not in {'-', 'x', 't', 'T'}:
+            return f'bit 8 must be "-", "t", "T", or "x", not {perms[8]}'
+
+        if len(mode) >= 2 and not isinstance(mode[1], (int, str, bool)):
+            return 'second component must be a string, number, or False if provided'
+        if len(mode) >= 3 and not isinstance(mode[2], (int, str, bool)):
+            return 'third component must be a string, number, or False if provided'
+
+    return None
+
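+# For reference, install_mode forms accepted by the validator above
+# (values are illustrative):
+#   install_mode: 'rw-r--r--'
+#   install_mode: ['rwxr-xr-x', 'root', 0]
+#   install_mode: [false, false, 0]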
+
+def _install_mode_convertor(mode: T.Optional[T.List[T.Union[str, bool, int]]]) -> FileMode:
+    """Convert the DSL form of the `install_mode` keyword arugment to `FileMode`
+
+    This is not required, and if not required returns None
+
+    TODO: It's not clear to me why this needs to be None and not just return an
+    emtpy FileMode.
+    """
+    # this has already been validated by the validator
+    return FileMode(*[m if isinstance(m, str) else None for m in mode])
+
+
+_NATIVE_KW = KwargInfo(
+    'native', bool,
+    default=False,
+    convertor=lambda n: MachineChoice.BUILD if n else MachineChoice.HOST)
+
+_LANGUAGE_KW = KwargInfo(
+    'language', ContainerTypeInfo(list, str, allow_empty=False),
+    listify=True,
+    required=True,
+    validator=_language_validator,
+    convertor=lambda x: [i.lower() for i in x])
+
+_INSTALL_MODE_KW = KwargInfo(
+    'install_mode',
+    ContainerTypeInfo(list, (str, bool, int)),
+    listify=True,
+    default=[],
+    validator=_install_mode_validator,
+    convertor=_install_mode_convertor,
+)
+
+_REQUIRED_KW = KwargInfo(
+    'required',
+    (bool, coredata.UserFeatureOption),
+    default=True,
+    # TODO: extract_required_kwarg could be converted to a convertor
+)
+
+
+def stringifyUserArguments(args, quote=False):
+    if isinstance(args, list):
+        return '[%s]' % ', '.join([stringifyUserArguments(x, True) for x in args])
+    elif isinstance(args, dict):
+        return '{%s}' % ', '.join(['{} : {}'.format(stringifyUserArguments(k, True), stringifyUserArguments(v, True)) for k, v in args.items()])
+    elif isinstance(args, int):
+        return str(args)
+    elif isinstance(args, str):
+        return f"'{args}'" if quote else args
+    raise InvalidArguments('Function accepts only strings, integers, lists, dictionaries and lists thereof.')
+
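+# A quick sketch of stringifyUserArguments() above (illustrative values):
+#   stringifyUserArguments('abc')      -> abc
+#   stringifyUserArguments(['a', 1])   -> ['a', 1]
+#   stringifyUserArguments({'k': 'v'}) -> {'k' : 'v'}
+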
+class Summary:
+    def __init__(self, project_name, project_version):
+        self.project_name = project_name
+        self.project_version = project_version
+        self.sections = collections.defaultdict(dict)
+        self.max_key_len = 0
+
+    def add_section(self, section, values, kwargs, subproject):
+        bool_yn = kwargs.get('bool_yn', False)
+        if not isinstance(bool_yn, bool):
+            raise InterpreterException('bool_yn keyword argument must be boolean')
+        list_sep = kwargs.get('list_sep')
+        if list_sep is not None and not isinstance(list_sep, str):
+            raise InterpreterException('list_sep keyword argument must be string')
+        for k, v in values.items():
+            if k in self.sections[section]:
+                raise InterpreterException(f'Summary section {section!r} already has key {k!r}')
+            formatted_values = []
+            for i in listify(v):
+                if isinstance(i, bool) and bool_yn:
+                    formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
+                elif isinstance(i, (str, int, bool)):
+                    formatted_values.append(str(i))
+                elif isinstance(i, (ExternalProgram, Dependency)):
+                    FeatureNew.single_use('dependency or external program in summary', '0.57.0', subproject)
+                    formatted_values.append(i.summary_value())
+                elif isinstance(i, coredata.UserOption):
+                    FeatureNew.single_use('feature option in summary', '0.58.0', subproject)
+                    formatted_values.append(i.printable_value())
+                else:
+                    m = 'Summary value in section {!r}, key {!r}, must be string, integer, boolean, dependency or external program'
+                    raise InterpreterException(m.format(section, k))
+            self.sections[section][k] = (formatted_values, list_sep)
+            self.max_key_len = max(self.max_key_len, len(k))
+
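+    # Illustrative meson.build calls that end up in add_section() above
+    # (keys and values are hypothetical):
+    #   summary({'Tests': true, 'Docs': false}, bool_yn: true)
+    #   summary('Modules', ['foo', 'bar'], list_sep: ', ')
+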
+    def dump(self):
+        mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
+        for section, values in self.sections.items():
+            mlog.log('')  # newline
+            if section:
+                mlog.log(' ', mlog.bold(section))
+            for k, v in values.items():
+                v, list_sep = v
+                padding = self.max_key_len - len(k)
+                end = ' ' if v else ''
+                mlog.log(' ' * 3, k + ' ' * padding + ':', end=end)
+                indent = self.max_key_len + 6
+                self.dump_value(v, list_sep, indent)
+        mlog.log('')  # newline
+
+    def dump_value(self, arr, list_sep, indent):
+        lines_sep = '\n' + ' ' * indent
+        if list_sep is None:
+            mlog.log(*arr, sep=lines_sep)
+            return
+        max_len = shutil.get_terminal_size().columns
+        line = []
+        line_len = indent
+        lines_sep = list_sep.rstrip() + lines_sep
+        for v in arr:
+            v_len = len(v) + len(list_sep)
+            if line and line_len + v_len > max_len:
+                mlog.log(*line, sep=list_sep, end=lines_sep)
+                line_len = indent
+                line = []
+            line.append(v)
+            line_len += v_len
+        mlog.log(*line, sep=list_sep)
+
+known_library_kwargs = (
+    build.known_shlib_kwargs |
+    build.known_stlib_kwargs
+)
+
+known_build_target_kwargs = (
+    known_library_kwargs |
+    build.known_exe_kwargs |
+    build.known_jar_kwargs |
+    {'target_type'}
+)
+
+TEST_KWARGS: T.List[KwargInfo] = [
+    KwargInfo('args', ContainerTypeInfo(list, (str, mesonlib.File, build.Target)),
+              listify=True, default=[]),
+    KwargInfo('should_fail', bool, default=False),
+    KwargInfo('timeout', int, default=30),
+    KwargInfo('workdir', str, default=None,
+              validator=lambda x: 'must be an absolute path' if not os.path.isabs(x) else None),
+    KwargInfo('protocol', str,
+              default='exitcode',
+              validator=lambda x: 'value must be one of "exitcode", "tap", "gtest", "rust"' if x not in {'exitcode', 'tap', 'gtest', 'rust'} else None,
+              since_values={'gtest': '0.55.0', 'rust': '0.57.0'}),
+    KwargInfo('depends', ContainerTypeInfo(list, (build.CustomTarget, build.BuildTarget)),
+              listify=True, default=[], since='0.46.0'),
+    KwargInfo('priority', int, default=0, since='0.52.0'),
+    # TODO: env needs reworks of the way the environment variable holder itself works probably
+    KwargInfo('env', (EnvironmentVariablesObject, list, dict, str)),
+    KwargInfo('suite', ContainerTypeInfo(list, str), listify=True, default=['']),  # yes, a list of empty string
+]
+
+permitted_dependency_kwargs = {
+    'allow_fallback',
+    'cmake_args',
+    'cmake_module_path',
+    'cmake_package_version',
+    'components',
+    'default_options',
+    'fallback',
+    'include_type',
+    'language',
+    'main',
+    'method',
+    'modules',
+    'native',
+    'not_found_message',
+    'optional_modules',
+    'private_headers',
+    'required',
+    'static',
+    'version',
+}
+
+class Interpreter(InterpreterBase, HoldableObject):
+
+    def __init__(
+                self,
+                _build: build.Build,
+                backend: T.Optional[Backend] = None,
+                subproject: str = '',
+                subdir: str = '',
+                subproject_dir: str = 'subprojects',
+                default_project_options: T.Optional[T.Dict[str, str]] = None,
+                mock: bool = False,
+                ast: T.Optional[mparser.CodeBlockNode] = None,
+                is_translated: bool = False,
+            ) -> None:
+        super().__init__(_build.environment.get_source_dir(), subdir, subproject)
+        self.an_unpicklable_object = mesonlib.an_unpicklable_object
+        self.build = _build
+        self.environment = self.build.environment
+        self.coredata = self.environment.get_coredata()
+        self.backend = backend
+        self.summary = {}
+        self.modules = {}
+        # Subproject directory is usually the name of the subproject, but can
+        # be different for dependencies provided by wrap files.
+        self.subproject_directory_name = subdir.split(os.path.sep)[-1]
+        self.subproject_dir = subproject_dir
+        self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+        if not mock and ast is None:
+            self.load_root_meson_file()
+            self.sanity_check_ast()
+        elif ast is not None:
+            self.ast = ast
+            self.sanity_check_ast()
+        self.builtin.update({'meson': MesonMain(self.build, self)})
+        self.generators: T.List[build.Generator] = []
+        self.processed_buildfiles = set() # type: T.Set[str]
+        self.project_args_frozen = False
+        self.global_args_frozen = False  # implies self.project_args_frozen
+        self.subprojects: T.Dict[str, SubprojectHolder] = {}
+        self.subproject_stack = []
+        self.configure_file_outputs = {}
+        # Passed from the outside, only used in subprojects.
+        if default_project_options:
+            self.default_project_options = default_project_options.copy()
+        else:
+            self.default_project_options = {}
+        self.project_default_options = {}
+        self.build_func_dict()
+        self.build_holder_map()
+
+        # build_def_files needs to be defined before parse_project is called
+        #
+        # For non-meson subprojects, we'll be using the ast. Even if it does
+        # exist we don't want to add a dependency on it; it's autogenerated
+        # from the actual build files and is just for reference.
+        self.build_def_files = []
+        build_filename = os.path.join(self.subdir, environment.build_filename)
+        if not is_translated:
+            self.build_def_files.append(build_filename)
+        if not mock:
+            self.parse_project()
+        self._redetect_machines()
+
+    def _redetect_machines(self):
+        # Re-initialize machine descriptions. We can do a better job now because we
+        # have the compilers needed to gain more knowledge, so wipe out old
+        # inference and start over.
+        machines = self.build.environment.machines.miss_defaulting()
+        machines.build = environment.detect_machine_info(self.coredata.compilers.build)
+        self.build.environment.machines = machines.default_missing()
+        assert self.build.environment.machines.build.cpu is not None
+        assert self.build.environment.machines.host.cpu is not None
+        assert self.build.environment.machines.target.cpu is not None
+
+        self.builtin['build_machine'] = \
+            OBJ.MachineHolder(self.build.environment.machines.build, self)
+        self.builtin['host_machine'] = \
+            OBJ.MachineHolder(self.build.environment.machines.host, self)
+        self.builtin['target_machine'] = \
+            OBJ.MachineHolder(self.build.environment.machines.target, self)
+
+    # TODO: Why is this in interpreter.py and not CoreData or Environment?
+    def get_non_matching_default_options(self) -> T.Iterator[T.Tuple[str, str, coredata.UserOption]]:
+        for def_opt_name, def_opt_value in self.project_default_options.items():
+            cur_opt_value = self.coredata.options.get(def_opt_name)
+            try:
+                if cur_opt_value is not None and cur_opt_value.validate_value(def_opt_value) != cur_opt_value.value:
+                    yield (str(def_opt_name), def_opt_value, cur_opt_value)
+            except mesonlib.MesonException:
+                # Since the default value does not validate, it cannot be in use
+                # Report the user-specified value as non-matching
+                yield (str(def_opt_name), def_opt_value, cur_opt_value)
+
+    def build_func_dict(self):
+        self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
+                           'add_project_arguments': self.func_add_project_arguments,
+                           'add_global_link_arguments': self.func_add_global_link_arguments,
+                           'add_project_link_arguments': self.func_add_project_link_arguments,
+                           'add_test_setup': self.func_add_test_setup,
+                           'add_languages': self.func_add_languages,
+                           'alias_target': self.func_alias_target,
+                           'assert': self.func_assert,
+                           'benchmark': self.func_benchmark,
+                           'build_target': self.func_build_target,
+                           'configuration_data': self.func_configuration_data,
+                           'configure_file': self.func_configure_file,
+                           'custom_target': self.func_custom_target,
+                           'declare_dependency': self.func_declare_dependency,
+                           'dependency': self.func_dependency,
+                           'disabler': self.func_disabler,
+                           'environment': self.func_environment,
+                           'error': self.func_error,
+                           'executable': self.func_executable,
+                           'generator': self.func_generator,
+                           'gettext': self.func_gettext,
+                           'get_option': self.func_get_option,
+                           'get_variable': self.func_get_variable,
+                           'files': self.func_files,
+                           'find_library': self.func_find_library,
+                           'find_program': self.func_find_program,
+                           'include_directories': self.func_include_directories,
+                           'import': self.func_import,
+                           'install_data': self.func_install_data,
+                           'install_headers': self.func_install_headers,
+                           'install_man': self.func_install_man,
+                           'install_subdir': self.func_install_subdir,
+                           'is_disabler': self.func_is_disabler,
+                           'is_variable': self.func_is_variable,
+                           'jar': self.func_jar,
+                           'join_paths': self.func_join_paths,
+                           'library': self.func_library,
+                           'message': self.func_message,
+                           'warning': self.func_warning,
+                           'option': self.func_option,
+                           'project': self.func_project,
+                           'run_target': self.func_run_target,
+                           'run_command': self.func_run_command,
+                           'set_variable': self.func_set_variable,
+                           'subdir': self.func_subdir,
+                           'subdir_done': self.func_subdir_done,
+                           'subproject': self.func_subproject,
+                           'summary': self.func_summary,
+                           'shared_library': self.func_shared_lib,
+                           'shared_module': self.func_shared_module,
+                           'static_library': self.func_static_lib,
+                           'both_libraries': self.func_both_lib,
+                           'test': self.func_test,
+                           'vcs_tag': self.func_vcs_tag,
+                           'range': self.func_range,
+                           })
+        if 'MESON_UNIT_TEST' in os.environ:
+            self.funcs.update({'exception': self.func_exception})
+
+    def build_holder_map(self) -> None:
+        '''
+            Build a mapping of `HoldableObject` types to their corresponding
+            `ObjectHolder`s. This mapping is used in `InterpreterBase` to automatically
+            holderify all returned values from methods and functions.
+        '''
+        self.holder_map.update({
+            mesonlib.File: OBJ.FileHolder,
+            build.SharedLibrary: OBJ.SharedLibraryHolder,
+            build.StaticLibrary: OBJ.StaticLibraryHolder,
+            build.BothLibraries: OBJ.BothLibrariesHolder,
+            build.SharedModule: OBJ.SharedModuleHolder,
+            build.Executable: OBJ.ExecutableHolder,
+            build.Jar: OBJ.JarHolder,
+            build.CustomTarget: OBJ.CustomTargetHolder,
+            build.CustomTargetIndex: OBJ.CustomTargetIndexHolder,
+            build.Generator: OBJ.GeneratorHolder,
+            build.GeneratedList: OBJ.GeneratedListHolder,
+            build.ExtractedObjects: OBJ.GeneratedObjectsHolder,
+            build.RunTarget: OBJ.RunTargetHolder,
+            build.AliasTarget: OBJ.AliasTargetHolder,
+            build.Headers: OBJ.HeadersHolder,
+            build.Man: OBJ.ManHolder,
+            build.Data: OBJ.DataHolder,
+            build.InstallDir: OBJ.InstallDirHolder,
+            build.IncludeDirs: OBJ.IncludeDirsHolder,
+            compilers.RunResult: compilerOBJ.TryRunResultHolder,
+            dependencies.ExternalLibrary: OBJ.ExternalLibraryHolder,
+            coredata.UserFeatureOption: OBJ.FeatureOptionHolder,
+        })
+
+        '''
+            Build a mapping of `HoldableObject` base classes to their
+            corresponding `ObjectHolder`s. The difference to `self.holder_map`
+            is that the keys here define an upper bound instead of requiring an
+            exact match.
+
+            The mappings defined here are only used when there was no direct hit
+            found in `self.holder_map`.
+        '''
+        self.bound_holder_map.update({
+            dependencies.Dependency: OBJ.DependencyHolder,
+            ExternalProgram: OBJ.ExternalProgramHolder,
+            compilers.Compiler: compilerOBJ.CompilerHolder,
+            ModuleObject: OBJ.ModuleObjectHolder,
+            MutableModuleObject: OBJ.MutableModuleObjectHolder,
+        })
+
+    def append_holder_map(self, held_type: T.Type[mesonlib.HoldableObject], holder_type: T.Type[ObjectHolder]) -> None:
+        '''
+            Adds one additional mapping to the `holder_map`.
+
+            The intended use for this function is in the `initialize` method of
+            modules to register custom object holders.
+        '''
+        self.holder_map.update({
+            held_type: holder_type
+        })
+
+    def process_new_values(self, invalues: T.List[TYPE_var]) -> None:
+        invalues = listify(invalues)
+        for v in invalues:
+            if isinstance(v, ObjectHolder):
+                raise InterpreterException('Modules must not return ObjectHolders')
+            if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
+                self.add_target(v.name, v)
+            elif isinstance(v, list):
+                self.process_new_values(v)
+            elif isinstance(v, ExecutableSerialisation):
+                v.subproject = self.subproject
+                self.build.install_scripts.append(v)
+            elif isinstance(v, build.Data):
+                self.build.data.append(v)
+            elif isinstance(v, dependencies.InternalDependency):
+                # FIXME: This is special cased and not ideal:
+                # The first source is our new VapiTarget, the rest are deps
+                self.process_new_values(v.sources[0])
+            elif isinstance(v, build.InstallDir):
+                self.build.install_dirs.append(v)
+            elif isinstance(v, Test):
+                self.build.tests.append(v)
+            elif isinstance(v, (int, str, bool, Disabler, ObjectHolder, build.GeneratedList,
+                                ExternalProgram)):
+                pass
+            else:
+                raise InterpreterException('Module returned a value of unknown type.')
+
+    def get_build_def_files(self) -> T.List[str]:
+        return self.build_def_files
+
+    def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
+        # Use relative path for files within source directory, and absolute path
+        # for system files. Skip files within the build directory. Also skip
+        # non-regular files (e.g. /dev/stdout). Normalize the path to avoid
+        # duplicates; this is especially important to convert '/' to '\' on Windows.
+        if isinstance(f, mesonlib.File):
+            if f.is_built:
+                return
+            f = os.path.normpath(f.relative_name())
+        elif os.path.isfile(f) and not f.startswith('/dev'):
+            srcdir = Path(self.environment.get_source_dir())
+            builddir = Path(self.environment.get_build_dir())
+            try:
+                f = Path(f).resolve()
+            except OSError:
+                f = Path(f)
+                s = f.stat()
+                if (hasattr(s, 'st_file_attributes') and
+                        s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+                        s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+                    # This is a Windows Store link which we can't
+                    # resolve, so just do our best otherwise.
+                    f = f.parent.resolve() / f.name
+                else:
+                    raise
+            if builddir in f.parents:
+                return
+            if srcdir in f.parents:
+                f = f.relative_to(srcdir)
+            f = str(f)
+        else:
+            return
+        if f not in self.build_def_files:
+            self.build_def_files.append(f)
+
+    def get_variables(self):
+        return self.variables
+
+    def check_stdlibs(self):
+        machine_choices = [MachineChoice.HOST]
+        if self.coredata.is_cross_build():
+            machine_choices.append(MachineChoice.BUILD)
+        for for_machine in machine_choices:
+            props = self.build.environment.properties[for_machine]
+            for l in self.coredata.compilers[for_machine].keys():
+                try:
+                    di = mesonlib.stringlistify(props.get_stdlib(l))
+                except KeyError:
+                    continue
+                if len(di) == 1:
+                    FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject)
+                kwargs = {'native': for_machine is MachineChoice.BUILD,
+                          }
+                name = l + '_stdlib'
+                df = DependencyFallbacksHolder(self, [name])
+                df.set_fallback(di)
+                dep = df.lookup(kwargs, force_fallback=True)
+                self.build.stdlibs[for_machine][l] = dep
+
+    def _import_module(self, modname: str, required: bool) -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
+        if modname in self.modules:
+            return self.modules[modname]
+        try:
+            module = importlib.import_module('mesonbuild.modules.' + modname)
+        except ImportError:
+            if required:
+                raise InvalidArguments(f'Module "{modname}" does not exist')
+            ext_module = NotFoundExtensionModule()
+        else:
+            ext_module = module.initialize(self)
+            assert isinstance(ext_module, (ExtensionModule, NewExtensionModule))
+        self.modules[modname] = ext_module
+        return ext_module
+
+    @typed_pos_args('import', str)
+    @typed_kwargs(
+        'import',
+        _REQUIRED_KW.evolve(since='0.59.0'),
+        KwargInfo('disabler', bool, default=False, since='0.59.0'),
+    )
+    @disablerIfNotFound
+    def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
+                    kwargs: 'kwargs.FuncImportModule') -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
+        modname = args[0]
+        disabled, required, _ = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            return NotFoundExtensionModule()
+
+        if modname.startswith('unstable-'):
+            plainname = modname.split('-', 1)[1]
+            try:
+                # check if stable module exists
+                mod = self._import_module(plainname, required)
+                # XXX: this is actually not helpful, since it doesn't do a version check
+                mlog.warning(f'Module {modname} is now stable, please use the {plainname} module instead.')
+                return mod
+            except InvalidArguments:
+                mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
+                modname = 'unstable_' + plainname
+        return self._import_module(modname, required)
+
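+    # For reference, how func_import() above treats 'unstable-' prefixes
+    # (the module name is hypothetical):
+    #   import('unstable-foo')  # tries the stable 'foo' module first and warns
+    #                           # if it exists, otherwise loads 'unstable_foo'
+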
+    @stringArgs
+    @noKwargs
+    def func_files(self, node, args, kwargs):
+        return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in args]
+
+    # Used by declare_dependency() and pkgconfig.generate()
+    def extract_variables(self, kwargs, argname='variables', list_new=False, dict_new=False):
+        variables = kwargs.get(argname, {})
+        if isinstance(variables, dict):
+            if dict_new and variables:
+                FeatureNew.single_use('variables as dictionary', '0.56.0', self.subproject)
+        else:
+            varlist = mesonlib.stringlistify(variables)
+            if list_new:
+                FeatureNew.single_use('variables as list of strings', '0.56.0', self.subproject)
+            variables = collections.OrderedDict()
+            for v in varlist:
+                try:
+                    (key, value) = v.split('=', 1)
+                except ValueError:
+                    raise InterpreterException(f'Variable {v!r} must have a value separated by equals sign.')
+                variables[key.strip()] = value.strip()
+        for k, v in variables.items():
+            if not k or not v:
+                raise InterpreterException('Empty variable name or value')
+            if any(c.isspace() for c in k):
+                raise InterpreterException(f'Invalid whitespace in variable name "{k}"')
+            if not isinstance(v, str):
+                raise InterpreterException('variables values must be strings.')
+        return variables
+
+    @FeatureNewKwargs('declare_dependency', '0.46.0', ['link_whole'])
+    @FeatureNewKwargs('declare_dependency', '0.54.0', ['variables'])
+    @permittedKwargs({'include_directories', 'link_with', 'sources', 'dependencies',
+                      'compile_args', 'link_args', 'link_whole', 'version',
+                      'variables' })
+    @noPosargs
+    def func_declare_dependency(self, node, args, kwargs):
+        version = kwargs.get('version', self.project_version)
+        if not isinstance(version, str):
+            raise InterpreterException('Version must be a string.')
+        incs = self.extract_incdirs(kwargs)
+        libs = extract_as_list(kwargs, 'link_with')
+        libs_whole = extract_as_list(kwargs, 'link_whole')
+        sources = extract_as_list(kwargs, 'sources')
+        sources = listify(self.source_strings_to_files(sources))
+        deps = extract_as_list(kwargs, 'dependencies')
+        compile_args = mesonlib.stringlistify(kwargs.get('compile_args', []))
+        link_args = mesonlib.stringlistify(kwargs.get('link_args', []))
+        variables = self.extract_variables(kwargs, list_new=True)
+        final_deps = []
+        for d in deps:
+            if not isinstance(d, (dependencies.Dependency, dependencies.ExternalLibrary, dependencies.InternalDependency)):
+                raise InterpreterException('Dependencies must be external deps')
+            final_deps.append(d)
+        for l in libs:
+            if isinstance(l, dependencies.Dependency):
+                raise InterpreterException('''Entries in "link_with" may only be self-built targets,
+external dependencies (including libraries) must go to "dependencies".''')
+        dep = dependencies.InternalDependency(version, incs, compile_args,
+                                              link_args, libs, libs_whole, sources, final_deps,
+                                              variables)
+        return dep
+
+    @noKwargs
+    def func_assert(self, node, args, kwargs):
+        if len(args) == 1:
+            FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject)
+            value = args[0]
+            message = None
+        elif len(args) == 2:
+            value, message = args
+            if not isinstance(message, str):
+                raise InterpreterException('Assert message not a string.')
+        else:
+            raise InterpreterException('Assert takes between one and two arguments')
+        if not isinstance(value, bool):
+            raise InterpreterException('Assert value not bool.')
+        if not value:
+            if message is None:
+                from ..ast import AstPrinter
+                printer = AstPrinter()
+                node.args.arguments[0].accept(printer)
+                message = printer.result
+            raise InterpreterException('Assert failed: ' + message)
+
+    def validate_arguments(self, args, argcount, arg_types):
+        if argcount is not None:
+            if argcount != len(args):
+                raise InvalidArguments('Expected %d arguments, got %d.' %
+                                       (argcount, len(args)))
+        for actual, wanted in zip(args, arg_types):
+            if wanted is not None:
+                if not isinstance(actual, wanted):
+                    raise InvalidArguments('Incorrect argument type.')
+
+    @FeatureNewKwargs('run_command', '0.50.0', ['env'])
+    @FeatureNewKwargs('run_command', '0.47.0', ['check', 'capture'])
+    @permittedKwargs({'check', 'capture', 'env'})
+    def func_run_command(self, node, args, kwargs):
+        return self.run_command_impl(node, args, kwargs)
+
+    def run_command_impl(self,
+                         node: mparser.BaseNode,
+                         args: T.Sequence[TYPE_nvar],
+                         kwargs: TYPE_nkwargs,
+                         in_builddir: bool = False) -> RunProcess:
+        if len(args) < 1:
+            raise InterpreterException('Not enough arguments')
+        cmd, *cargs = args
+        capture = kwargs.get('capture', True)
+        srcdir = self.environment.get_source_dir()
+        builddir = self.environment.get_build_dir()
+
+        check = kwargs.get('check', False)
+        if not isinstance(check, bool):
+            raise InterpreterException('Check must be boolean.')
+
+        env = self.unpack_env_kwarg(kwargs)
+
+        m = 'must be a string, or the output of find_program(), files() '\
+            'or configure_file(), or a compiler object; not {!r}'
+        expanded_args = []
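+        # Resolve the command: an executable that was overridden with a build
+        # target cannot run at configure time, a compiler object is replaced
+        # by the first entry of its exelist, and a plain string or File is
+        # looked up as an external program.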
+        if isinstance(cmd, build.Executable):
+            progname = node.args.arguments[0].value
+            msg = 'Program {!r} was overridden with the compiled executable {!r}'\
+                    ' and therefore cannot be used during configuration'
+            raise InterpreterException(msg.format(progname, cmd.description()))
+        if isinstance(cmd, ExternalProgram):
+            if not cmd.found():
+                raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable')
+        elif isinstance(cmd, compilers.Compiler):
+            exelist = cmd.get_exelist()
+            cmd = exelist[0]
+            prog = ExternalProgram(cmd, silent=True)
+            if not prog.found():
+                raise InterpreterException(f'Program {cmd!r} not found or not executable')
+            cmd = prog
+            expanded_args = exelist[1:]
+        else:
+            if isinstance(cmd, mesonlib.File):
+                cmd = cmd.absolute_path(srcdir, builddir)
+            elif not isinstance(cmd, str):
+                raise InterpreterException('First argument ' + m.format(cmd))
+            # Prefer scripts in the current source directory
+            search_dir = os.path.join(srcdir, self.subdir)
+            prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
+            if not prog.found():
+                raise InterpreterException(f'Program or command {cmd!r} not found or not executable')
+            cmd = prog
+        for a in listify(cargs):
+            if isinstance(a, str):
+                expanded_args.append(a)
+            elif isinstance(a, mesonlib.File):
+                expanded_args.append(a.absolute_path(srcdir, builddir))
+            elif isinstance(a, ExternalProgram):
+                expanded_args.append(a.get_path())
+            else:
+                raise InterpreterException('Arguments ' + m.format(a))
+        # If any file that was used as an argument to the command
+        # changes, we must re-run the configuration step.
+        self.add_build_def_file(cmd.get_path())
+        for a in expanded_args:
+            if not os.path.isabs(a):
+                a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
+            self.add_build_def_file(a)
+        return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
+                          self.environment.get_build_command() + ['introspect'],
+                          in_builddir=in_builddir, check=check, capture=capture)
+
+    @stringArgs
+    def func_gettext(self, nodes, args, kwargs):
+        raise InterpreterException('Gettext() function has been moved to module i18n. Import it and use i18n.gettext() instead')
+
+    def func_option(self, nodes, args, kwargs):
+        raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')
+
+    @FeatureNewKwargs('subproject', '0.38.0', ['default_options'])
+    @permittedKwargs({'version', 'default_options', 'required'})
+    @stringArgs
+    def func_subproject(self, nodes, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Subproject takes exactly one argument')
+        return self.do_subproject(args[0], 'meson', kwargs)
+
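+    # Record a placeholder for a subproject that was skipped or failed, so
+    # later subproject() calls and the summary can report why it is missing.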
+    def disabled_subproject(self, subp_name, disabled_feature=None, exception=None):
+        sub = SubprojectHolder(NullSubprojectInterpreter(), os.path.join(self.subproject_dir, subp_name),
+                               disabled_feature=disabled_feature, exception=exception)
+        self.subprojects[subp_name] = sub
+        self.coredata.initialized_subprojects.add(subp_name)
+        return sub
+
+    def do_subproject(self, subp_name: str, method: str, kwargs):
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self.disabled_subproject(subp_name, disabled_feature=feature)
+
+        default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
+        default_options = coredata.create_options_dict(default_options, subp_name)
+
+        if subp_name == '':
+            raise InterpreterException('Subproject name must not be empty.')
+        if subp_name[0] == '.':
+            raise InterpreterException('Subproject name must not start with a period.')
+        if '..' in subp_name:
+            raise InterpreterException('Subproject name must not contain a ".." path segment.')
+        if os.path.isabs(subp_name):
+            raise InterpreterException('Subproject name must not be an absolute path.')
+        if has_path_sep(subp_name):
+            mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
+                         location=self.current_node)
+        if subp_name in self.subproject_stack:
+            fullstack = self.subproject_stack + [subp_name]
+            incpath = ' => '.join(fullstack)
+            raise InvalidCode('Recursive include of subprojects: %s.' % incpath)
+        if subp_name in self.subprojects:
+            subproject = self.subprojects[subp_name]
+            if required and not subproject.found():
+                raise InterpreterException('Subproject "%s" required but not found.' % (subproject.subdir))
+            return subproject
+
+        r = self.environment.wrap_resolver
+        try:
+            subdir = r.resolve(subp_name, method, self.subproject)
+        except wrap.WrapException as e:
+            if not required:
+                mlog.log(e)
+                mlog.log('Subproject', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
+                return self.disabled_subproject(subp_name, exception=e)
+            raise e
+
+        subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
+        os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
+        self.global_args_frozen = True
+
+        stack = ':'.join(self.subproject_stack + [subp_name])
+        m = ['\nExecuting subproject', mlog.bold(stack)]
+        if method != 'meson':
+            m += ['method', mlog.bold(method)]
+        mlog.log(*m, '\n', nested=False)
+
+        try:
+            if method == 'meson':
+                return self._do_subproject_meson(subp_name, subdir, default_options, kwargs)
+            elif method == 'cmake':
+                return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs)
+            else:
+                raise mesonlib.MesonBugException(f'The method {method} is invalid for the subproject {subp_name}')
+        # Invalid code is always an error
+        except InvalidCode:
+            raise
+        except Exception as e:
+            if not required:
+                with mlog.nested(subp_name):
+                    # Suppress the 'ERROR:' prefix because this exception is not
+                    # fatal and VS CI treats any log with "ERROR:" as fatal.
+                    mlog.exception(e, prefix=mlog.yellow('Exception:'))
+                mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)')
+                return self.disabled_subproject(subp_name, exception=e)
+            raise e
+
+    def _do_subproject_meson(self, subp_name: str, subdir: str, default_options, kwargs,
+                             ast: T.Optional[mparser.CodeBlockNode] = None,
+                             build_def_files: T.Optional[T.List[str]] = None,
+                             is_translated: bool = False) -> SubprojectHolder:
+        with mlog.nested(subp_name):
+            new_build = self.build.copy()
+            subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
+                               default_options, ast=ast, is_translated=is_translated)
+            # Those lists are shared by all interpreters. That means that
+            # even if the subproject fails, any modification that the subproject
+            # made to those lists will affect the parent project.
+            subi.subprojects = self.subprojects
+            subi.modules = self.modules
+            subi.holder_map = self.holder_map
+            subi.bound_holder_map = self.bound_holder_map
+            subi.summary = self.summary
+
+            subi.subproject_stack = self.subproject_stack + [subp_name]
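+            # Save the parent's state and reset the warning counter so that
+            # warnings emitted while running the subproject can be counted
+            # separately and attached to its SubprojectHolder below.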
+            current_active = self.active_projectname
+            current_warnings_counter = mlog.log_warnings_counter
+            mlog.log_warnings_counter = 0
+            subi.run()
+            subi_warnings = mlog.log_warnings_counter
+            mlog.log_warnings_counter = current_warnings_counter
+
+            mlog.log('Subproject', mlog.bold(subp_name), 'finished.')
+
+        mlog.log()
+
+        if 'version' in kwargs:
+            pv = subi.project_version
+            wanted = kwargs['version']
+            if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
+                raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
+        self.active_projectname = current_active
+        self.subprojects.update(subi.subprojects)
+        self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings)
+        # Duplicates are possible when subproject uses files from project root
+        if build_def_files:
+            self.build_def_files = list(set(self.build_def_files + build_def_files))
+        # We always need the subi.build_def_files, to propagate sub-sub-projects
+        self.build_def_files = list(set(self.build_def_files + subi.build_def_files))
+        self.build.merge(subi.build)
+        self.build.subprojects[subp_name] = subi.project_version
+        self.coredata.initialized_subprojects.add(subp_name)
+        return self.subprojects[subp_name]
+
+    def _do_subproject_cmake(self, subp_name, subdir, subdir_abs, default_options, kwargs):
+        with mlog.nested(subp_name):
+            new_build = self.build.copy()
+            prefix = self.coredata.options[OptionKey('prefix')].value
+
+            from ..modules.cmake import CMakeSubprojectOptions
+            options = kwargs.get('options', CMakeSubprojectOptions())
+            if not isinstance(options, CMakeSubprojectOptions):
+                raise InterpreterException('"options" kwarg must be CMakeSubprojectOptions'
+                                           ' object (created by cmake.subproject_options())')
+
+            cmake_options = mesonlib.stringlistify(kwargs.get('cmake_options', []))
+            cmake_options += options.cmake_options
+            cm_int = CMakeInterpreter(new_build, Path(subdir), Path(subdir_abs), Path(prefix), new_build.environment, self.backend)
+            cm_int.initialise(cmake_options)
+            cm_int.analyse()
+
+            # Generate a meson ast and execute it with the normal do_subproject_meson
+            ast = cm_int.pretend_to_be_meson(options.target_options)
+
+            mlog.log()
+            with mlog.nested('cmake-ast'):
+                mlog.log('Processing generated meson AST')
+
+                # Debug print the generated meson file
+                from ..ast import AstIndentationGenerator, AstPrinter
+                printer = AstPrinter()
+                ast.accept(AstIndentationGenerator())
+                ast.accept(printer)
+                printer.post_process()
+                meson_filename = os.path.join(self.build.environment.get_build_dir(), subdir, 'meson.build')
+                with open(meson_filename, "w", encoding='utf-8') as f:
+                    f.write(printer.result)
+
+                mlog.log('Build file:', meson_filename)
+                mlog.cmd_ci_include(meson_filename)
+                mlog.log()
+
+            result = self._do_subproject_meson(subp_name, subdir, default_options, kwargs, ast, cm_int.bs_files, is_translated=True)
+            result.cm_interpreter = cm_int
+
+        mlog.log()
+        return result
+
+    def get_option_internal(self, optname: str):
+        key = OptionKey.from_string(optname).evolve(subproject=self.subproject)
+
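+        # Non-project options: prefer the value set for this subproject but
+        # fall back to the top-level value when the per-subproject one is
+        # absent or yielding.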
+        if not key.is_project():
+            for opts in [self.coredata.options, compilers.base_options]:
+                v = opts.get(key)
+                if v is None or v.yielding:
+                    v = opts.get(key.as_root())
+                if v is not None:
+                    return v
+
+        try:
+            opt = self.coredata.options[key]
+            if opt.yielding and key.subproject and key.as_root() in self.coredata.options:
+                popt = self.coredata.options[key.as_root()]
+                if type(opt) is type(popt):
+                    opt = popt
+                else:
+                    # Get class name, then option type as a string
+                    opt_type = opt.__class__.__name__[4:][:-6].lower()
+                    popt_type = popt.__class__.__name__[4:][:-6].lower()
+                    # This is not a hard error to avoid dependency hell; the workaround
+                    # when this happens is to simply set the subproject's option directly.
+                    mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
+                                 'to parent option of type {3!r}, ignoring parent value. '
+                                 'Use -D{2}:{0}=value to set the value for this option manually'
+                                 '.'.format(optname, opt_type, self.subproject, popt_type),
+                                 location=self.current_node)
+            return opt
+        except KeyError:
+            pass
+
+        raise InterpreterException('Tried to access unknown option "%s".' % optname)
+
+    @stringArgs
+    @noKwargs
+    def func_get_option(self, nodes, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Argument required for get_option.')
+        optname = args[0]
+        if ':' in optname:
+            raise InterpreterException('Having a colon in option name is forbidden, '
+                                       'projects are not allowed to directly access '
+                                       'options of other subprojects.')
+        opt = self.get_option_internal(optname)
+        if isinstance(opt, coredata.UserFeatureOption):
+            opt.name = optname
+            return opt
+        elif isinstance(opt, coredata.UserOption):
+            return opt.value
+        return opt
+
+    @noKwargs
+    def func_configuration_data(self, node, args, kwargs):
+        if len(args) > 1:
+            raise InterpreterException('configuration_data takes only one optional positional argument')
+        elif len(args) == 1:
+            FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject)
+            initial_values = args[0]
+            if not isinstance(initial_values, dict):
+                raise InterpreterException('configuration_data first argument must be a dictionary')
+        else:
+            initial_values = {}
+        return ConfigurationDataObject(self.subproject, initial_values)
+
+    def set_backend(self):
+        # The backend is already set when parsing subprojects
+        if self.backend is not None:
+            return
+        backend = self.coredata.get_option(OptionKey('backend'))
+        from ..backend import backends
+        self.backend = backends.get_backend_from_name(backend, self.build, self)
+
+        if self.backend is None:
+            raise InterpreterException('Unknown backend "%s".' % backend)
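+        # A generic 'vs' backend request may have been resolved to a concrete
+        # Visual Studio version; persist the detected name in the option.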
+        if backend != self.backend.name:
+            if self.backend.name.startswith('vs'):
+                mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
+            self.coredata.set_option(OptionKey('backend'), self.backend.name)
+
+        # Only init backend options on first invocation otherwise it would
+        # override values previously set from command line.
+        if self.environment.first_invocation:
+            self.coredata.init_backend_options(backend)
+
+        options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
+        self.coredata.set_options(options)
+
+    @stringArgs
+    @permittedKwargs({'version', 'meson_version', 'default_options', 'license', 'subproject_dir'})
+    def func_project(self, node, args, kwargs):
+        if len(args) < 1:
+            raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
+        proj_name, *proj_langs = args
+        if ':' in proj_name:
+            raise InvalidArguments(f"Project name {proj_name!r} must not contain ':'")
+
+        # This needs to be evaluated as early as possible, as meson uses this
+        # for things like deprecation testing.
+        if 'meson_version' in kwargs:
+            cv = coredata.version
+            pv = kwargs['meson_version']
+            if not mesonlib.version_compare(cv, pv):
+                raise InterpreterException(f'Meson version is {cv} but project requires {pv}')
+            mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
+
+        if os.path.exists(self.option_file):
+            oi = optinterpreter.OptionInterpreter(self.subproject)
+            oi.process(self.option_file)
+            self.coredata.update_project_options(oi.options)
+            self.add_build_def_file(self.option_file)
+
+        # Do not set default_options on reconfigure otherwise it would override
+        # values previously set from command line. That means that changing
+        # default_options in a project will trigger a reconfigure but won't
+        # have any effect.
+        self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
+        self.project_default_options = coredata.create_options_dict(self.project_default_options, self.subproject)
+
+        # If this is the first invocation we always need to initialize
+        # builtins; if this is a subproject that is new in a re-invocation we
+        # need to initialize builtins for it.
+        if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
+            default_options = self.project_default_options.copy()
+            default_options.update(self.default_project_options)
+            self.coredata.init_builtins(self.subproject)
+        else:
+            default_options = {}
+        self.coredata.set_default_options(default_options, self.subproject, self.environment)
+
+        if not self.is_subproject():
+            self.build.project_name = proj_name
+        self.active_projectname = proj_name
+        version = kwargs.get('version', 'undefined')
+        if isinstance(version, list):
+            if len(version) != 1:
+                raise InvalidCode('Version argument is an array with more than one entry.')
+            version = version[0]
+        if isinstance(version, mesonlib.File):
+            FeatureNew.single_use('version from file', '0.57.0', self.subproject)
+            self.add_build_def_file(version)
+            ifname = version.absolute_path(self.environment.source_dir,
+                                           self.environment.build_dir)
+            try:
+                ver_data = Path(ifname).read_text(encoding='utf-8').split('\n')
+            except FileNotFoundError:
+                raise InterpreterException('Version file not found.')
+            if len(ver_data) == 2 and ver_data[1] == '':
+                ver_data = ver_data[0:1]
+            if len(ver_data) != 1:
+                raise InterpreterException('Version file must contain exactly one line of text.')
+            self.project_version = ver_data[0]
+        elif isinstance(version, str):
+            self.project_version = version
+        else:
+            raise InvalidCode('The version keyword argument must be a string or a file.')
+        if self.build.project_version is None:
+            self.build.project_version = self.project_version
+        proj_license = mesonlib.stringlistify(kwargs.get('license', 'unknown'))
+        self.build.dep_manifest[proj_name] = {'version': self.project_version,
+                                              'license': proj_license}
+        if self.subproject in self.build.projects:
+            raise InvalidCode('Second call to project().')
+
+        # spdirname is the subproject_dir for this project, relative to self.subdir.
+        # self.subproject_dir is the subproject_dir for the main project, relative to top source dir.
+        spdirname = kwargs.get('subproject_dir')
+        if spdirname:
+            if not isinstance(spdirname, str):
+                raise InterpreterException('Subproject_dir must be a string')
+            if os.path.isabs(spdirname):
+                raise InterpreterException('Subproject_dir must not be an absolute path.')
+            if spdirname.startswith('.'):
+                raise InterpreterException('Subproject_dir must not begin with a period.')
+            if '..' in spdirname:
+                raise InterpreterException('Subproject_dir must not contain a ".." segment.')
+            if not self.is_subproject():
+                self.subproject_dir = spdirname
+        else:
+            spdirname = 'subprojects'
+        self.build.subproject_dir = self.subproject_dir
+
+        # Load wrap files from this (sub)project.
+        wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+        if not self.is_subproject() or wrap_mode != WrapMode.nopromote:
+            subdir = os.path.join(self.subdir, spdirname)
+            r = wrap.Resolver(self.environment.get_source_dir(), subdir, wrap_mode)
+            if self.is_subproject():
+                self.environment.wrap_resolver.merge_wraps(r)
+            else:
+                self.environment.wrap_resolver = r
+
+        self.build.projects[self.subproject] = proj_name
+        mlog.log('Project name:', mlog.bold(proj_name))
+        mlog.log('Project version:', mlog.bold(self.project_version))
+
+        self.add_languages(proj_langs, True, MachineChoice.HOST)
+        self.add_languages(proj_langs, False, MachineChoice.BUILD)
+
+        self.set_backend()
+        if not self.is_subproject():
+            self.check_stdlibs()
+
+    @FeatureNewKwargs('add_languages', '0.54.0', ['native'])
+    @permittedKwargs({'required', 'native'})
+    @stringArgs
+    def func_add_languages(self, node, args, kwargs):
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            for lang in sorted(args, key=compilers.sort_clink):
+                mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        if 'native' in kwargs:
+            return self.add_languages(args, required, self.machine_from_native_kwarg(kwargs))
+        else:
+            # absent 'native' means 'both' for backwards compatibility
+            tv = FeatureNew.get_target_version(self.subproject)
+            if FeatureNew.check_version(tv, '0.54.0'):
+                mlog.warning('add_languages is missing the native: keyword argument, assuming languages are wanted for both host and build.',
+                             location=self.current_node)
+
+            success = self.add_languages(args, False, MachineChoice.BUILD)
+            success &= self.add_languages(args, required, MachineChoice.HOST)
+            return success
+
+    @noArgsFlattening
+    @noKwargs
+    def func_message(self, node, args, kwargs):
+        if len(args) > 1:
+            FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject)
+        args_str = [stringifyUserArguments(i) for i in args]
+        self.message_impl(args_str)
+
+    def message_impl(self, args):
+        mlog.log(mlog.bold('Message:'), *args)
+
+    @noArgsFlattening
+    @FeatureNewKwargs('summary', '0.54.0', ['list_sep'])
+    @permittedKwargs({'section', 'bool_yn', 'list_sep'})
+    @FeatureNew('summary', '0.53.0')
+    def func_summary(self, node, args, kwargs):
+        if len(args) == 1:
+            if not isinstance(args[0], dict):
+                raise InterpreterException('Summary first argument must be dictionary.')
+            values = args[0]
+        elif len(args) == 2:
+            if not isinstance(args[0], str):
+                raise InterpreterException('Summary first argument must be string.')
+            values = {args[0]: args[1]}
+        else:
+            raise InterpreterException('Summary accepts at most 2 arguments.')
+        section = kwargs.get('section', '')
+        if not isinstance(section, str):
+            raise InterpreterException('Summary\'s section keyword argument must be string.')
+        self.summary_impl(section, values, kwargs)
+
+    def summary_impl(self, section, values, kwargs):
+        if self.subproject not in self.summary:
+            self.summary[self.subproject] = Summary(self.active_projectname, self.project_version)
+        self.summary[self.subproject].add_section(section, values, kwargs, self.subproject)
+
+    def _print_summary(self):
+        # Add automatic 'Subprojects' section in main project.
+        all_subprojects = collections.OrderedDict()
+        for name, subp in sorted(self.subprojects.items()):
+            value = subp.found()
+            if subp.disabled_feature:
+                value = [value, f'Feature {subp.disabled_feature!r} disabled']
+            elif subp.exception:
+                value = [value, str(subp.exception)]
+            elif subp.warnings > 0:
+                value = [value, f'{subp.warnings} warnings']
+            all_subprojects[name] = value
+        if all_subprojects:
+            self.summary_impl('Subprojects', all_subprojects,
+                              {'bool_yn': True,
+                               'list_sep': ' ',
+                              })
+        # Print all summaries, main project last.
+        mlog.log('')  # newline
+        main_summary = self.summary.pop('', None)
+        for subp_name, summary in sorted(self.summary.items()):
+            if self.subprojects[subp_name].found():
+                summary.dump()
+        if main_summary:
+            main_summary.dump()
+
+    @noArgsFlattening
+    @FeatureNew('warning', '0.44.0')
+    @noKwargs
+    def func_warning(self, node, args, kwargs):
+        if len(args) > 1:
+            FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject)
+        args_str = [stringifyUserArguments(i) for i in args]
+        mlog.warning(*args_str, location=node)
+
+    @noArgsFlattening
+    @noKwargs
+    def func_error(self, node, args, kwargs):
+        if len(args) > 1:
+            FeatureNew.single_use('error with more than one argument', '0.58.0', self.subproject)
+        args_str = [stringifyUserArguments(i) for i in args]
+        raise InterpreterException('Problem encountered: ' + ' '.join(args_str))
+
+    @noKwargs
+    @noPosargs
+    def func_exception(self, node, args, kwargs):
+        raise Exception()
+
+    def add_languages(self, args: T.Sequence[str], required: bool, for_machine: MachineChoice) -> bool:
+        success = self.add_languages_for(args, required, for_machine)
+        if not self.coredata.is_cross_build():
+            self.coredata.copy_build_options_from_regular_ones()
+        self._redetect_machines()
+        return success
+
+    def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
+        should = self.environment.properties.host.get('skip_sanity_check', False)
+        if not isinstance(should, bool):
+            raise InterpreterException('Option skip_sanity_check must be a boolean.')
+        if for_machine != MachineChoice.HOST and not should:
+            return False
+        if not self.environment.is_cross_build() and not should:
+            return False
+        return should
+
+    def add_languages_for(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
+        args = [a.lower() for a in args]
+        langs = set(self.coredata.compilers[for_machine].keys())
+        langs.update(args)
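+        # Vala and Cython compile to C, so requesting either of them implies
+        # that a C compiler is needed as well.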
+        if ('vala' in langs or 'cython' in langs) and 'c' not in langs:
+            if 'vala' in langs:
+                FeatureNew.single_use('Adding Vala language without C', '0.59.0', self.subproject)
+            args.append('c')
+
+        success = True
+        for lang in sorted(args, key=compilers.sort_clink):
+            clist = self.coredata.compilers[for_machine]
+            machine_name = for_machine.get_lower_case_name()
+            if lang in clist:
+                comp = clist[lang]
+            else:
+                try:
+                    comp = compilers.detect_compiler_for(self.environment, lang, for_machine)
+                    if comp is None:
+                        raise InvalidArguments('Tried to use unknown language "%s".' % lang)
+                    if self.should_skip_sanity_check(for_machine):
+                        mlog.log_once('Cross compiler sanity tests disabled via the cross file.')
+                    else:
+                        comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
+                except Exception:
+                    if not required:
+                        mlog.log('Compiler for language',
+                                 mlog.bold(lang), 'for the', machine_name,
+                                 'machine not found.')
+                        success = False
+                        continue
+                    else:
+                        raise
+
+            if for_machine == MachineChoice.HOST or self.environment.is_cross_build():
+                logger_fun = mlog.log
+            else:
+                logger_fun = mlog.debug
+            logger_fun(comp.get_display_language(), 'compiler for the', machine_name, 'machine:',
+                       mlog.bold(' '.join(comp.get_exelist())), comp.get_version_string())
+            if comp.linker is not None:
+                logger_fun(comp.get_display_language(), 'linker for the', machine_name, 'machine:',
+                           mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
+            self.build.ensure_static_linker(comp)
+
+        return success
+
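+    # Check whether the machine file provides an entry for one of the
+    # requested program names.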
+    def program_from_file_for(self, for_machine, prognames):
+        for p in prognames:
+            if isinstance(p, mesonlib.File):
+                continue # Always points to a local (i.e. self generated) file.
+            if not isinstance(p, str):
+                raise InterpreterException('Executable name must be a string')
+            prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
+            if prog.found():
+                return prog
+        return None
+
+    def program_from_system(self, args, search_dirs, extra_info):
+        # Search for scripts relative to current subdir.
+        # Do not cache found programs because find_program('foobar')
+        # might give different results when run from different source dirs.
+        source_dir = os.path.join(self.environment.get_source_dir(), self.subdir)
+        for exename in args:
+            if isinstance(exename, mesonlib.File):
+                if exename.is_built:
+                    search_dir = os.path.join(self.environment.get_build_dir(),
+                                              exename.subdir)
+                else:
+                    search_dir = os.path.join(self.environment.get_source_dir(),
+                                              exename.subdir)
+                exename = exename.fname
+                extra_search_dirs = []
+            elif isinstance(exename, str):
+                search_dir = source_dir
+                extra_search_dirs = search_dirs
+            else:
+                raise InvalidArguments(f'find_program only accepts strings and files, not {exename!r}')
+            extprog = ExternalProgram(exename, search_dir=search_dir,
+                                      extra_search_dirs=extra_search_dirs,
+                                      silent=True)
+            if extprog.found():
+                extra_info.append(f"({' '.join(extprog.get_command())})")
+                return extprog
+
+    def program_from_overrides(self, command_names, extra_info):
+        for name in command_names:
+            if not isinstance(name, str):
+                continue
+            if name in self.build.find_overrides:
+                exe = self.build.find_overrides[name]
+                extra_info.append(mlog.blue('(overridden)'))
+                return exe
+        return None
+
+    def store_name_lookups(self, command_names):
+        for name in command_names:
+            if isinstance(name, str):
+                self.build.searched_programs.add(name)
+
+    def add_find_program_override(self, name, exe):
+        if name in self.build.searched_programs:
+            raise InterpreterException(f'Tried to override finding of executable "{name}" which has already been found.')
+        if name in self.build.find_overrides:
+            raise InterpreterException(f'Tried to override executable "{name}" which has already been overridden.')
+        self.build.find_overrides[name] = exe
+
+    def notfound_program(self, args):
+        return NonExistingExternalProgram(' '.join(args))
+
+    # TODO update modules to always pass `for_machine`. It is bad form to assume
+    # the host machine.
+    def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST,
+                          required=True, silent=True, wanted='', search_dirs=None,
+                          version_func=None):
+        args = mesonlib.listify(args)
+
+        extra_info = []
+        progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info)
+        if progobj is None:
+            progobj = self.notfound_program(args)
+
+        if isinstance(progobj, ExternalProgram) and not progobj.found():
+            mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+            if required:
+                m = 'Program {!r} not found'
+                raise InterpreterException(m.format(progobj.get_name()))
+            return progobj
+
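+        # Determine which version to compare against the 'version' kwarg: a
+        # caller-supplied version_func, the providing (sub)project's version
+        # for overridden executables, or the version reported by the external
+        # program itself.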
+        if wanted:
+            if version_func:
+                version = version_func(progobj)
+            elif isinstance(progobj, build.Executable):
+                interp = self
+                if progobj.subproject:
+                    interp = self.subprojects[progobj.subproject].held_object
+                    assert isinstance(interp, Interpreter)
+                version = interp.project_version
+            elif isinstance(progobj, ExternalProgram):
+                version = progobj.get_version(self)
+            is_found, not_found, found = mesonlib.version_compare_many(version, wanted)
+            if not is_found:
+                mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.red('NO'),
+                         'found', mlog.normal_cyan(version), 'but need:',
+                         mlog.bold(', '.join([f"'{e}'" for e in not_found])), *extra_info)
+                if required:
+                    m = 'Invalid version of program, need {!r} {!r} found {!r}.'
+                    raise InterpreterException(m.format(progobj.name, not_found, version))
+                return self.notfound_program(args)
+            extra_info.insert(0, mlog.normal_cyan(version))
+
+        # Only store successful lookups
+        self.store_name_lookups(args)
+        mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.green('YES'), *extra_info)
+        if isinstance(progobj, build.Executable):
+            progobj.was_returned_by_find_program = True
+        return progobj
+
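+    # Lookup order for find_program(): user overrides first, then (unless the
+    # wrap mode forces the fallback) the machine file entries, the system
+    # search paths, a python3 special case, and finally the wrap provider for
+    # required programs.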
+    def program_lookup(self, args, for_machine, required, search_dirs, extra_info):
+        progobj = self.program_from_overrides(args, extra_info)
+        if progobj:
+            return progobj
+
+        fallback = None
+        wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+        if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
+            fallback = self.environment.wrap_resolver.find_program_provider(args)
+        if fallback and wrap_mode == WrapMode.forcefallback:
+            return self.find_program_fallback(fallback, args, required, extra_info)
+
+        progobj = self.program_from_file_for(for_machine, args)
+        if progobj is None:
+            progobj = self.program_from_system(args, search_dirs, extra_info)
+        if progobj is None and args[0].endswith('python3'):
+            prog = ExternalProgram('python3', mesonlib.python_command, silent=True)
+            progobj = prog if prog.found() else None
+        if progobj is None and fallback and required:
+            progobj = self.find_program_fallback(fallback, args, required, extra_info)
+
+        return progobj
+
+    def find_program_fallback(self, fallback, args, required, extra_info):
+        mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
+                 mlog.bold(' '.join(args)))
+        sp_kwargs = { 'required': required }
+        self.do_subproject(fallback, 'meson', sp_kwargs)
+        return self.program_from_overrides(args, extra_info)
+
+    @FeatureNewKwargs('find_program', '0.53.0', ['dirs'])
+    @FeatureNewKwargs('find_program', '0.52.0', ['version'])
+    @FeatureNewKwargs('find_program', '0.49.0', ['disabler'])
+    @disablerIfNotFound
+    @permittedKwargs({'required', 'native', 'version', 'dirs'})
+    def func_find_program(self, node, args, kwargs):
+        if not args:
+            raise InterpreterException('No program name specified.')
+
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self.notfound_program(args)
+
+        search_dirs = extract_search_dirs(kwargs)
+        wanted = mesonlib.stringlistify(kwargs.get('version', []))
+        for_machine = self.machine_from_native_kwarg(kwargs)
+        return self.find_program_impl(args, for_machine, required=required,
+                                      silent=False, wanted=wanted,
+                                      search_dirs=search_dirs)
+
+    def func_find_library(self, node, args, kwargs):
+        raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n'
+                          'Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object\n'
+                          'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
+                          )
+
+    # When adding kwargs, please check if they make sense in dependencies.get_dep_identifier()
+    @FeatureNewKwargs('dependency', '0.57.0', ['cmake_package_version'])
+    @FeatureNewKwargs('dependency', '0.56.0', ['allow_fallback'])
+    @FeatureNewKwargs('dependency', '0.54.0', ['components'])
+    @FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
+    @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
+    @FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
+    @FeatureNewKwargs('dependency', '0.40.0', ['method'])
+    @FeatureNewKwargs('dependency', '0.38.0', ['default_options'])
+    @disablerIfNotFound
+    @permittedKwargs(permitted_dependency_kwargs)
+    @typed_pos_args('dependency', str)
+    def func_dependency(self, node, args, kwargs):
+        # Replace '' by empty list of names
+        names = [args[0]] if args[0] else []
+        allow_fallback = kwargs.get('allow_fallback')
+        if allow_fallback is not None and not isinstance(allow_fallback, bool):
+            raise InvalidArguments('"allow_fallback" argument must be boolean')
+        fallback = kwargs.get('fallback')
+        default_options = kwargs.get('default_options')
+        df = DependencyFallbacksHolder(self, names, allow_fallback, default_options)
+        df.set_fallback(fallback)
+        not_found_message = kwargs.get('not_found_message', '')
+        if not isinstance(not_found_message, str):
+            raise InvalidArguments('The not_found_message must be a string.')
+        try:
+            d = df.lookup(kwargs)
+        except Exception:
+            if not_found_message:
+                self.message_impl([not_found_message])
+            raise
+        assert isinstance(d, Dependency)
+        if not d.found() and not_found_message:
+            self.message_impl([not_found_message])
+        # Ensure the correct include type
+        if 'include_type' in kwargs:
+            wanted = kwargs['include_type']
+            if not isinstance(wanted, str):
+                raise InvalidArguments('The `include_type` kwarg must be a string')
+            actual = d.get_include_type()
+            if wanted != actual:
+                mlog.debug(f'Current include type of {args[0]} is {actual}. Converting to requested {wanted}')
+                d = d.generate_system_dependency(wanted)
+        return d
+
+    @FeatureNew('disabler', '0.44.0')
+    @noKwargs
+    @noPosargs
+    def func_disabler(self, node, args, kwargs):
+        return Disabler()
+
+    @FeatureNewKwargs('executable', '0.42.0', ['implib'])
+    @FeatureNewKwargs('executable', '0.56.0', ['win_subsystem'])
+    @FeatureDeprecatedKwargs('executable', '0.56.0', ['gui_app'], extra_message="Use 'win_subsystem' instead.")
+    @permittedKwargs(build.known_exe_kwargs)
+    def func_executable(self, node, args, kwargs):
+        return self.build_target(node, args, kwargs, build.Executable)
+
+    @permittedKwargs(build.known_stlib_kwargs)
+    def func_static_lib(self, node, args, kwargs):
+        return self.build_target(node, args, kwargs, build.StaticLibrary)
+
+    @permittedKwargs(build.known_shlib_kwargs)
+    def func_shared_lib(self, node, args, kwargs):
+        holder = self.build_target(node, args, kwargs, build.SharedLibrary)
+        holder.shared_library_only = True
+        return holder
+
+    @permittedKwargs(known_library_kwargs)
+    def func_both_lib(self, node, args, kwargs):
+        return self.build_both_libraries(node, args, kwargs)
+
+    @FeatureNew('shared_module', '0.37.0')
+    @permittedKwargs(build.known_shmod_kwargs)
+    def func_shared_module(self, node, args, kwargs):
+        return self.build_target(node, args, kwargs, build.SharedModule)
+
+    @permittedKwargs(known_library_kwargs)
+    def func_library(self, node, args, kwargs):
+        return self.build_library(node, args, kwargs)
+
+    @permittedKwargs(build.known_jar_kwargs)
+    def func_jar(self, node, args, kwargs):
+        return self.build_target(node, args, kwargs, build.Jar)
+
+    @FeatureNewKwargs('build_target', '0.40.0', ['link_whole', 'override_options'])
+    @permittedKwargs(known_build_target_kwargs)
+    def func_build_target(self, node, args, kwargs):
+        if 'target_type' not in kwargs:
+            raise InterpreterException('Missing target_type keyword argument')
+        target_type = kwargs.pop('target_type')
+        if target_type == 'executable':
+            return self.build_target(node, args, kwargs, build.Executable)
+        elif target_type == 'shared_library':
+            return self.build_target(node, args, kwargs, build.SharedLibrary)
+        elif target_type == 'shared_module':
+            FeatureNew('build_target(target_type: \'shared_module\')',
+                       '0.51.0').use(self.subproject)
+            return self.build_target(node, args, kwargs, build.SharedModule)
+        elif target_type == 'static_library':
+            return self.build_target(node, args, kwargs, build.StaticLibrary)
+        elif target_type == 'both_libraries':
+            return self.build_both_libraries(node, args, kwargs)
+        elif target_type == 'library':
+            return self.build_library(node, args, kwargs)
+        elif target_type == 'jar':
+            return self.build_target(node, args, kwargs, build.Jar)
+        else:
+            raise InterpreterException('Unknown target_type.')
+
+    @permittedKwargs({'input', 'output', 'fallback', 'command', 'replace_string'})
+    @FeatureDeprecatedKwargs('custom_target', '0.47.0', ['build_always'],
+                             'combine build_by_default and build_always_stale instead.')
+    @noPosargs
+    def func_vcs_tag(self, node, args, kwargs):
+        if 'input' not in kwargs or 'output' not in kwargs:
+            raise InterpreterException('Keyword arguments input and output must exist')
+        if 'fallback' not in kwargs:
+            FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject)
+        fallback = kwargs.pop('fallback', self.project_version)
+        if not isinstance(fallback, str):
+            raise InterpreterException('Keyword argument fallback must be a string.')
+        replace_string = kwargs.pop('replace_string', '@VCS_TAG@')
+        regex_selector = '(.*)' # default regex selector for custom command: use complete output
+        vcs_cmd = kwargs.get('command', None)
+        if vcs_cmd and not isinstance(vcs_cmd, list):
+            vcs_cmd = [vcs_cmd]
+        source_dir = os.path.normpath(os.path.join(self.environment.get_source_dir(), self.subdir))
+        if vcs_cmd:
+            # Is the command an executable in path or maybe a script in the source tree?
+            vcs_cmd[0] = shutil.which(vcs_cmd[0]) or os.path.join(source_dir, vcs_cmd[0])
+        else:
+            vcs = mesonlib.detect_vcs(source_dir)
+            if vcs:
+                mlog.log('Found {} repository at {}'.format(vcs['name'], vcs['wc_dir']))
+                vcs_cmd = vcs['get_rev'].split()
+                regex_selector = vcs['rev_regex']
+            else:
+                vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force to use the fallback string
+        # vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
+        kwargs['command'] = self.environment.get_build_command() + \
+            ['--internal',
+             'vcstagger',
+             '@INPUT0@',
+             '@OUTPUT0@',
+             fallback,
+             source_dir,
+             replace_string,
+             regex_selector] + vcs_cmd
+        kwargs.setdefault('build_by_default', True)
+        kwargs.setdefault('build_always_stale', True)
+        return self._func_custom_target_impl(node, [kwargs['output']], kwargs)
+
+    @FeatureNew('subdir_done', '0.46.0')
+    @noPosargs
+    @noKwargs
+    def func_subdir_done(self, node, args, kwargs):
+        raise SubdirDoneRequest()
+
+    @stringArgs
+    @FeatureNewKwargs('custom_target', '0.57.0', ['env'])
+    @FeatureNewKwargs('custom_target', '0.48.0', ['console'])
+    @FeatureNewKwargs('custom_target', '0.47.0', ['install_mode', 'build_always_stale'])
+    @FeatureNewKwargs('custom_target', '0.40.0', ['build_by_default'])
+    @FeatureNewKwargs('custom_target', '0.59.0', ['feed'])
+    @permittedKwargs({'input', 'output', 'command', 'install', 'install_dir', 'install_mode',
+                      'build_always', 'capture', 'depends', 'depend_files', 'depfile',
+                      'build_by_default', 'build_always_stale', 'console', 'env',
+                      'feed'})
+    def func_custom_target(self, node, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name')
+        if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
+            FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject)
+        return self._func_custom_target_impl(node, args, kwargs)
+
+    def _func_custom_target_impl(self, node, args, kwargs):
+        'Implementation-only, without FeatureNew checks, for internal use'
+        name = args[0]
+        kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
+        if 'input' in kwargs:
+            try:
+                kwargs['input'] = self.source_strings_to_files(extract_as_list(kwargs, 'input'))
+            except mesonlib.MesonException:
+                mlog.warning('''Custom target input \'%s\' can\'t be converted to File object(s).
+This will become a hard error in the future.''' % kwargs['input'], location=self.current_node)
+        kwargs['env'] = self.unpack_env_kwarg(kwargs)
+        if 'command' in kwargs and isinstance(kwargs['command'], list) and kwargs['command']:
+            if isinstance(kwargs['command'][0], str):
+                kwargs['command'][0] = self.func_find_program(node, kwargs['command'][0], {})
+        tg = build.CustomTarget(name, self.subdir, self.subproject, kwargs, backend=self.backend)
+        self.add_target(name, tg)
+        return tg
+
+    @FeatureNewKwargs('run_target', '0.57.0', ['env'])
+    @permittedKwargs({'command', 'depends', 'env'})
+    def func_run_target(self, node, args, kwargs):
+        if len(args) > 1:
+            raise InvalidCode('Run_target takes only one positional argument: the target name.')
+        elif len(args) == 1:
+            if 'command' not in kwargs:
+                raise InterpreterException('Missing "command" keyword argument')
+            all_args = extract_as_list(kwargs, 'command')
+            deps = extract_as_list(kwargs, 'depends')
+        else:
+            raise InterpreterException('Run_target needs at least one positional argument.')
+
+        cleaned_args = []
+        for i in listify(all_args):
+            if not isinstance(i, (str, build.BuildTarget, build.CustomTarget, ExternalProgram, mesonlib.File)):
+                mlog.debug('Wrong type:', str(i))
+                raise InterpreterException('Invalid argument to run_target.')
+            if isinstance(i, ExternalProgram) and not i.found():
+                raise InterpreterException(f'Tried to use non-existing executable {i.name!r}')
+            cleaned_args.append(i)
+        if isinstance(cleaned_args[0], str):
+            cleaned_args[0] = self.func_find_program(node, cleaned_args[0], {})
+        name = args[0]
+        if not isinstance(name, str):
+            raise InterpreterException('First argument must be a string.')
+        cleaned_deps = []
+        for d in deps:
+            if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
+                raise InterpreterException('Depends items must be build targets.')
+            cleaned_deps.append(d)
+        env = self.unpack_env_kwarg(kwargs)
+        tg = build.RunTarget(name, cleaned_args, cleaned_deps, self.subdir, self.subproject, env)
+        self.add_target(name, tg)
+        full_name = (self.subproject, name)
+        assert full_name not in self.build.run_target_names
+        self.build.run_target_names.add(full_name)
+        return tg
+
+    @FeatureNew('alias_target', '0.52.0')
+    @noKwargs
+    def func_alias_target(self, node, args, kwargs):
+        if len(args) < 2:
+            raise InvalidCode('alias_target takes at least 2 arguments.')
+        name = args[0]
+        if not isinstance(name, str):
+            raise InterpreterException('First argument must be a string.')
+        deps = listify(args[1:])
+        for d in deps:
+            if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
+                raise InterpreterException('Depends items must be build targets.')
+        tg = build.AliasTarget(name, deps, self.subdir, self.subproject)
+        self.add_target(name, tg)
+        return tg
+
+    @permittedKwargs({'arguments', 'output', 'depends', 'depfile', 'capture',
+                      'preserve_path_from'})
+    @typed_pos_args('generator', (build.Executable, ExternalProgram))
+    @typed_kwargs(
+        'generator',
+        KwargInfo('arguments', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+        KwargInfo('output', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+        KwargInfo('depfile', str, validator=lambda x: 'Depfile must be a plain filename with a subdirectory' if has_path_sep(x) else None),
+        KwargInfo('capture', bool, default=False, since='0.43.0'),
+        KwargInfo('depends', ContainerTypeInfo(list, (build.BuildTarget, build.CustomTarget)), default=[], listify=True),
+    )
+    def func_generator(self, node: mparser.FunctionNode,
+                       args: T.Tuple[T.Union[build.Executable, ExternalProgram]],
+                       kwargs: 'kwargs.FuncGenerator') -> build.Generator:
+        for rule in kwargs['output']:
+            if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule:
+                raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.')
+            if has_path_sep(rule):
+                raise InvalidArguments('"output" must not contain a directory separator.')
+        if len(kwargs['output']) > 1:
+            for o in kwargs['output']:
+                if '@OUTPUT@' in o:
+                    raise InvalidArguments('Tried to use @OUTPUT@ in a rule with more than one output.')
+
+        gen = build.Generator(args[0], **kwargs)
+        self.generators.append(gen)
+        return gen
+
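+    # benchmark() and test() share the same positional signature; test()
+    # additionally accepts is_parallel. Illustrative usage (the executable
+    # variables are hypothetical):
+    #   test('unit', exe_tests, args: ['--fast'], timeout: 30)
+    #   benchmark('perf', exe_bench)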
+    @typed_pos_args('benchmark', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+    @typed_kwargs('benchmark', *TEST_KWARGS)
+    def func_benchmark(self, node: mparser.BaseNode,
+                       args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+                       kwargs: 'kwargs.FuncBenchmark') -> None:
+        self.add_test(node, args, kwargs, False)
+
+    @typed_pos_args('test', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+    @typed_kwargs('test', *TEST_KWARGS, KwargInfo('is_parallel', bool, default=True))
+    def func_test(self, node: mparser.BaseNode,
+                  args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+                  kwargs: 'kwargs.FuncTest') -> None:
+        self.add_test(node, args, kwargs, True)
+
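+    # The `env` kwarg may be an environment() object, a dict (since 0.52.0) or
+    # a list of 'KEY=value' strings; all three forms are normalised here into
+    # the underlying EnvironmentVariables.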
+    def unpack_env_kwarg(self, kwargs: T.Union[EnvironmentVariablesObject, T.Dict[str, str], T.List[str]]) -> build.EnvironmentVariables:
+        envlist = kwargs.get('env', EnvironmentVariablesObject())
+        if isinstance(envlist, EnvironmentVariablesObject):
+            env = envlist.vars
+        elif isinstance(envlist, dict):
+            FeatureNew.single_use('environment dictionary', '0.52.0', self.subproject)
+            env = EnvironmentVariablesObject(envlist)
+            env = env.vars
+        else:
+            # Convert from array to environment object
+            env = EnvironmentVariablesObject(envlist)
+            env = env.vars
+        return env
+
+    def make_test(self, node: mparser.BaseNode,
+                  args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+                  kwargs: 'kwargs.BaseTest') -> Test:
+        name = args[0]
+        if ':' in name:
+            mlog.deprecation(f'":" is not allowed in test name "{name}", it has been replaced with "_"',
+                             location=node)
+            name = name.replace(':', '_')
+        exe = args[1]
+        if isinstance(exe, mesonlib.File):
+            exe = self.func_find_program(node, args[1], {})
+
+        env = self.unpack_env_kwarg(kwargs)
+
+        if kwargs['timeout'] <= 0:
+            FeatureNew.single_use('test() timeout <= 0', '0.57.0', self.subproject)
+
+        prj = self.subproject if self.is_subproject() else self.build.project_name
+
+        suite: T.List[str] = []
+        for s in kwargs['suite']:
+            if s:
+                s = ':' + s
+            suite.append(prj.replace(' ', '_').replace(':', '_') + s)
+
+        return Test(name,
+                    prj,
+                    suite,
+                    exe,
+                    kwargs['depends'],
+                    kwargs.get('is_parallel', False),
+                    kwargs['args'],
+                    env,
+                    kwargs['should_fail'],
+                    kwargs['timeout'],
+                    kwargs['workdir'],
+                    kwargs['protocol'],
+                    kwargs['priority'])
+
+    def add_test(self, node: mparser.BaseNode, args: T.List, kwargs: T.Dict[str, T.Any], is_base_test: bool):
+        t = self.make_test(node, args, kwargs)
+        if is_base_test:
+            self.build.tests.append(t)
+            mlog.debug('Adding test', mlog.bold(t.name, True))
+        else:
+            self.build.benchmarks.append(t)
+            mlog.debug('Adding benchmark', mlog.bold(t.name, True))
+
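+    # Illustrative meson.build usage (header and subdir names are hypothetical):
+    #   install_headers('mylib.h', subdir: 'mylib')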
+    @typed_pos_args('install_headers', varargs=(str, mesonlib.File))
+    @typed_kwargs(
+        'install_headers',
+        KwargInfo('install_dir', (str, None)),
+        KwargInfo('subdir', (str, None)),
+        _INSTALL_MODE_KW.evolve(since='0.47.0'),
+    )
+    def func_install_headers(self, node: mparser.BaseNode,
+                             args: T.Tuple[T.List['mesonlib.FileOrString']],
+                             kwargs: 'kwargs.FuncInstallHeaders') -> build.Headers:
+        source_files = self.source_strings_to_files(args[0])
+        install_subdir = kwargs['subdir']
+        if install_subdir is not None and os.path.isabs(install_subdir):
+            mlog.deprecation('Subdir keyword must not be an absolute path. This will be a hard error in the next release.')
+
+        h = build.Headers(source_files, install_subdir, kwargs['install_dir'],
+                          kwargs['install_mode'], self.subproject)
+        self.build.headers.append(h)
+
+        return h
+
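+    # Illustrative meson.build usage (file name is hypothetical):
+    #   install_man('foo.1', locale: 'de')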
+    @typed_pos_args('install_man', varargs=(str, mesonlib.File))
+    @typed_kwargs(
+        'install_man',
+        KwargInfo('install_dir', (str, None)),
+        KwargInfo('locale', (str, None), since='0.58.0'),
+        _INSTALL_MODE_KW.evolve(since='0.47.0')
+    )
+    def func_install_man(self, node: mparser.BaseNode,
+                         args: T.Tuple[T.List['mesonlib.FileOrString']],
+                         kwargs: 'kwargs.FuncInstallMan') -> build.Man:
+        # We only need to narrow the type here: the inputs are limited to
+        # strings and Files, so only Files will be returned.
+        sources = self.source_strings_to_files(args[0])
+        for s in sources:
+            try:
+                num = int(s.rsplit('.', 1)[-1])
+            except (IndexError, ValueError):
+                num = 0
+            if not 1 <= num <= 9:
+                raise InvalidArguments('Man file must have a file extension consisting of a number between 1 and 9')
+
+        m = build.Man(sources, kwargs['install_dir'], kwargs['install_mode'],
+                      self.subproject, kwargs['locale'])
+        self.build.man.append(m)
+
+        return m
+
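+    # Illustrative meson.build usage (the dependency variable is hypothetical):
+    #   subdir('src')
+    #   subdir('tests', if_found: dep_gtest)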
+    @FeatureNewKwargs('subdir', '0.44.0', ['if_found'])
+    @permittedKwargs({'if_found'})
+    def func_subdir(self, node, args, kwargs):
+        self.validate_arguments(args, 1, [str])
+        mesonlib.check_direntry_issues(args)
+        if '..' in args[0]:
+            raise InvalidArguments('Subdir contains ..')
+        if self.subdir == '' and args[0] == self.subproject_dir:
+            raise InvalidArguments('Must not go into subprojects dir with subdir(), use subproject() instead.')
+        if self.subdir == '' and args[0].startswith('meson-'):
+            raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
+        for i in mesonlib.extract_as_list(kwargs, 'if_found'):
+            if not hasattr(i, 'found'):
+                raise InterpreterException('Object used in if_found does not have a found method.')
+            if not i.found():
+                return
+        prev_subdir = self.subdir
+        subdir = os.path.join(prev_subdir, args[0])
+        if os.path.isabs(subdir):
+            raise InvalidArguments('Subdir argument must be a relative path.')
+        absdir = os.path.join(self.environment.get_source_dir(), subdir)
+        symlinkless_dir = os.path.realpath(absdir)
+        build_file = os.path.join(symlinkless_dir, 'meson.build')
+        if build_file in self.processed_buildfiles:
+            raise InvalidArguments('Tried to enter directory "%s", which has already been visited.'
+                                   % subdir)
+        self.processed_buildfiles.add(build_file)
+        self.subdir = subdir
+        os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
+        buildfilename = os.path.join(self.subdir, environment.build_filename)
+        self.build_def_files.append(buildfilename)
+        absname = os.path.join(self.environment.get_source_dir(), buildfilename)
+        if not os.path.isfile(absname):
+            self.subdir = prev_subdir
+            raise InterpreterException(f"Non-existent build file '{buildfilename!s}'")
+        with open(absname, encoding='utf-8') as f:
+            code = f.read()
+        assert isinstance(code, str)
+        try:
+            codeblock = mparser.Parser(code, absname).parse()
+        except mesonlib.MesonException as me:
+            me.file = absname
+            raise me
+        try:
+            self.evaluate_codeblock(codeblock)
+        except SubdirDoneRequest:
+            pass
+        self.subdir = prev_subdir
+
+    def _get_kwarg_install_mode(self, kwargs: T.Dict[str, T.Any]) -> T.Optional[FileMode]:
+        if kwargs.get('install_mode', None) is None:
+            return None
+        install_mode: T.List[str] = []
+        mode = mesonlib.typeslistify(kwargs.get('install_mode', []), (str, int))
+        for m in mode:
+            # Arguments set to `false` are treated as unset (converted to None).
+            if m is False:
+                m = None
+            install_mode.append(m)
+        if len(install_mode) > 3:
+            raise InvalidArguments('Keyword argument install_mode takes at '
+                                   'most 3 arguments.')
+        if len(install_mode) > 0 and install_mode[0] is not None and \
+           not isinstance(install_mode[0], str):
+            raise InvalidArguments('Keyword argument install_mode requires the '
+                                   'permissions arg to be a string or false')
+        return FileMode(*install_mode)
+
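+    # Illustrative meson.build usage (file and directory names are hypothetical):
+    #   install_data('foo.dat', install_dir: 'share/foo', rename: 'bar.dat')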
+    @typed_pos_args('install_data', varargs=(str, mesonlib.File))
+    @typed_kwargs(
+        'install_data',
+        KwargInfo('install_dir', str),
+        KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File)), listify=True, default=[]),
+        KwargInfo('rename', ContainerTypeInfo(list, str), default=[], listify=True, since='0.46.0'),
+        _INSTALL_MODE_KW.evolve(since='0.38.0'),
+    )
+    def func_install_data(self, node: mparser.BaseNode,
+                          args: T.Tuple[T.List['mesonlib.FileOrString']],
+                          kwargs: 'kwargs.FuncInstallData') -> build.Data:
+        sources = self.source_strings_to_files(args[0] + kwargs['sources'])
+        rename = kwargs['rename'] or None
+        if rename:
+            if len(rename) != len(sources):
+                raise InvalidArguments(
+                    '"rename" and "sources" argument lists must be the same length if "rename" is given. '
+                    f'Rename has {len(rename)} elements and sources has {len(sources)}.')
+
+        data = build.Data(
+            sources, kwargs['install_dir'], kwargs['install_mode'],
+            self.subproject, rename)
+        self.build.data.append(data)
+        return data
+
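+    # Illustrative meson.build usage (directory and file names are hypothetical):
+    #   install_subdir('docs', install_dir: 'share/doc/foo',
+    #                  exclude_files: ['internal.md'])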
+    @typed_pos_args('install_subdir', str)
+    @typed_kwargs(
+        'install_subdir',
+        KwargInfo('install_dir', str, required=True),
+        KwargInfo('strip_directory', bool, default=False),
+        KwargInfo('exclude_files', ContainerTypeInfo(list, str),
+                  default=[], listify=True, since='0.42.0',
+                  validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+        KwargInfo('exclude_directories', ContainerTypeInfo(list, str),
+                  default=[], listify=True, since='0.42.0',
+                  validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+        _INSTALL_MODE_KW.evolve(since='0.38.0'),
+    )
+    def func_install_subdir(self, node: mparser.BaseNode, args: T.Tuple[str],
+                            kwargs: 'kwargs.FuncInstallSubdir') -> build.InstallDir:
+        exclude = (set(kwargs['exclude_files']), set(kwargs['exclude_directories']))
+        idir = build.InstallDir(
+            self.subdir,
+            args[0],
+            kwargs['install_dir'],
+            kwargs['install_mode'],
+            exclude,
+            kwargs['strip_directory'],
+            self.subproject)
+        self.build.install_dirs.append(idir)
+        return idir
+
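+    # Illustrative meson.build usage of the 'configuration' mode (input and
+    # output names are hypothetical):
+    #   conf = configuration_data()
+    #   conf.set('VERSION', '1.0')
+    #   configure_file(input: 'config.h.in', output: 'config.h',
+    #                  configuration: conf)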
+    @FeatureNewKwargs('configure_file', '0.47.0', ['copy', 'output_format', 'install_mode', 'encoding'])
+    @FeatureNewKwargs('configure_file', '0.46.0', ['format'])
+    @FeatureNewKwargs('configure_file', '0.41.0', ['capture'])
+    @FeatureNewKwargs('configure_file', '0.50.0', ['install'])
+    @FeatureNewKwargs('configure_file', '0.52.0', ['depfile'])
+    @permittedKwargs({'input', 'output', 'configuration', 'command', 'copy', 'depfile',
+                      'install_dir', 'install_mode', 'capture', 'install', 'format',
+                      'output_format', 'encoding'})
+    @noPosargs
+    def func_configure_file(self, node, args, kwargs):
+        if 'output' not in kwargs:
+            raise InterpreterException('Required keyword argument "output" not defined.')
+        actions = {'configuration', 'command', 'copy'}.intersection(kwargs.keys())
+        if len(actions) == 0:
+            raise InterpreterException('Must specify an action with one of these '
+                                       'keyword arguments: \'configuration\', '
+                                       '\'command\', or \'copy\'.')
+        elif len(actions) == 2:
+            raise InterpreterException('Must not specify both {!r} and {!r} '
+                                       'keyword arguments since they are '
+                                       'mutually exclusive.'.format(*actions))
+        elif len(actions) == 3:
+            raise InterpreterException('Must specify only one of {!r}, {!r}, and '
+                                       '{!r} keyword arguments since they are '
+                                       'mutually exclusive.'.format(*actions))
+        if 'capture' in kwargs:
+            if not isinstance(kwargs['capture'], bool):
+                raise InterpreterException('"capture" keyword must be a boolean.')
+            if 'command' not in kwargs:
+                raise InterpreterException('"capture" keyword requires "command" keyword.')
+
+        if 'format' in kwargs:
+            fmt = kwargs['format']
+            if not isinstance(fmt, str):
+                raise InterpreterException('"format" keyword must be a string.')
+        else:
+            fmt = 'meson'
+
+        if fmt not in ('meson', 'cmake', 'cmake@'):
+            raise InterpreterException('"format" possible values are "meson", "cmake" or "cmake@".')
+
+        if 'output_format' in kwargs:
+            output_format = kwargs['output_format']
+            if not isinstance(output_format, str):
+                raise InterpreterException('"output_format" keyword must be a string.')
+        else:
+            output_format = 'c'
+
+        if output_format not in ('c', 'nasm'):
+            raise InterpreterException('"format" possible values are "c" or "nasm".')
+
+        if 'depfile' in kwargs:
+            depfile = kwargs['depfile']
+            if not isinstance(depfile, str):
+                raise InterpreterException('depfile file name must be a string')
+        else:
+            depfile = None
+
+        # Validate input
+        inputs = self.source_strings_to_files(extract_as_list(kwargs, 'input'))
+        inputs_abs = []
+        for f in inputs:
+            if isinstance(f, mesonlib.File):
+                inputs_abs.append(f.absolute_path(self.environment.source_dir,
+                                                  self.environment.build_dir))
+                self.add_build_def_file(f)
+            else:
+                raise InterpreterException('Inputs can only be strings or file objects')
+        # Validate output
+        output = kwargs['output']
+        if not isinstance(output, str):
+            raise InterpreterException('Output file name must be a string')
+        if inputs_abs:
+            values = mesonlib.get_filenames_templates_dict(inputs_abs, None)
+            outputs = mesonlib.substitute_values([output], values)
+            output = outputs[0]
+            if depfile:
+                depfile = mesonlib.substitute_values([depfile], values)[0]
+        ofile_rpath = os.path.join(self.subdir, output)
+        if ofile_rpath in self.configure_file_outputs:
+            mesonbuildfile = os.path.join(self.subdir, 'meson.build')
+            current_call = f"{mesonbuildfile}:{self.current_lineno}"
+            first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath])
+            mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call)
+        else:
+            self.configure_file_outputs[ofile_rpath] = self.current_lineno
+        if os.path.dirname(output) != '':
+            raise InterpreterException('Output file name must not contain a subdirectory.')
+        (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
+        ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
+        # Perform the appropriate action
+        if 'configuration' in kwargs:
+            conf = kwargs['configuration']
+            if isinstance(conf, dict):
+                FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject)
+                conf = ConfigurationDataObject(self.subproject, conf)
+            elif not isinstance(conf, ConfigurationDataObject):
+                raise InterpreterException('Argument "configuration" is not of type configuration_data')
+            mlog.log('Configuring', mlog.bold(output), 'using configuration')
+            if len(inputs) > 1:
+                raise InterpreterException('At most one input file can be given in configuration mode')
+            if inputs:
+                os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+                file_encoding = kwargs.setdefault('encoding', 'utf-8')
+                missing_variables, confdata_useless = \
+                    mesonlib.do_conf_file(inputs_abs[0], ofile_abs, conf.conf_data,
+                                          fmt, file_encoding)
+                if missing_variables:
+                    var_list = ", ".join(map(repr, sorted(missing_variables)))
+                    mlog.warning(
+                        "The variable(s) %s in the input file '%s' are not "
+                        "present in the given configuration data." % (
+                            var_list, inputs[0]), location=node)
+                if confdata_useless:
+                    ifbase = os.path.basename(inputs_abs[0])
+                    mlog.warning('Got an empty configuration_data() object and found no '
+                                 f'substitutions in the input file {ifbase!r}. If you want to '
+                                 'copy a file to the build dir, use the \'copy:\' keyword '
+                                 'argument added in 0.47.0', location=node)
+            else:
+                mesonlib.dump_conf_header(ofile_abs, conf.conf_data, output_format)
+            conf.mark_used()
+        elif 'command' in kwargs:
+            if len(inputs) > 1:
+                FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject)
+            # We use absolute paths for input and output here because the cwd
+            # that the command is run from is 'unspecified', so it could change.
+            # Currently it's builddir/subdir for in_builddir else srcdir/subdir.
+            values = mesonlib.get_filenames_templates_dict(inputs_abs, [ofile_abs])
+            if depfile:
+                depfile = os.path.join(self.environment.get_scratch_dir(), depfile)
+                values['@DEPFILE@'] = depfile
+            # Substitute @INPUT@, @OUTPUT@, etc here.
+            cmd = mesonlib.substitute_values(kwargs['command'], values)
+            mlog.log('Configuring', mlog.bold(output), 'with command')
+            res = self.run_command_impl(node, cmd, {}, True)
+            if res.returncode != 0:
+                raise InterpreterException('Running configure command failed.\n%s\n%s' %
+                                           (res.stdout, res.stderr))
+            if 'capture' in kwargs and kwargs['capture']:
+                dst_tmp = ofile_abs + '~'
+                file_encoding = kwargs.setdefault('encoding', 'utf-8')
+                with open(dst_tmp, 'w', encoding=file_encoding) as f:
+                    f.writelines(res.stdout)
+                if inputs_abs:
+                    shutil.copymode(inputs_abs[0], dst_tmp)
+                mesonlib.replace_if_different(ofile_abs, dst_tmp)
+            if depfile:
+                mlog.log('Reading depfile:', mlog.bold(depfile))
+                with open(depfile, encoding='utf-8') as f:
+                    df = DepFile(f.readlines())
+                    deps = df.get_all_dependencies(ofile_fname)
+                    for dep in deps:
+                        self.add_build_def_file(dep)
+
+        elif 'copy' in kwargs:
+            if len(inputs_abs) != 1:
+                raise InterpreterException('Exactly one input file must be given in copy mode')
+            os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+            shutil.copy2(inputs_abs[0], ofile_abs)
+        else:
+            # Not reachable
+            raise AssertionError
+        # Install file if requested, we check for the empty string
+        # for backwards compatibility. That was the behaviour before
+        # 0.45.0 so preserve it.
+        idir = kwargs.get('install_dir', '')
+        if idir is False:
+            idir = ''
+            mlog.deprecation('Please use the new `install:` kwarg instead of passing '
+                             '`false` to `install_dir:`', location=node)
+        if not isinstance(idir, str):
+            if isinstance(idir, list) and len(idir) == 0:
+                mlog.deprecation('install_dir: kwarg must be a string and not an empty array. '
+                                 'Please use the install: kwarg to enable or disable installation. '
+                                 'This will be a hard error in the next release.')
+            else:
+                raise InterpreterException('"install_dir" must be a string')
+        install = kwargs.get('install', idir != '')
+        if not isinstance(install, bool):
+            raise InterpreterException('"install" must be a boolean')
+        if install:
+            if not idir:
+                raise InterpreterException('"install_dir" must be specified '
+                                           'when "install" in a configure_file '
+                                           'is true')
+            cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
+            install_mode = self._get_kwarg_install_mode(kwargs)
+            self.build.data.append(build.Data([cfile], idir, install_mode, self.subproject))
+        return mesonlib.File.from_built_file(self.subdir, output)
+
+    def extract_incdirs(self, kwargs):
+        prospectives = extract_as_list(kwargs, 'include_directories')
+        result = []
+        for p in prospectives:
+            if isinstance(p, build.IncludeDirs):
+                result.append(p)
+            elif isinstance(p, str):
+                result.append(self.build_incdir_object([p]))
+            else:
+                raise InterpreterException('Include directory objects can only be created from strings or include directories.')
+        return result
+
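+    # Illustrative meson.build usage (the directory name is hypothetical):
+    #   inc = include_directories('include', is_system: false)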
+    @permittedKwargs({'is_system'})
+    @stringArgs
+    def func_include_directories(self, node, args, kwargs):
+        return self.build_incdir_object(args, kwargs.get('is_system', False))
+
+    def build_incdir_object(self, incdir_strings: T.List[str], is_system: bool = False) -> build.IncludeDirs:
+        if not isinstance(is_system, bool):
+            raise InvalidArguments('Is_system must be boolean.')
+        src_root = self.environment.get_source_dir()
+        build_root = self.environment.get_build_dir()
+        absbase_src = os.path.join(src_root, self.subdir)
+        absbase_build = os.path.join(build_root, self.subdir)
+
+        for a in incdir_strings:
+            if a.startswith(src_root):
+                raise InvalidArguments(textwrap.dedent('''\
+                    Tried to form an absolute path to a source dir.
+                    You should not do that but use relative paths instead.
+
+                    To get include path to any directory relative to the current dir do
+
+                    incdir = include_directories(dirname)
+
+                    After this incdir will contain both the current source dir as well as the
+                    corresponding build dir. It can then be used in any subdirectory and
+                    Meson will take care of all the busywork to make paths work.
+
+                    Dirname can even be '.' to mark the current directory. Though you should
+                    remember that the current source and build directories are always
+                    put in the include directories by default so you only need to do
+                    include_directories('.') if you intend to use the result in a
+                    different subdirectory.
+                    '''))
+            else:
+                try:
+                    self.validate_within_subproject(self.subdir, a)
+                except InterpreterException:
+                    mlog.warning('include_directories sandbox violation!')
+                    print(textwrap.dedent(f'''\
+                        The project is trying to access the directory {a} which belongs to a different
+                        subproject. This is a problem as it hardcodes the relative paths of these two projects.
+                        This makes it impossible to compile the project in any other directory layout and also
+                        prevents the subproject from changing its own directory layout.
+
+                        Instead of poking directly at the internals the subproject should be executed and
+                        it should set a variable that the caller can then use. Something like:
+
+                        # In subproject
+                        some_dep = declare_dependency(include_directories: include_directories('include'))
+
+                        # In parent project
+                        some_dep = dependency('some')
+                        executable(..., dependencies: [some_dep])
+
+                        This warning will become a hard error in a future Meson release.
+                        '''))
+            absdir_src = os.path.join(absbase_src, a)
+            absdir_build = os.path.join(absbase_build, a)
+            if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
+                raise InvalidArguments('Include dir %s does not exist.' % a)
+        i = build.IncludeDirs(self.subdir, incdir_strings, is_system)
+        return i
+
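+    # Illustrative meson.build usage (the wrapper command is hypothetical):
+    #   add_test_setup('valgrind',
+    #                  exe_wrapper: ['valgrind', '--error-exitcode=1'],
+    #                  timeout_multiplier: 3)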
+    @permittedKwargs({'exe_wrapper', 'gdb', 'timeout_multiplier', 'env', 'is_default',
+                      'exclude_suites'})
+    @stringArgs
+    def func_add_test_setup(self, node, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Add_test_setup needs one argument for the setup name.')
+        setup_name = args[0]
+        if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
+            raise InterpreterException('Setup name may only contain alphanumeric characters and underscores (optionally prefixed with "projectname:").')
+        if ":" not in setup_name:
+            setup_name = (self.subproject if self.subproject else self.build.project_name) + ":" + setup_name
+        try:
+            inp = extract_as_list(kwargs, 'exe_wrapper')
+            exe_wrapper = []
+            for i in inp:
+                if isinstance(i, str):
+                    exe_wrapper.append(i)
+                elif isinstance(i, ExternalProgram):
+                    if not i.found():
+                        raise InterpreterException('Tried to use non-found executable.')
+                    exe_wrapper += i.get_command()
+                else:
+                    raise InterpreterException('Exe wrapper can only contain strings or external binaries.')
+        except KeyError:
+            exe_wrapper = None
+        gdb = kwargs.get('gdb', False)
+        if not isinstance(gdb, bool):
+            raise InterpreterException('Gdb option must be a boolean')
+        timeout_multiplier = kwargs.get('timeout_multiplier', 1)
+        if not isinstance(timeout_multiplier, int):
+            raise InterpreterException('Timeout multiplier must be a number.')
+        if timeout_multiplier <= 0:
+            FeatureNew('add_test_setup() timeout_multiplier <= 0', '0.57.0').use(self.subproject)
+        is_default = kwargs.get('is_default', False)
+        if not isinstance(is_default, bool):
+            raise InterpreterException('is_default option must be a boolean')
+        if is_default:
+            if self.build.test_setup_default_name is not None:
+                raise InterpreterException('\'%s\' is already set as default. '
+                                           'is_default can be set to true only once' % self.build.test_setup_default_name)
+            self.build.test_setup_default_name = setup_name
+        exclude_suites = mesonlib.stringlistify(kwargs.get('exclude_suites', []))
+        env = self.unpack_env_kwarg(kwargs)
+        self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper, gdb, timeout_multiplier, env,
+                                                             exclude_suites)
+
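+    # Illustrative meson.build usage of this family of functions (the flags are
+    # hypothetical):
+    #   add_global_arguments('-DSOME_DEFINE', language: 'c')
+    #   add_project_link_arguments('-lm', language: ['c', 'cpp'])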
+    @typed_pos_args('add_global_arguments', varargs=str)
+    @typed_kwargs('add_global_arguments', _NATIVE_KW, _LANGUAGE_KW)
+    def func_add_global_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+        self._add_global_arguments(node, self.build.global_args[kwargs['native']], args[0], kwargs)
+
+    @typed_pos_args('add_global_link_arguments', varargs=str)
+    @typed_kwargs('add_global_link_arguments', _NATIVE_KW, _LANGUAGE_KW)
+    def func_add_global_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+        self._add_global_arguments(node, self.build.global_link_args[kwargs['native']], args[0], kwargs)
+
+    @typed_pos_args('add_project_arguments', varargs=str)
+    @typed_kwargs('add_project_arguments', _NATIVE_KW, _LANGUAGE_KW)
+    def func_add_project_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+        self._add_project_arguments(node, self.build.projects_args[kwargs['native']], args[0], kwargs)
+
+    @typed_pos_args('add_project_link_arguments', varargs=str)
+    @typed_kwargs('add_project_link_arguments', _NATIVE_KW, _LANGUAGE_KW)
+    def func_add_project_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+        self._add_project_arguments(node, self.build.projects_link_args[kwargs['native']], args[0], kwargs)
+
+    def _warn_about_builtin_args(self, args: T.List[str]) -> None:
+        # -Wpedantic is deliberately not included, since some people want to use it but not use -Wextra
+        # see e.g.
+        # https://github.com/mesonbuild/meson/issues/3275#issuecomment-641354956
+        # https://github.com/mesonbuild/meson/issues/3742
+        warnargs = ('/W1', '/W2', '/W3', '/W4', '/Wall', '-Wall', '-Wextra')
+        optargs = ('-O0', '-O2', '-O3', '-Os', '/O1', '/O2', '/Os')
+        for arg in args:
+            if arg in warnargs:
+                mlog.warning(f'Consider using the built-in warning_level option instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg in optargs:
+                mlog.warning(f'Consider using the built-in optimization level instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg == '-Werror':
+                mlog.warning(f'Consider using the built-in werror option instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg == '-g':
+                mlog.warning(f'Consider using the built-in debug option instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg.startswith('-fsanitize'):
+                mlog.warning(f'Consider using the built-in option for sanitizers instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg.startswith('-std=') or arg.startswith('/std:'):
+                mlog.warning(f'Consider using the built-in option for language standard version instead of using "{arg}".',
+                             location=self.current_node)
+
+    def _add_global_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+                              args: T.List[str], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+        if self.is_subproject():
+            msg = f'Function \'{node.func_name}\' cannot be used in subprojects because ' \
+                  'there is no way to make that reliable.\nPlease only call ' \
+                  'this if is_subproject() returns false. Alternatively, ' \
+                  'define a variable that\ncontains your language-specific ' \
+                  'arguments and add it to the appropriate *_args kwarg ' \
+                  'in each target.'
+            raise InvalidCode(msg)
+        frozen = self.project_args_frozen or self.global_args_frozen
+        self._add_arguments(node, argsdict, frozen, args, kwargs)
+
+    def _add_project_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.Dict[str, T.List[str]]],
+                               args: T.List[str], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+        if self.subproject not in argsdict:
+            argsdict[self.subproject] = {}
+        self._add_arguments(node, argsdict[self.subproject],
+                            self.project_args_frozen, args, kwargs)
+
+    def _add_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+                       args_frozen: bool, args: T.List[str], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+        if args_frozen:
+            msg = f'Tried to use \'{node.func_name}\' after a build target has been declared.\n' \
+                  'This is not permitted. Please declare all arguments before your targets.'
+            raise InvalidCode(msg)
+
+        self._warn_about_builtin_args(args)
+
+        for lang in kwargs['language']:
+            argsdict[lang] = argsdict.get(lang, []) + args
+
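+    # Illustrative meson.build usage (the variable names are hypothetical):
+    #   env = environment({'FOO': 'bar'})   # dict form, since 0.52.0
+    #   env.set('PATH_EXTRA', '/opt/tools')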
+    @noKwargs
+    @noArgsFlattening
+    def func_environment(self, node, args, kwargs):
+        if len(args) > 1:
+            raise InterpreterException('environment takes at most one positional argument')
+        elif len(args) == 1:
+            FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject)
+            initial_values = args[0]
+            if not isinstance(initial_values, dict) and not isinstance(initial_values, list):
+                raise InterpreterException('environment first argument must be a dictionary or a list')
+        else:
+            initial_values = {}
+        return EnvironmentVariablesObject(initial_values, self.subproject)
+
+    @stringArgs
+    @noKwargs
+    def func_join_paths(self, node, args, kwargs):
+        return self.join_path_strings(args)
+
+    def run(self) -> None:
+        super().run()
+        mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
+        FeatureNew.report(self.subproject)
+        FeatureDeprecated.report(self.subproject)
+        if not self.is_subproject():
+            self.print_extra_warnings()
+        if self.subproject == '':
+            self._print_summary()
+
+    def print_extra_warnings(self) -> None:
+        # TODO cross compilation
+        for c in self.coredata.compilers.host.values():
+            if c.get_id() == 'clang':
+                self.check_clang_asan_lundef()
+                break
+
+    def check_clang_asan_lundef(self) -> None:
+        if OptionKey('b_lundef') not in self.coredata.options:
+            return
+        if OptionKey('b_sanitize') not in self.coredata.options:
+            return
+        if (self.coredata.options[OptionKey('b_lundef')].value and
+                self.coredata.options[OptionKey('b_sanitize')].value != 'none'):
+            mlog.warning('''Trying to use {} sanitizer on Clang with b_lundef.
+This will probably not work.
+Try setting b_lundef to false instead.'''.format(self.coredata.options[OptionKey('b_sanitize')].value),
+                         location=self.current_node)
+
+    # Check that the indicated file is within the same subproject
+    # as we currently are. This is to stop people doing
+    # nasty things like:
+    #
+    # f = files('../../master_src/file.c')
+    #
+    # Note that this is validated only when the file
+    # object is generated. The result can be used in a different
+    # subproject than it is defined in (due to e.g. a
+    # declare_dependency).
+    def validate_within_subproject(self, subdir, fname):
+        srcdir = Path(self.environment.source_dir)
+        norm = Path(srcdir, subdir, fname).resolve()
+        if os.path.isdir(norm):
+            inputtype = 'directory'
+        else:
+            inputtype = 'file'
+        if srcdir not in norm.parents:
+            # Grabbing files outside the source tree is ok.
+            # This is for vendor stuff like:
+            #
+            # /opt/vendorsdk/src/file_with_license_restrictions.c
+            return
+        project_root = Path(srcdir, self.root_subdir)
+        if norm == project_root:
+            return
+        if project_root not in norm.parents:
+            raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} outside current (sub)project.')
+        if project_root / self.subproject_dir in norm.parents:
+            raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} from a nested subproject.')
+
+
+    @T.overload
+    def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString']) -> T.List['mesonlib.File']: ...
+
+    def source_strings_to_files(self, sources: T.List['SourceInputs']) -> T.List['SourceOutputs']:
+        """Lower inputs to a list of Targets and Files, replacing any strings.
+
+        :param sources: A raw (Meson DSL) list of inputs (targets, files, and
+            strings)
+        :raises InterpreterException: if any of the inputs are of an invalid type
+        :return: A list of Targets and Files
+        """
+        mesonlib.check_direntry_issues(sources)
+        if not isinstance(sources, list):
+            sources = [sources]
+        results: T.List['SourceOutputs'] = []
+        for s in sources:
+            if isinstance(s, str):
+                self.validate_within_subproject(self.subdir, s)
+                results.append(mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s))
+            elif isinstance(s, mesonlib.File):
+                results.append(s)
+            elif isinstance(s, (build.GeneratedList, build.BuildTarget,
+                                build.CustomTargetIndex, build.CustomTarget)):
+                results.append(s)
+            else:
+                raise InterpreterException(f'Source item is {s!r} instead of '
+                                           'string or File-type object')
+        return results
+
+    def add_target(self, name, tobj):
+        if name == '':
+            raise InterpreterException('Target name must not be empty.')
+        if name.strip() == '':
+            raise InterpreterException('Target name must not consist only of whitespace.')
+        if name.startswith('meson-'):
+            raise InvalidArguments("Target names starting with 'meson-' are reserved "
+                                   "for Meson's internal use. Please rename.")
+        if name in coredata.FORBIDDEN_TARGET_NAMES:
+            raise InvalidArguments("Target name '%s' is reserved for Meson's "
+                                   "internal use. Please rename." % name)
+        # To permit an executable and a library to have the
+        # same name, such as "foo.exe" and "libfoo.a".
+        idname = tobj.get_id()
+        if idname in self.build.targets:
+            raise InvalidCode('Tried to create target "%s", but a target of that name already exists.' % name)
+        self.build.targets[idname] = tobj
+        if idname not in self.coredata.target_guids:
+            self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
+
+    @FeatureNew('both_libraries', '0.46.0')
+    def build_both_libraries(self, node, args, kwargs):
+        shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
+
+        # Check if user forces non-PIC static library.
+        pic = True
+        key = OptionKey('b_staticpic')
+        if 'pic' in kwargs:
+            pic = kwargs['pic']
+        elif key in self.environment.coredata.options:
+            pic = self.environment.coredata.options[key].value
+
+        if self.backend.name == 'xcode':
+            # Xcode is a bit special in that you can't (at least for the moment)
+            # form a library only from object file inputs. The simple but inefficient
+            # solution is to use the sources directly. This will lead to them being
+            # built twice. This is unfortunate and slow, but at least it works.
+            # Feel free to submit patches to get this fixed if it is an
+            # issue for you.
+            reuse_object_files = False
+        else:
+            reuse_object_files = pic
+
+        if reuse_object_files:
+            # Exclude sources from args and kwargs to avoid building them twice
+            static_args = [args[0]]
+            static_kwargs = kwargs.copy()
+            static_kwargs['sources'] = []
+            static_kwargs['objects'] = shared_lib.extract_all_objects()
+        else:
+            static_args = args
+            static_kwargs = kwargs
+
+        static_lib = self.build_target(node, static_args, static_kwargs, build.StaticLibrary)
+
+        return build.BothLibraries(shared_lib, static_lib)
+
+    def build_library(self, node, args, kwargs):
+        default_library = self.coredata.get_option(OptionKey('default_library', subproject=self.subproject))
+        if default_library == 'shared':
+            return self.build_target(node, args, kwargs, build.SharedLibrary)
+        elif default_library == 'static':
+            return self.build_target(node, args, kwargs, build.StaticLibrary)
+        elif default_library == 'both':
+            return self.build_both_libraries(node, args, kwargs)
+        else:
+            raise InterpreterException('Unknown default_library value: %s.' % default_library)
+
+    def build_target(self, node, args, kwargs, targetclass):
+        @FeatureNewKwargs('build target', '0.42.0', ['rust_crate_type', 'build_rpath', 'implicit_include_directories'])
+        @FeatureNewKwargs('build target', '0.41.0', ['rust_args'])
+        @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+        @FeatureNewKwargs('build target', '0.48.0', ['gnu_symbol_visibility'])
+        def build_target_decorator_caller(self, node, args, kwargs):
+            return True
+
+        build_target_decorator_caller(self, node, args, kwargs)
+
+        if not args:
+            raise InterpreterException('Target does not have a name.')
+        name, *sources = args
+        for_machine = self.machine_from_native_kwarg(kwargs)
+        if 'sources' in kwargs:
+            sources += listify(kwargs['sources'])
+        sources = self.source_strings_to_files(sources)
+        objs = extract_as_list(kwargs, 'objects')
+        kwargs['dependencies'] = extract_as_list(kwargs, 'dependencies')
+        kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
+        if 'extra_files' in kwargs:
+            ef = extract_as_list(kwargs, 'extra_files')
+            kwargs['extra_files'] = self.source_strings_to_files(ef)
+        self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
+        if targetclass not in {build.Executable, build.SharedLibrary, build.SharedModule, build.StaticLibrary, build.Jar}:
+            mlog.debug('Unknown target type:', str(targetclass))
+            raise RuntimeError('Unreachable code')
+        self.kwarg_strings_to_includedirs(kwargs)
+
+        # Filter out kwargs from other target types. For example 'soversion'
+        # passed to library() when default_library == 'static'.
+        kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
+
+        kwargs['include_directories'] = self.extract_incdirs(kwargs)
+        target = targetclass(name, self.subdir, self.subproject, for_machine, sources, objs, self.environment, kwargs)
+        target.project_version = self.project_version
+
+        self.add_stdlib_info(target)
+        self.add_target(name, target)
+        self.project_args_frozen = True
+        return target
+
+    def kwarg_strings_to_includedirs(self, kwargs):
+        if 'd_import_dirs' in kwargs:
+            items = mesonlib.extract_as_list(kwargs, 'd_import_dirs')
+            cleaned_items = []
+            for i in items:
+                if isinstance(i, str):
+                    # BW compatibility. This was permitted so we must support it
+                    # for a few releases so people can transition to "correct"
+                    # path declarations.
+                    if os.path.normpath(i).startswith(self.environment.get_source_dir()):
+                        mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
+This will become a hard error in the future.''', location=self.current_node)
+                        i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
+                        i = self.build_incdir_object([i])
+                cleaned_items.append(i)
+            kwargs['d_import_dirs'] = cleaned_items
+
+    def get_used_languages(self, target):
+        result = set()
+        for i in target.sources:
+            for lang, c in self.coredata.compilers[target.for_machine].items():
+                if c.can_compile(i):
+                    result.add(lang)
+                    break
+        return result
+
+    def add_stdlib_info(self, target):
+        for l in self.get_used_languages(target):
+            dep = self.build.stdlibs[target.for_machine].get(l, None)
+            if dep:
+                target.add_deps(dep)
+
+    def check_sources_exist(self, subdir, sources):
+        for s in sources:
+            if not isinstance(s, str):
+                continue # A non-string source is a generated source; those always exist.
+            fname = os.path.join(subdir, s)
+            if not os.path.isfile(fname):
+                raise InterpreterException('Tried to add non-existing source file %s.' % s)
+
+    # Only permit object extraction from the same subproject
+    def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
+        if self.subproject != buildtarget.subproject:
+            raise InterpreterException('Tried to extract objects from a different subproject.')
+
+    def is_subproject(self):
+        return self.subproject != ''
+
+    @noKwargs
+    @noArgsFlattening
+    @noSecondLevelHolderResolving
+    def func_set_variable(self, node, args, kwargs):
+        if len(args) != 2:
+            raise InvalidCode('Set_variable takes two arguments.')
+        varname, value = args
+        self.set_variable(varname, value, holderify=True)
+
+    @noKwargs
+    @noArgsFlattening
+    @permissive_unholder_return
+    def func_get_variable(self, node, args, kwargs):
+        if len(args) < 1 or len(args) > 2:
+            raise InvalidCode('Get_variable takes one or two arguments.')
+        varname = args[0]
+        if isinstance(varname, Disabler):
+            return varname
+        if not isinstance(varname, str):
+            raise InterpreterException('First argument must be a string.')
+        try:
+            return self.variables[varname]
+        except KeyError:
+            pass
+        if len(args) == 2:
+            return args[1]
+        raise InterpreterException('Tried to get unknown variable "%s".' % varname)
+
+    @stringArgs
+    @noKwargs
+    def func_is_variable(self, node, args, kwargs):
+        if len(args) != 1:
+            raise InvalidCode('Is_variable takes one argument.')
+        varname = args[0]
+        return varname in self.variables
+
+    @staticmethod
+    def machine_from_native_kwarg(kwargs: T.Dict[str, T.Any]) -> MachineChoice:
+        native = kwargs.get('native', False)
+        if not isinstance(native, bool):
+            raise InvalidArguments('Argument to "native" must be a boolean.')
+        return MachineChoice.BUILD if native else MachineChoice.HOST
+
+    @FeatureNew('is_disabler', '0.52.0')
+    @noKwargs
+    def func_is_disabler(self, node, args, kwargs):
+        if len(args) != 1:
+            raise InvalidCode('Is_disabler takes one argument.')
+        varname = args[0]
+        return isinstance(varname, Disabler)
+
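+    # Illustrative meson.build usage (mirrors Python's range semantics):
+    #   range(5)         # 0, 1, 2, 3, 4
+    #   range(2, 10, 3)  # 2, 5, 8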
+    @noKwargs
+    @FeatureNew('range', '0.58.0')
+    @typed_pos_args('range', int, optargs=[int, int])
+    def func_range(self, node, args: T.Tuple[int, T.Optional[int], T.Optional[int]], kwargs: T.Dict[str, T.Any]) -> RangeHolder:
+        start, stop, step = args
+        # Just like Python's range, we allow range(stop), range(start, stop), or
+        # range(start, stop, step)
+        if stop is None:
+            stop = start
+            start = 0
+        if step is None:
+            step = 1
+        # This is more strict than Python's range()
+        if start < 0:
+            raise InterpreterException('start cannot be negative')
+        if stop < start:
+            raise InterpreterException('stop cannot be less than start')
+        if step < 1:
+            raise InterpreterException('step must be >=1')
+        return RangeHolder(start, stop, step, subproject=self.subproject)
diff --git a/meson/mesonbuild/interpreter/interpreterobjects.py b/meson/mesonbuild/interpreter/interpreterobjects.py
new file mode 100644
index 000000000..5dc65d03f
--- /dev/null
+++ b/meson/mesonbuild/interpreter/interpreterobjects.py
@@ -0,0 +1,996 @@
+import os
+import shlex
+import subprocess
+import copy
+import textwrap
+
+from pathlib import Path, PurePath
+
+from .. import mesonlib
+from .. import coredata
+from .. import build
+from .. import mlog
+
+from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule
+from ..backend.backends import TestProtocol
+from ..interpreterbase import (
+                               ContainerTypeInfo, KwargInfo,
+                               InterpreterObject, MesonInterpreterObject, ObjectHolder, MutableInterpreterObject,
+                               FeatureCheckBase, FeatureNewKwargs, FeatureNew, FeatureDeprecated,
+                               typed_pos_args, typed_kwargs, stringArgs, permittedKwargs,
+                               noArgsFlattening, noPosargs, noKwargs, permissive_unholder_return, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs,
+                               flatten, resolve_second_level_holders, InterpreterException, InvalidArguments, InvalidCode)
+from ..dependencies import Dependency, ExternalLibrary, InternalDependency
+from ..programs import ExternalProgram
+from ..mesonlib import HoldableObject, MesonException, OptionKey, listify, Popen_safe
+
+import typing as T
+
+if T.TYPE_CHECKING:
+    from . import kwargs
+    from .interpreter import Interpreter
+    from ..environment import Environment
+    from ..envconfig import MachineInfo
+
+
+def extract_required_kwarg(kwargs: 'kwargs.ExtractRequired',
+                           subproject: str,
+                           feature_check: T.Optional[FeatureCheckBase] = None,
+                           default: bool = True) -> T.Tuple[bool, bool, T.Optional[str]]:
+    val = kwargs.get('required', default)
+    disabled = False
+    required = False
+    feature: T.Optional[str] = None
+    if isinstance(val, coredata.UserFeatureOption):
+        if not feature_check:
+            feature_check = FeatureNew('User option "feature"', '0.47.0')
+        feature_check.use(subproject)
+        feature = val.name
+        if val.is_disabled():
+            disabled = True
+        elif val.is_enabled():
+            required = True
+    elif isinstance(val, bool):
+        required = val
+    else:
+        raise InterpreterException('required keyword argument must be boolean or a feature option')
+
+    # Keep boolean value in kwargs to simplify other places where this kwarg is
+    # checked.
+    # TODO: this should be removed, and those callers should learn about FeatureOptions
+    kwargs['required'] = required
+
+    return disabled, required, feature
+
+def extract_search_dirs(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+    search_dirs_str = mesonlib.stringlistify(kwargs.get('dirs', []))
+    search_dirs = [Path(d).expanduser() for d in search_dirs_str]
+    for d in search_dirs:
+        if mesonlib.is_windows() and d.root.startswith('\\'):
+            # A Unix path starting with '/' is not absolute on Windows; discard
+            # it without failing so cross-platform directory arrays stay convenient.
+            continue
+        if not d.is_absolute():
+            raise InvalidCode(f'Search directory {d} is not an absolute path.')
+    return list(map(str, search_dirs))
+
+class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]):
+    def __init__(self, option: coredata.UserFeatureOption, interpreter: 'Interpreter'):
+        super().__init__(option, interpreter)
+        if option and option.is_auto():
+            # TODO: we need to cast here because options is not a TypedDict
+            self.held_object = T.cast(coredata.UserFeatureOption, self.env.coredata.options[OptionKey('auto_features')])
+            self.held_object.name = option.name
+        self.methods.update({'enabled': self.enabled_method,
+                             'disabled': self.disabled_method,
+                             'allowed': self.allowed_method,
+                             'auto': self.auto_method,
+                             'require': self.require_method,
+                             'disable_auto_if': self.disable_auto_if_method,
+                             })
+
+    @property
+    def value(self) -> str:
+        return 'disabled' if not self.held_object else self.held_object.value
+
+    def as_disabled(self) -> coredata.UserFeatureOption:
+        disabled = copy.deepcopy(self.held_object)
+        disabled.value = 'disabled'
+        return disabled
+
+    @noPosargs
+    @noKwargs
+    def enabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.value == 'enabled'
+
+    @noPosargs
+    @noKwargs
+    def disabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.value == 'disabled'
+
+    @noPosargs
+    @noKwargs
+    def allowed_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return not self.value == 'disabled'
+
+    @noPosargs
+    @noKwargs
+    def auto_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.value == 'auto'
+
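+    # Illustrative meson.build usage (the option name, condition and message
+    # are hypothetical):
+    #   f = get_option('docs').require(doxygen.found(),
+    #                                  error_message: 'docs need doxygen')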
+    @permittedKwargs({'error_message'})
+    def require_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
+        if len(args) != 1:
+            raise InvalidArguments('Expected 1 argument, got %d.' % (len(args), ))
+        if not isinstance(args[0], bool):
+            raise InvalidArguments('boolean argument expected.')
+        error_message = kwargs.pop('error_message', '')
+        if error_message and not isinstance(error_message, str):
+            raise InterpreterException("Error message must be a string.")
+        if args[0]:
+            return copy.deepcopy(self.held_object)
+
+        assert isinstance(error_message, str)
+        if self.value == 'enabled':
+            msg = f'Feature {self.held_object.name} cannot be enabled'
+            if error_message:
+                msg += ': ' + error_message
+            raise InterpreterException(msg)
+        return self.as_disabled()
+
+    @noKwargs
+    def disable_auto_if_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
+        if len(args) != 1:
+            raise InvalidArguments('Expected 1 argument, got %d.' % (len(args), ))
+        if not isinstance(args[0], bool):
+            raise InvalidArguments('boolean argument expected.')
+        return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled()
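+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+# for an option of type 'feature' named 'docs', a meson.build file might do
+#     docs = get_option('docs')
+#     if docs.allowed()
+#         subdir('docs')
+#     endif
+#     docs = docs.require(doxygen.found(), error_message: 'doxygen is needed for docs')
+# require() and disable_auto_if() return copies, leaving the original option untouched.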
+
+
+class RunProcess(MesonInterpreterObject):
+
+    def __init__(self,
+                 cmd: ExternalProgram,
+                 args: T.List[str],
+                 env: build.EnvironmentVariables,
+                 source_dir: str,
+                 build_dir: str,
+                 subdir: str,
+                 mesonintrospect: T.List[str],
+                 in_builddir: bool = False,
+                 check: bool = False,
+                 capture: bool = True) -> None:
+        super().__init__()
+        if not isinstance(cmd, ExternalProgram):
+            raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
+        self.capture = capture
+        self.returncode, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
+        self.methods.update({'returncode': self.returncode_method,
+                             'stdout': self.stdout_method,
+                             'stderr': self.stderr_method,
+                             })
+
+    def run_command(self,
+                    cmd: ExternalProgram,
+                    args: T.List[str],
+                    env: build.EnvironmentVariables,
+                    source_dir: str,
+                    build_dir: str,
+                    subdir: str,
+                    mesonintrospect: T.List[str],
+                    in_builddir: bool,
+                    check: bool = False) -> T.Tuple[int, str, str]:
+        command_array = cmd.get_command() + args
+        menv = {'MESON_SOURCE_ROOT': source_dir,
+                'MESON_BUILD_ROOT': build_dir,
+                'MESON_SUBDIR': subdir,
+                'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
+                }
+        if in_builddir:
+            cwd = os.path.join(build_dir, subdir)
+        else:
+            cwd = os.path.join(source_dir, subdir)
+        child_env = os.environ.copy()
+        child_env.update(menv)
+        child_env = env.get_env(child_env)
+        stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL
+        mlog.debug('Running command:', ' '.join(command_array))
+        try:
+            p, o, e = Popen_safe(command_array, stdout=stdout, env=child_env, cwd=cwd)
+            if self.capture:
+                mlog.debug('--- stdout ---')
+                mlog.debug(o)
+            else:
+                o = ''
+                mlog.debug('--- stdout disabled ---')
+            mlog.debug('--- stderr ---')
+            mlog.debug(e)
+            mlog.debug('')
+
+            if check and p.returncode != 0:
+                raise InterpreterException('Command "{}" failed with status {}.'.format(' '.join(command_array), p.returncode))
+
+            return p.returncode, o, e
+        except FileNotFoundError:
+            raise InterpreterException('Could not execute command "%s".' % ' '.join(command_array))
+
+    @noPosargs
+    @noKwargs
+    def returncode_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+        return self.returncode
+
+    @noPosargs
+    @noKwargs
+    def stdout_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.stdout
+
+    @noPosargs
+    @noKwargs
+    def stderr_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.stderr
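+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+# RunProcess backs the run_command() function, e.g. in a meson.build file
+#     r = run_command('git', 'rev-parse', '--short', 'HEAD', check: false)
+#     if r.returncode() == 0
+#         revision = r.stdout().strip()
+#     endif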
+
+# TODO: Parsing the initial values should be either done directly in the
+#       `Interpreter` or in `build.EnvironmentVariables`. This way, this class
+#       can be converted into a pure object holder.
+class EnvironmentVariablesObject(MutableInterpreterObject, MesonInterpreterObject):
+    # TODO: Move the type checking for initial_values out of this class and replace T.Any
+    def __init__(self, initial_values: T.Optional[T.Any] = None, subproject: str = ''):
+        super().__init__(subproject=subproject)
+        self.vars = build.EnvironmentVariables()
+        self.methods.update({'set': self.set_method,
+                             'append': self.append_method,
+                             'prepend': self.prepend_method,
+                             })
+        if isinstance(initial_values, dict):
+            for k, v in initial_values.items():
+                self.set_method([k, v], {})
+        elif initial_values is not None:
+            for e in mesonlib.listify(initial_values):
+                if not isinstance(e, str):
+                    raise InterpreterException('Env var definition must be a list of strings.')
+                if '=' not in e:
+                    raise InterpreterException('Env var definition must be of type key=val.')
+                (k, val) = e.split('=', 1)
+                k = k.strip()
+                val = val.strip()
+                if ' ' in k:
+                    raise InterpreterException('Env var key must not have spaces in it.')
+                self.set_method([k, val], {})
+
+    def __repr__(self) -> str:
+        repr_str = "<{0}: {1}>"
+        return repr_str.format(self.__class__.__name__, self.vars.envvars)
+
+    def unpack_separator(self, kwargs: T.Dict[str, T.Any]) -> str:
+        separator = kwargs.get('separator', os.pathsep)
+        if not isinstance(separator, str):
+            raise InterpreterException("The 'separator' keyword argument of"
+                                       " EnvironmentVariablesObject methods must be a string.")
+        return separator
+
+    def warn_if_has_name(self, name: str) -> None:
+        # Multiple append/prepend operations were not supported until 0.58.0.
+        if self.vars.has_name(name):
+            m = f'Overriding previous value of environment variable {name!r} with a new one'
+            FeatureNew('0.58.0', m).use(self.subproject)
+
+    @stringArgs
+    @permittedKwargs({'separator'})
+    @typed_pos_args('environment.set', str, varargs=str, min_varargs=1)
+    def set_method(self, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> None:
+        name, values = args
+        separator = self.unpack_separator(kwargs)
+        self.vars.set(name, values, separator)
+
+    @stringArgs
+    @permittedKwargs({'separator'})
+    @typed_pos_args('environment.append', str, varargs=str, min_varargs=1)
+    def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> None:
+        name, values = args
+        separator = self.unpack_separator(kwargs)
+        self.warn_if_has_name(name)
+        self.vars.append(name, values, separator)
+
+    @stringArgs
+    @permittedKwargs({'separator'})
+    @typed_pos_args('environment.prepend', str, varargs=str, min_varargs=1)
+    def prepend_method(self, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> None:
+        name, values = args
+        separator = self.unpack_separator(kwargs)
+        self.warn_if_has_name(name)
+        self.vars.prepend(name, values, separator)
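+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+# the environment() function returns this object, e.g.
+#     env = environment()
+#     env.set('FOO', 'bar')
+#     env.append('PATH', '/opt/tools/bin', separator: ':')
+#     env.prepend('LD_LIBRARY_PATH', meson.current_build_dir())
+#     test('mytest', exe, env: env)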
+
+
+class ConfigurationDataObject(MutableInterpreterObject, MesonInterpreterObject):
+    def __init__(self, subproject: str, initial_values: T.Optional[T.Dict[str, T.Any]] = None) -> None:
+        self.used = False # These objects become immutable after use in configure_file.
+        super().__init__(subproject=subproject)
+        self.conf_data = build.ConfigurationData()
+        self.methods.update({'set': self.set_method,
+                             'set10': self.set10_method,
+                             'set_quoted': self.set_quoted_method,
+                             'has': self.has_method,
+                             'get': self.get_method,
+                             'keys': self.keys_method,
+                             'get_unquoted': self.get_unquoted_method,
+                             'merge_from': self.merge_from_method,
+                             })
+        if isinstance(initial_values, dict):
+            for k, v in initial_values.items():
+                self.set_method([k, v], {})
+        elif initial_values:
+            raise AssertionError('Unsupported ConfigurationDataObject initial_values')
+
+    def is_used(self) -> bool:
+        return self.used
+
+    def mark_used(self) -> None:
+        self.used = True
+
+    def validate_args(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Tuple[str, T.Union[str, int, bool], T.Optional[str]]:
+        if len(args) == 1 and isinstance(args[0], list) and len(args[0]) == 2:
+            mlog.deprecation('Passing a list as the single argument to '
+                             'configuration_data.set is deprecated. This will '
+                             'become a hard error in the future.',
+                             location=self.current_node)
+            args = args[0]
+
+        if len(args) != 2:
+            raise InterpreterException("Configuration set requires 2 arguments.")
+        if self.used:
+            raise InterpreterException("Can not set values on configuration object that has been used.")
+        name, val = args
+        if not isinstance(val, (int, str)):
+            msg = f'Setting a configuration data value to {val!r} is invalid, ' \
+                  'and will fail at configure_file(). If you are using it ' \
+                  'just to store some values, please use a dict instead.'
+            mlog.deprecation(msg, location=self.current_node)
+        desc = kwargs.get('description', None)
+        if not isinstance(name, str):
+            raise InterpreterException("First argument to set must be a string.")
+        if desc is not None and not isinstance(desc, str):
+            raise InterpreterException('Description must be a string.')
+
+        # TODO: Remove the cast once we get rid of the deprecation
+        return name, T.cast(T.Union[str, bool, int], val), desc
+
+    @noArgsFlattening
+    def set_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+        (name, val, desc) = self.validate_args(args, kwargs)
+        self.conf_data.values[name] = (val, desc)
+
+    def set_quoted_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+        (name, val, desc) = self.validate_args(args, kwargs)
+        if not isinstance(val, str):
+            raise InterpreterException("Second argument to set_quoted must be a string.")
+        escaped_val = '\\"'.join(val.split('"'))
+        self.conf_data.values[name] = ('"' + escaped_val + '"', desc)
+
+    def set10_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+        (name, val, desc) = self.validate_args(args, kwargs)
+        if val:
+            self.conf_data.values[name] = (1, desc)
+        else:
+            self.conf_data.values[name] = (0, desc)
+
+    def has_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return args[0] in self.conf_data.values
+
+    @FeatureNew('configuration_data.get()', '0.38.0')
+    @noArgsFlattening
+    def get_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+        if len(args) < 1 or len(args) > 2:
+            raise InterpreterException('Get method takes one or two arguments.')
+        if not isinstance(args[0], str):
+            raise InterpreterException('The variable name must be a string.')
+        name = args[0]
+        if name in self.conf_data:
+            return self.conf_data.get(name)[0]
+        if len(args) > 1:
+            # Assertion does not work because setting other values is still
+            # supported, but deprecated. Use T.cast in the meantime (even though
+            # this is a lie).
+            # TODO: Fix this once the deprecation is removed
+            # assert isinstance(args[1], (int, str, bool))
+            return T.cast(T.Union[str, int, bool], args[1])
+        raise InterpreterException('Entry %s not in configuration data.' % name)
+
+    @FeatureNew('configuration_data.get_unquoted()', '0.44.0')
+    def get_unquoted_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+        if len(args) < 1 or len(args) > 2:
+            raise InterpreterException('Get method takes one or two arguments.')
+        if not isinstance(args[0], str):
+            raise InterpreterException('The variable name must be a string.')
+        name = args[0]
+        if name in self.conf_data:
+            val = self.conf_data.get(name)[0]
+        elif len(args) > 1:
+            assert isinstance(args[1], (str, int, bool))
+            val = args[1]
+        else:
+            raise InterpreterException('Entry %s not in configuration data.' % name)
+        if isinstance(val, str) and val[0] == '"' and val[-1] == '"':
+            return val[1:-1]
+        return val
+
+    def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]:
+        return self.conf_data.values[name]
+
+    @FeatureNew('configuration_data.keys()', '0.57.0')
+    @noPosargs
+    def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
+        return sorted(self.keys())
+
+    def keys(self) -> T.List[str]:
+        return list(self.conf_data.values.keys())
+
+    def merge_from_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+        if len(args) != 1:
+            raise InterpreterException('Merge_from takes one positional argument.')
+        from_object_holder = args[0]
+        if not isinstance(from_object_holder, ConfigurationDataObject):
+            raise InterpreterException('Merge_from argument must be a configuration data object.')
+        from_object = from_object_holder.conf_data
+        for k, v in from_object.values.items():
+            self.conf_data.values[k] = v
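+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+#     conf = configuration_data()
+#     conf.set('VERSION', meson.project_version(), description: 'project version')
+#     conf.set10('HAVE_FOO', foo_dep.found())
+#     conf.set_quoted('DATADIR', get_option('datadir'))
+#     configure_file(input: 'config.h.in', output: 'config.h', configuration: conf)
+# After configure_file() the object is marked used and becomes immutable (see mark_used()).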
+
+
+_PARTIAL_DEP_KWARGS = [
+    KwargInfo('compile_args', bool, default=False),
+    KwargInfo('link_args',    bool, default=False),
+    KwargInfo('links',        bool, default=False),
+    KwargInfo('includes',     bool, default=False),
+    KwargInfo('sources',      bool, default=False),
+]
+
+class DependencyHolder(ObjectHolder[Dependency]):
+    def __init__(self, dep: Dependency, interpreter: 'Interpreter'):
+        super().__init__(dep, interpreter)
+        self.methods.update({'found': self.found_method,
+                             'type_name': self.type_name_method,
+                             'version': self.version_method,
+                             'name': self.name_method,
+                             'get_pkgconfig_variable': self.pkgconfig_method,
+                             'get_configtool_variable': self.configtool_method,
+                             'get_variable': self.variable_method,
+                             'partial_dependency': self.partial_dependency_method,
+                             'include_type': self.include_type_method,
+                             'as_system': self.as_system_method,
+                             'as_link_whole': self.as_link_whole_method,
+                             })
+
+    def found(self) -> bool:
+        return self.found_method([], {})
+
+    @noPosargs
+    @noKwargs
+    def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.type_name
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        if self.held_object.type_name == 'internal':
+            return True
+        return self.held_object.found()
+
+    @noPosargs
+    @noKwargs
+    def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.get_version()
+
+    @noPosargs
+    @noKwargs
+    def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.get_name()
+
+    @FeatureDeprecated('Dependency.get_pkgconfig_variable', '0.56.0',
+                       'use Dependency.get_variable(pkgconfig : ...) instead')
+    @permittedKwargs({'define_variable', 'default'})
+    def pkgconfig_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        args = listify(args)
+        if len(args) != 1:
+            raise InterpreterException('get_pkgconfig_variable takes exactly one argument.')
+        varname = args[0]
+        if not isinstance(varname, str):
+            raise InterpreterException('Variable name must be a string.')
+        return self.held_object.get_pkgconfig_variable(varname, kwargs)
+
+    @FeatureNew('dep.get_configtool_variable', '0.44.0')
+    @FeatureDeprecated('Dependency.get_configtool_variable', '0.56.0',
+                       'use Dependency.get_variable(configtool : ...) instead')
+    @noKwargs
+    def configtool_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        args = listify(args)
+        if len(args) != 1:
+            raise InterpreterException('get_configtool_variable takes exactly one argument.')
+        varname = args[0]
+        if not isinstance(varname, str):
+            raise InterpreterException('Variable name must be a string.')
+        return self.held_object.get_configtool_variable(varname)
+
+    @FeatureNew('dep.partial_dependency', '0.46.0')
+    @noPosargs
+    @typed_kwargs('dep.partial_dependency', *_PARTIAL_DEP_KWARGS)
+    def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+        pdep = self.held_object.get_partial_dependency(**kwargs)
+        return pdep
+
+    @FeatureNew('dep.get_variable', '0.51.0')
+    @typed_pos_args('dep.get_variable', optargs=[str])
+    @permittedKwargs({'cmake', 'pkgconfig', 'configtool', 'internal', 'default_value', 'pkgconfig_define'})
+    @FeatureNewKwargs('dep.get_variable', '0.54.0', ['internal'])
+    def variable_method(self, args: T.Tuple[T.Optional[str]], kwargs: T.Dict[str, T.Any]) -> T.Union[str, T.List[str]]:
+        default_varname = args[0]
+        if default_varname is not None:
+            FeatureNew('0.58.0', 'Positional argument to dep.get_variable()').use(self.subproject)
+            for k in ['cmake', 'pkgconfig', 'configtool', 'internal']:
+                kwargs.setdefault(k, default_varname)
+        return self.held_object.get_variable(**kwargs)
+
+    @FeatureNew('dep.include_type', '0.52.0')
+    @noPosargs
+    @noKwargs
+    def include_type_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.get_include_type()
+
+    @FeatureNew('dep.as_system', '0.52.0')
+    @noKwargs
+    def as_system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency:
+        args = listify(args)
+        new_is_system = 'system'
+        if len(args) > 1:
+            raise InterpreterException('as_system takes only one optional value')
+        if len(args) == 1:
+            if not isinstance(args[0], str):
+                raise InterpreterException('as_system takes exactly one string parameter')
+            new_is_system = args[0]
+        new_dep = self.held_object.generate_system_dependency(new_is_system)
+        return new_dep
+
+    @FeatureNew('dep.as_link_whole', '0.56.0')
+    @noKwargs
+    @noPosargs
+    def as_link_whole_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency:
+        if not isinstance(self.held_object, InternalDependency):
+            raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects')
+        new_dep = self.held_object.generate_link_whole_dependency()
+        return new_dep
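+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+#     zdep = dependency('zlib', required: false)
+#     if zdep.found()
+#         prefix = zdep.get_variable(pkgconfig: 'prefix', default_value: '/usr')
+#         zdep_hdrs = zdep.partial_dependency(compile_args: true, includes: true)
+#     endif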
+
+class ExternalProgramHolder(ObjectHolder[ExternalProgram]):
+    def __init__(self, ep: ExternalProgram, interpreter: 'Interpreter') -> None:
+        super().__init__(ep, interpreter)
+        self.methods.update({'found': self.found_method,
+                             'path': self.path_method,
+                             'full_path': self.full_path_method})
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.found()
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('ExternalProgram.path', '0.55.0',
+                       'use ExternalProgram.full_path() instead')
+    def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._full_path()
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('ExternalProgram.full_path', '0.55.0')
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._full_path()
+
+    def _full_path(self) -> str:
+        if not self.found():
+            raise InterpreterException('Unable to get the path of a not-found external program')
+        path = self.held_object.get_path()
+        assert path is not None
+        return path
+
+    def found(self) -> bool:
+        return self.held_object.found()
+
+class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
+    def __init__(self, el: ExternalLibrary, interpreter: 'Interpreter'):
+        super().__init__(el, interpreter)
+        self.methods.update({'found': self.found_method,
+                             'type_name': self.type_name_method,
+                             'partial_dependency': self.partial_dependency_method,
+                             })
+
+    @noPosargs
+    @noKwargs
+    def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.type_name
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.held_object.found()
+
+    @FeatureNew('dep.partial_dependency', '0.46.0')
+    @noPosargs
+    @typed_kwargs('dep.partial_dependency', *_PARTIAL_DEP_KWARGS)
+    def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+        pdep = self.held_object.get_partial_dependency(**kwargs)
+        return pdep
+
+# A machine that's statically known from the cross file
+class MachineHolder(ObjectHolder['MachineInfo']):
+    def __init__(self, machine_info: 'MachineInfo', interpreter: 'Interpreter'):
+        super().__init__(machine_info, interpreter)
+        self.methods.update({'system': self.system_method,
+                             'cpu': self.cpu_method,
+                             'cpu_family': self.cpu_family_method,
+                             'endian': self.endian_method,
+                             })
+
+    @noPosargs
+    @noKwargs
+    def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.cpu_family
+
+    @noPosargs
+    @noKwargs
+    def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.cpu
+
+    @noPosargs
+    @noKwargs
+    def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.system
+
+    @noPosargs
+    @noKwargs
+    def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.endian
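+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+# build_machine, host_machine and target_machine expose this object, e.g.
+#     if host_machine.system() == 'windows' and host_machine.cpu_family() == 'x86_64'
+#         add_project_arguments('-DWIN64', language: 'c')
+#     endif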
+
+class IncludeDirsHolder(ObjectHolder[build.IncludeDirs]):
+    pass
+
+class FileHolder(ObjectHolder[mesonlib.File]):
+    pass
+
+class HeadersHolder(ObjectHolder[build.Headers]):
+    pass
+
+class DataHolder(ObjectHolder[build.Data]):
+    pass
+
+class InstallDirHolder(ObjectHolder[build.InstallDir]):
+    pass
+
+class ManHolder(ObjectHolder[build.Man]):
+    pass
+
+class GeneratedObjectsHolder(ObjectHolder[build.ExtractedObjects]):
+    pass
+
+class Test(MesonInterpreterObject):
+    def __init__(self, name: str, project: str, suite: T.List[str], exe: build.Executable,
+                 depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]],
+                 is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables,
+                 should_fail: bool, timeout: int, workdir: T.Optional[str], protocol: str,
+                 priority: int):
+        super().__init__()
+        self.name = name
+        self.suite = listify(suite)
+        self.project_name = project
+        self.exe = exe
+        self.depends = depends
+        self.is_parallel = is_parallel
+        self.cmd_args = cmd_args
+        self.env = env
+        self.should_fail = should_fail
+        self.timeout = timeout
+        self.workdir = workdir
+        self.protocol = TestProtocol.from_str(protocol)
+        self.priority = priority
+
+    def get_exe(self) -> build.Executable:
+        return self.exe
+
+    def get_name(self) -> str:
+        return self.name
+
+class NullSubprojectInterpreter(HoldableObject):
+    pass
+
+# TODO: This should really be an `ObjectHolder`, but the additional stuff in this
+#       class prevents this. Thus, this class should be split into a pure
+#       `ObjectHolder` and a class specifically for storing in `Interpreter`.
+class SubprojectHolder(MesonInterpreterObject):
+
+    def __init__(self, subinterpreter: T.Union['Interpreter', NullSubprojectInterpreter],
+                 subdir: str,
+                 warnings: int = 0,
+                 disabled_feature: T.Optional[str] = None,
+                 exception: T.Optional[MesonException] = None) -> None:
+        super().__init__()
+        self.held_object = subinterpreter
+        self.warnings = warnings
+        self.disabled_feature = disabled_feature
+        self.exception = exception
+        self.subdir = PurePath(subdir).as_posix()
+        self.methods.update({'get_variable': self.get_variable_method,
+                             'found': self.found_method,
+                             })
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.found()
+
+    def found(self) -> bool:
+        return not isinstance(self.held_object, NullSubprojectInterpreter)
+
+    @noKwargs
+    @noArgsFlattening
+    @permissive_unholder_return
+    def get_variable_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+        if len(args) < 1 or len(args) > 2:
+            raise InterpreterException('Get_variable takes one or two arguments.')
+        if isinstance(self.held_object, NullSubprojectInterpreter):  # == not self.found()
+            raise InterpreterException('Subproject "%s" is disabled and cannot be used with get_variable.' % (self.subdir))
+        varname = args[0]
+        if not isinstance(varname, str):
+            raise InterpreterException('Get_variable first argument must be a string.')
+        try:
+            return self.held_object.variables[varname]
+        except KeyError:
+            pass
+
+        if len(args) == 2:
+            return args[1]
+
+        raise InvalidArguments(f'Requested variable "{varname}" not found.')
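+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+#     sub = subproject('mylib', required: false)
+#     mylib_dep = sub.found() ? sub.get_variable('mylib_dep') : dependency('mylib')
+# get_variable() accepts an optional second argument that is returned as a fallback value.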
+
+class ModuleObjectHolder(ObjectHolder[ModuleObject]):
+    def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
+        modobj = self.held_object
+        method = modobj.methods.get(method_name)
+        if not method:
+            raise InvalidCode(f'Unknown method {method_name!r} in object.')
+        if not getattr(method, 'no-args-flattening', False):
+            args = flatten(args)
+        if not getattr(method, 'no-second-level-holder-flattening', False):
+            args, kwargs = resolve_second_level_holders(args, kwargs)
+        state = ModuleState(self.interpreter)
+        # Many modules do for example self.interpreter.find_program_impl(),
+        # so we have to ensure they use the current interpreter and not the one
+        # that first imported that module, otherwise it will use outdated
+        # overrides.
+        if isinstance(modobj, ExtensionModule):
+            modobj.interpreter = self.interpreter
+        ret = method(state, args, kwargs)
+        if isinstance(ret, ModuleReturnValue):
+            self.interpreter.process_new_values(ret.new_objects)
+            ret = ret.return_value
+        return ret
+
+class MutableModuleObjectHolder(ModuleObjectHolder, MutableInterpreterObject):
+    def __deepcopy__(self, memo: T.Dict[int, T.Any]) -> 'MutableModuleObjectHolder':
+        # Deepcopy only held object, not interpreter
+        modobj = copy.deepcopy(self.held_object, memo)
+        return MutableModuleObjectHolder(modobj, self.interpreter)
+
+
+_BuildTarget = T.TypeVar('_BuildTarget', bound=T.Union[build.BuildTarget, build.BothLibraries])
+
+class BuildTargetHolder(ObjectHolder[_BuildTarget]):
+    def __init__(self, target: _BuildTarget, interp: 'Interpreter'):
+        super().__init__(target, interp)
+        self.methods.update({'extract_objects': self.extract_objects_method,
+                             'extract_all_objects': self.extract_all_objects_method,
+                             'name': self.name_method,
+                             'get_id': self.get_id_method,
+                             'outdir': self.outdir_method,
+                             'full_path': self.full_path_method,
+                             'path': self.path_method,
+                             'found': self.found_method,
+                             'private_dir_include': self.private_dir_include_method,
+                             })
+
+    def __repr__(self) -> str:
+        r = '<{} {}: {}>'
+        h = self.held_object
+        return r.format(self.__class__.__name__, h.get_id(), h.filename)
+
+    @property
+    def _target_object(self) -> build.BuildTarget:
+        if isinstance(self.held_object, build.BothLibraries):
+            return self.held_object.get_default_object()
+        assert isinstance(self.held_object, build.BuildTarget)
+        return self.held_object
+
+    def is_cross(self) -> bool:
+        return not self._target_object.environment.machines.matches_build_machine(self._target_object.for_machine)
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        if not (isinstance(self.held_object, build.Executable) and self.held_object.was_returned_by_find_program):
+            FeatureNew.single_use('BuildTarget.found', '0.59.0', subproject=self.held_object.subproject)
+        return True
+
+    @noPosargs
+    @noKwargs
+    def private_dir_include_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+        return build.IncludeDirs('', [], False, [self.interpreter.backend.get_target_private_dir(self._target_object)])
+
+    @noPosargs
+    @noKwargs
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('BuildTarget.path', '0.55.0', 'Use BuildTarget.full_path instead')
+    def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+    @noPosargs
+    @noKwargs
+    def outdir_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_dir(self._target_object)
+
+    @noKwargs
+    @typed_pos_args('extract_objects', varargs=(mesonlib.File, str))
+    def extract_objects_method(self, args: T.Tuple[T.List[mesonlib.FileOrString]], kwargs: TYPE_nkwargs) -> build.ExtractedObjects:
+        return self._target_object.extract_objects(args[0])
+
+    @noPosargs
+    @typed_kwargs(
+        'extract_all_objects',
+        KwargInfo(
+            'recursive', bool, default=False, since='0.46.0',
+            not_set_warning=textwrap.dedent('''\
+                extract_all_objects called without setting recursive
+                keyword argument. Meson currently defaults to
+                non-recursive to maintain backward compatibility but
+                the default will be changed in the future.
+            ''')
+        )
+    )
+    def extract_all_objects_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.BuildTargeMethodExtractAllObjects') -> build.ExtractedObjects:
+        return self._target_object.extract_all_objects(kwargs['recursive'])
+
+    @noPosargs
+    @noKwargs
+    def get_id_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._target_object.get_id()
+
+    @FeatureNew('name', '0.54.0')
+    @noPosargs
+    @noKwargs
+    def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._target_object.name
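+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+#     exe = executable('tool', 'tool.c')
+#     message(exe.full_path())
+#     objs = exe.extract_all_objects(recursive: true)
+#     inc = exe.private_dir_include()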
+
+class ExecutableHolder(BuildTargetHolder[build.Executable]):
+    pass
+
+class StaticLibraryHolder(BuildTargetHolder[build.StaticLibrary]):
+    pass
+
+class SharedLibraryHolder(BuildTargetHolder[build.SharedLibrary]):
+    pass
+
+class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
+    def __init__(self, libs: build.BothLibraries, interp: 'Interpreter'):
+        # FIXME: This build target always represents the shared library, but
+        # that should be configurable.
+        super().__init__(libs, interp)
+        self.methods.update({'get_shared_lib': self.get_shared_lib_method,
+                             'get_static_lib': self.get_static_lib_method,
+                             })
+
+    def __repr__(self) -> str:
+        r = '<{} {}: {}, {}: {}>'
+        h1 = self.held_object.shared
+        h2 = self.held_object.static
+        return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename)
+
+    @noPosargs
+    @noKwargs
+    def get_shared_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.SharedLibrary:
+        return self.held_object.shared
+
+    @noPosargs
+    @noKwargs
+    def get_static_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.StaticLibrary:
+        return self.held_object.static
+
+class SharedModuleHolder(BuildTargetHolder[build.SharedModule]):
+    pass
+
+class JarHolder(BuildTargetHolder[build.Jar]):
+    pass
+
+class CustomTargetIndexHolder(ObjectHolder[build.CustomTargetIndex]):
+    def __init__(self, target: build.CustomTargetIndex, interp: 'Interpreter'):
+        super().__init__(target, interp)
+        self.methods.update({'full_path': self.full_path_method,
+                             })
+
+    @FeatureNew('custom_target[i].full_path', '0.54.0')
+    @noPosargs
+    @noKwargs
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        assert self.interpreter.backend is not None
+        return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+class CustomTargetHolder(ObjectHolder[build.CustomTarget]):
+    def __init__(self, target: 'build.CustomTarget', interp: 'Interpreter'):
+        super().__init__(target, interp)
+        self.methods.update({'full_path': self.full_path_method,
+                             'to_list': self.to_list_method,
+                             })
+
+    def __repr__(self) -> str:
+        r = '<{} {}: {}>'
+        h = self.held_object
+        return r.format(self.__class__.__name__, h.get_id(), h.command)
+
+    @noPosargs
+    @noKwargs
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+    @FeatureNew('custom_target.to_list', '0.54.0')
+    @noPosargs
+    @noKwargs
+    def to_list_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[build.CustomTargetIndex]:
+        result = []
+        for i in self.held_object:
+            result.append(i)
+        return result
+
+    def __getitem__(self, index: int) -> build.CustomTargetIndex:
+        return self.held_object[index]
+
+    def __setitem__(self, index: int, value: T.Any) -> None:  # lgtm[py/unexpected-raise-in-special-method]
+        raise InterpreterException('Cannot set a member of a CustomTarget')
+
+    def __delitem__(self, index: int) -> None:  # lgtm[py/unexpected-raise-in-special-method]
+        raise InterpreterException('Cannot delete a member of a CustomTarget')
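+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+#     ct = custom_target('gen', output: ['a.c', 'b.c'], command: [prog, '@OUTPUT@'])
+#     message(ct.full_path())
+#     srcs = ct.to_list()           # or index a single output: ct[0]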
+
+class RunTargetHolder(ObjectHolder[build.RunTarget]):
+    pass
+
+class AliasTargetHolder(ObjectHolder[build.AliasTarget]):
+    pass
+
+class GeneratedListHolder(ObjectHolder[build.GeneratedList]):
+    pass
+
+class GeneratorHolder(ObjectHolder[build.Generator]):
+    def __init__(self, gen: build.Generator, interpreter: 'Interpreter'):
+        super().__init__(gen, interpreter)
+        self.methods.update({'process': self.process_method})
+
+    @typed_pos_args('generator.process', min_varargs=1, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+    @typed_kwargs(
+        'generator.process',
+        KwargInfo('preserve_path_from', str, since='0.45.0'),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+    )
+    def process_method(self,
+                       args: T.Tuple[T.List[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]],
+                       kwargs: 'kwargs.GeneratorProcess') -> build.GeneratedList:
+        preserve_path_from = kwargs['preserve_path_from']
+        if preserve_path_from is not None:
+            preserve_path_from = os.path.normpath(preserve_path_from)
+            if not os.path.isabs(preserve_path_from):
+                # This is a bit of a hack. Fix properly before merging.
+                raise InvalidArguments('Preserve_path_from must be an absolute path for now. Sorry.')
+
+        if any(isinstance(a, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for a in args[0]):
+            FeatureNew.single_use(
+                'Calling generator.process with CustomTarget or Index of CustomTarget.',
+                '0.57.0', self.interpreter.subproject)
+
+        gl = self.held_object.process_files(args[0], self.interpreter,
+                                            preserve_path_from, extra_args=kwargs['extra_args'])
+
+        return gl
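+# Illustrative sketch (assumed typical usage, not part of the upstream sources):
+#     gen = generator(prog, output: '@BASENAME@.c', arguments: ['@INPUT@', '-o', '@OUTPUT@'])
+#     generated = gen.process('a.l', 'b.l', extra_args: ['--verbose'],
+#                             preserve_path_from: meson.current_source_dir())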
diff --git a/meson/mesonbuild/interpreter/kwargs.py b/meson/mesonbuild/interpreter/kwargs.py
new file mode 100644
index 000000000..b92b66fd7
--- /dev/null
+++ b/meson/mesonbuild/interpreter/kwargs.py
@@ -0,0 +1,139 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 The Meson Developers
+# Copyright © 2021 Intel Corporation
+
+"""Keyword Argument type annotations."""
+
+import typing as T
+
+from typing_extensions import TypedDict, Literal
+
+from .. import build
+from .. import coredata
+from ..mesonlib import MachineChoice, File, FileMode, FileOrString
+from .interpreterobjects import EnvironmentVariablesObject
+
+
+class FuncAddProjectArgs(TypedDict):
+
+    """Keyword Arguments for the add_*_arguments family of arguments.
+
+    This includes `add_global_arguments`, `add_project_arguments`, and their
+    link variants.
+
+    Because of the use of a converter function, we get the native keyword as
+    a MachineChoice instance already.
+    """
+
+    native: MachineChoice
+    language: T.List[str]
+
+
+class BaseTest(TypedDict):
+
+    """Shared base for `test` and `benchmark` keyword arguments, also used by the Rust module."""
+
+    args: T.List[T.Union[str, File, build.Target]]
+    should_fail: bool
+    timeout: int
+    workdir: T.Optional[str]
+    depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]]
+    priority: int
+    env: T.Union[EnvironmentVariablesObject, T.List[str], T.Dict[str, str], str]
+    suite: T.List[str]
+
+
+class FuncBenchmark(BaseTest):
+
+    """Keyword Arguments shared between `test` and `benchmark`."""
+
+    protocol: Literal['exitcode', 'tap', 'gtest', 'rust']
+
+
+class FuncTest(FuncBenchmark):
+
+    """Keyword Arguments for `test`
+
+    `test` only adds the `is_parallel` argument over benchmark, so inheritance
+    is helpful here.
+    """
+
+    is_parallel: bool
+
+
+class ExtractRequired(TypedDict):
+
+    """Keyword Arguments consumed by the `extract_required_kwargs` function.
+
+    Any function that uses the `required` keyword argument which accepts either
+    a boolean or a feature option should inherit its arguments from this class.
+    """
+
+    required: T.Union[bool, coredata.UserFeatureOption]
+
+
+class FuncGenerator(TypedDict):
+
+    """Keyword Arguments for the generator function."""
+
+    arguments: T.List[str]
+    output: T.List[str]
+    depfile: bool
+    capture: bool
+    depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+
+
+class GeneratorProcess(TypedDict):
+
+    """Keyword Arguments for generator.process."""
+
+    preserve_path_from: T.Optional[str]
+    extra_args: T.List[str]
+
+class DependencyMethodPartialDependency(TypedDict):
+
+    """Keyword Arguments for the dep.partial_dependency methods."""
+
+    compile_args: bool
+    link_args: bool
+    links: bool
+    includes: bool
+    sources: bool
+
+class BuildTargeMethodExtractAllObjects(TypedDict):
+    recursive: bool
+
+class FuncInstallSubdir(TypedDict):
+
+    install_dir: str
+    strip_directory: bool
+    exclude_files: T.List[str]
+    exclude_directories: T.List[str]
+    install_mode: FileMode
+
+
+class FuncInstallData(TypedDict):
+
+    install_dir: str
+    sources: T.List[FileOrString]
+    rename: T.List[str]
+    install_mode: FileMode
+
+
+class FuncInstallHeaders(TypedDict):
+
+    install_dir: T.Optional[str]
+    install_mode: FileMode
+    subdir: T.Optional[str]
+
+
+class FuncInstallMan(TypedDict):
+
+    install_dir: T.Optional[str]
+    install_mode: FileMode
+    locale: T.Optional[str]
+
+
+class FuncImportModule(ExtractRequired):
+
+    disabler: bool
diff --git a/meson/mesonbuild/interpreter/mesonmain.py b/meson/mesonbuild/interpreter/mesonmain.py
new file mode 100644
index 000000000..97a695b9d
--- /dev/null
+++ b/meson/mesonbuild/interpreter/mesonmain.py
@@ -0,0 +1,382 @@
+import os
+
+from .. import mesonlib
+from .. import dependencies
+from .. import build
+from .. import mlog
+
+from ..mesonlib import MachineChoice, OptionKey
+from ..programs import OverrideProgram, ExternalProgram
+from ..interpreterbase import (MesonInterpreterObject, FeatureNewKwargs, FeatureNew, FeatureDeprecated,
+                               typed_pos_args, permittedKwargs, noArgsFlattening, noPosargs, noKwargs,
+                               MesonVersionString, InterpreterException)
+
+from .interpreterobjects import (ExecutableHolder, ExternalProgramHolder,
+                                 CustomTargetHolder, CustomTargetIndexHolder,
+                                 EnvironmentVariablesObject)
+
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .interpreter import Interpreter
+
+class MesonMain(MesonInterpreterObject):
+    def __init__(self, build: 'build.Build', interpreter: 'Interpreter'):
+        super().__init__(subproject=interpreter.subproject)
+        self.build = build
+        self.interpreter = interpreter
+        self.methods.update({'get_compiler': self.get_compiler_method,
+                             'is_cross_build': self.is_cross_build_method,
+                             'has_exe_wrapper': self.has_exe_wrapper_method,
+                             'can_run_host_binaries': self.can_run_host_binaries_method,
+                             'is_unity': self.is_unity_method,
+                             'is_subproject': self.is_subproject_method,
+                             'current_source_dir': self.current_source_dir_method,
+                             'current_build_dir': self.current_build_dir_method,
+                             'source_root': self.source_root_method,
+                             'build_root': self.build_root_method,
+                             'project_source_root': self.project_source_root_method,
+                             'project_build_root': self.project_build_root_method,
+                             'global_source_root': self.global_source_root_method,
+                             'global_build_root': self.global_build_root_method,
+                             'add_install_script': self.add_install_script_method,
+                             'add_postconf_script': self.add_postconf_script_method,
+                             'add_dist_script': self.add_dist_script_method,
+                             'install_dependency_manifest': self.install_dependency_manifest_method,
+                             'override_dependency': self.override_dependency_method,
+                             'override_find_program': self.override_find_program_method,
+                             'project_version': self.project_version_method,
+                             'project_license': self.project_license_method,
+                             'version': self.version_method,
+                             'project_name': self.project_name_method,
+                             'get_cross_property': self.get_cross_property_method,
+                             'get_external_property': self.get_external_property_method,
+                             'has_external_property': self.has_external_property_method,
+                             'backend': self.backend_method,
+                             'add_devenv': self.add_devenv_method,
+                             })
+
+    def _find_source_script(self, prog: T.Union[str, mesonlib.File, build.Executable, ExternalProgram], args):
+
+        if isinstance(prog, (build.Executable, ExternalProgram)):
+            return self.interpreter.backend.get_executable_serialisation([prog] + args)
+        found = self.interpreter.func_find_program({}, prog, {})
+        es = self.interpreter.backend.get_executable_serialisation([found] + args)
+        es.subproject = self.interpreter.subproject
+        return es
+
+    def _process_script_args(
+            self, name: str, args: T.List[T.Union[
+                str, mesonlib.File, CustomTargetHolder,
+                CustomTargetIndexHolder,
+                ExternalProgramHolder, ExecutableHolder,
+            ]], allow_built: bool = False) -> T.List[str]:
+        script_args = []  # T.List[str]
+        new = False
+        for a in args:
+            if isinstance(a, str):
+                script_args.append(a)
+            elif isinstance(a, mesonlib.File):
+                new = True
+                script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
+            elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
+                if not allow_built:
+                    raise InterpreterException(f'Arguments to {name} cannot be built')
+                new = True
+                script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
+
+                # This feels really hacky, but I'm not sure how else to fix
+                # this without completely rewriting install script handling.
+                # This is complicated by the fact that the install target
+                # depends on all.
+                if isinstance(a, build.CustomTargetIndex):
+                    a.target.build_by_default = True
+                else:
+                    a.build_by_default = True
+            elif isinstance(a, ExternalProgram):
+                script_args.extend(a.command)
+                new = True
+            else:
+                raise InterpreterException(
+                    f'Arguments to {name} must be strings, Files, CustomTargets, '
+                    'or Indexes of CustomTargets')
+        if new:
+            FeatureNew.single_use(
+                f'Calling "{name}" with File, CustomTarget, Index of CustomTarget, '
+                'Executable, or ExternalProgram',
+                '0.55.0', self.interpreter.subproject)
+        return script_args
+
+    @FeatureNewKwargs('add_install_script', '0.57.0', ['skip_if_destdir'])
+    @permittedKwargs({'skip_if_destdir'})
+    def add_install_script_method(self, args: 'T.Tuple[T.Union[str, mesonlib.File, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder], ...]', kwargs):
+        if len(args) < 1:
+            raise InterpreterException('add_install_script takes one or more arguments')
+        if isinstance(args[0], mesonlib.File):
+            FeatureNew.single_use('Passing file object to script parameter of add_install_script',
+                                  '0.57.0', self.interpreter.subproject)
+        skip_if_destdir = kwargs.get('skip_if_destdir', False)
+        if not isinstance(skip_if_destdir, bool):
+            raise InterpreterException('skip_if_destdir keyword argument must be boolean')
+        script_args = self._process_script_args('add_install_script', args[1:], allow_built=True)
+        script = self._find_source_script(args[0], script_args)
+        script.skip_if_destdir = skip_if_destdir
+        self.build.install_scripts.append(script)
+
+    @permittedKwargs(set())
+    def add_postconf_script_method(self, args, kwargs):
+        if len(args) < 1:
+            raise InterpreterException('add_postconf_script takes one or more arguments')
+        if isinstance(args[0], mesonlib.File):
+            FeatureNew.single_use('Passing file object to script parameter of add_postconf_script',
+                                  '0.57.0', self.interpreter.subproject)
+        script_args = self._process_script_args('add_postconf_script', args[1:], allow_built=True)
+        script = self._find_source_script(args[0], script_args)
+        self.build.postconf_scripts.append(script)
+
+    @permittedKwargs(set())
+    def add_dist_script_method(self, args, kwargs):
+        if len(args) < 1:
+            raise InterpreterException('add_dist_script takes one or more arguments')
+        if len(args) > 1:
+            FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
+                                  '0.49.0', self.interpreter.subproject)
+        if isinstance(args[0], mesonlib.File):
+            FeatureNew.single_use('Passing file object to script parameter of add_dist_script',
+                                  '0.57.0', self.interpreter.subproject)
+        if self.interpreter.subproject != '':
+            FeatureNew.single_use('Calling "add_dist_script" in a subproject',
+                                  '0.58.0', self.interpreter.subproject)
+        script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True)
+        script = self._find_source_script(args[0], script_args)
+        self.build.dist_scripts.append(script)
+
+    @noPosargs
+    @permittedKwargs({})
+    def current_source_dir_method(self, args, kwargs):
+        src = self.interpreter.environment.source_dir
+        sub = self.interpreter.subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @permittedKwargs({})
+    def current_build_dir_method(self, args, kwargs):
+        src = self.interpreter.environment.build_dir
+        sub = self.interpreter.subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @permittedKwargs({})
+    def backend_method(self, args, kwargs):
+        return self.interpreter.backend.name
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.project_source_root() or meson.global_source_root() instead.')
+    def source_root_method(self, args, kwargs):
+        return self.interpreter.environment.source_dir
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureDeprecated('meson.build_root', '0.56.0', 'use meson.project_build_root() or meson.global_build_root() instead.')
+    def build_root_method(self, args, kwargs):
+        return self.interpreter.environment.build_dir
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureNew('meson.project_source_root', '0.56.0')
+    def project_source_root_method(self, args, kwargs):
+        src = self.interpreter.environment.source_dir
+        sub = self.interpreter.root_subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureNew('meson.project_build_root', '0.56.0')
+    def project_build_root_method(self, args, kwargs):
+        src = self.interpreter.environment.build_dir
+        sub = self.interpreter.root_subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.global_source_root', '0.58.0')
+    def global_source_root_method(self, args, kwargs):
+        return self.interpreter.environment.source_dir
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.global_build_root', '0.58.0')
+    def global_build_root_method(self, args, kwargs):
+        return self.interpreter.environment.build_dir
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+    def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+        return self.can_run_host_binaries_impl(args, kwargs)
+
+    @noPosargs
+    @permittedKwargs({})
+    @FeatureNew('meson.can_run_host_binaries', '0.55.0')
+    def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+        return self.can_run_host_binaries_impl(args, kwargs)
+
+    def can_run_host_binaries_impl(self, args, kwargs):
+        if (self.is_cross_build_method(None, None) and
+                self.build.environment.need_exe_wrapper()):
+            if self.build.environment.exe_wrapper is None:
+                return False
+        # We return True when exe_wrap is defined, when it's not needed, and
+        # when we're compiling natively. The last two are semantically confusing.
+        # Need to revisit this.
+        return True
+
+    @noPosargs
+    @permittedKwargs({})
+    def is_cross_build_method(self, args, kwargs):
+        return self.build.environment.is_cross_build()
+
+    @permittedKwargs({'native'})
+    def get_compiler_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('get_compiler() must have one and only one argument.')
+        cname = args[0]
+        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        clist = self.interpreter.coredata.compilers[for_machine]
+        if cname in clist:
+            return clist[cname]
+        raise InterpreterException(f'Tried to access compiler for language "{cname}", not specified for {for_machine.get_lower_case_name()} machine.')
+
+    @noPosargs
+    @permittedKwargs({})
+    def is_unity_method(self, args, kwargs):
+        optval = self.interpreter.environment.coredata.get_option(OptionKey('unity'))
+        if optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject()):
+            return True
+        return False
+
+    @noPosargs
+    @permittedKwargs({})
+    def is_subproject_method(self, args, kwargs):
+        return self.interpreter.is_subproject()
+
+    @permittedKwargs({})
+    def install_dependency_manifest_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Must specify manifest install file name')
+        if not isinstance(args[0], str):
+            raise InterpreterException('Argument must be a string.')
+        self.build.dep_manifest_name = args[0]
+
+    @FeatureNew('meson.override_find_program', '0.46.0')
+    @permittedKwargs({})
+    def override_find_program_method(self, args, kwargs):
+        if len(args) != 2:
+            raise InterpreterException('Override needs two arguments')
+        name, exe = args
+        if not isinstance(name, str):
+            raise InterpreterException('First argument must be a string')
+        if isinstance(exe, mesonlib.File):
+            abspath = exe.absolute_path(self.interpreter.environment.source_dir,
+                                        self.interpreter.environment.build_dir)
+            if not os.path.exists(abspath):
+                raise InterpreterException('Tried to override %s with a file that does not exist.' % name)
+            exe = OverrideProgram(name, abspath)
+        if not isinstance(exe, (ExternalProgram, build.Executable)):
+            raise InterpreterException('Second argument must be an external program or executable.')
+        self.interpreter.add_find_program_override(name, exe)
+
+    @FeatureNew('meson.override_dependency', '0.54.0')
+    @permittedKwargs({'native'})
+    def override_dependency_method(self, args, kwargs):
+        if len(args) != 2:
+            raise InterpreterException('Override needs two arguments')
+        name = args[0]
+        dep = args[1]
+        if not isinstance(name, str) or not name:
+            raise InterpreterException('First argument must be a string and cannot be empty')
+        if not isinstance(dep, dependencies.Dependency):
+            raise InterpreterException('Second argument must be a dependency object')
+        identifier = dependencies.get_dep_identifier(name, kwargs)
+        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        override = self.build.dependency_overrides[for_machine].get(identifier)
+        if override:
+            m = 'Tried to override dependency {!r} which has already been resolved or overridden at {}'
+            location = mlog.get_error_location_string(override.node.filename, override.node.lineno)
+            raise InterpreterException(m.format(name, location))
+        self.build.dependency_overrides[for_machine][identifier] = \
+            build.DependencyOverride(dep, self.interpreter.current_node)
+
+    @noPosargs
+    @permittedKwargs({})
+    def project_version_method(self, args, kwargs):
+        return self.build.dep_manifest[self.interpreter.active_projectname]['version']
+
+    @FeatureNew('meson.project_license()', '0.45.0')
+    @noPosargs
+    @permittedKwargs({})
+    def project_license_method(self, args, kwargs):
+        return self.build.dep_manifest[self.interpreter.active_projectname]['license']
+
+    @noPosargs
+    @permittedKwargs({})
+    def version_method(self, args, kwargs):
+        return MesonVersionString(self.interpreter.coredata.version)
+
+    @noPosargs
+    @permittedKwargs({})
+    def project_name_method(self, args, kwargs):
+        return self.interpreter.active_projectname
+
+    def __get_external_property_impl(self, propname: str, fallback: T.Optional[object], machine: MachineChoice) -> object:
+        """Shared implementation for get_cross_property and get_external_property."""
+        try:
+            return self.interpreter.environment.properties[machine][propname]
+        except KeyError:
+            if fallback is not None:
+                return fallback
+            raise InterpreterException(f'Unknown property for {machine.get_lower_case_name()} machine: {propname}')
+
+    @noArgsFlattening
+    @permittedKwargs({})
+    @FeatureDeprecated('meson.get_cross_property', '0.58.0', 'Use meson.get_external_property() instead')
+    @typed_pos_args('meson.get_cross_property', str, optargs=[object])
+    def get_cross_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: T.Dict[str, T.Any]) -> object:
+        propname, fallback = args
+        return self.__get_external_property_impl(propname, fallback, MachineChoice.HOST)
+
+    @noArgsFlattening
+    @permittedKwargs({'native'})
+    @FeatureNew('meson.get_external_property', '0.54.0')
+    @typed_pos_args('meson.get_external_property', str, optargs=[object])
+    def get_external_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: T.Dict[str, T.Any]) -> object:
+        propname, fallback = args
+        machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        return self.__get_external_property_impl(propname, fallback, machine)
+
+
+    @permittedKwargs({'native'})
+    @FeatureNew('meson.has_external_property', '0.58.0')
+    @typed_pos_args('meson.has_external_property', str)
+    def has_external_property_method(self, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+        prop_name = args[0]
+        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        return prop_name in self.interpreter.environment.properties[for_machine]
+
+    @FeatureNew('add_devenv', '0.58.0')
+    @noKwargs
+    @typed_pos_args('add_devenv', (str, list, dict, EnvironmentVariablesObject))
+    def add_devenv_method(self, args: T.Union[str, list, dict, EnvironmentVariablesObject], kwargs: T.Dict[str, T.Any]) -> None:
+        env = args[0]
+        if isinstance(env, (str, list, dict)):
+            env = EnvironmentVariablesObject(env)
+        self.build.devenv.append(env.vars)
diff --git a/meson/mesonbuild/interpreterbase/__init__.py b/meson/mesonbuild/interpreterbase/__init__.py
new file mode 100644
index 000000000..8e45cdb8f
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/__init__.py
@@ -0,0 +1,122 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__all__ = [
+    'InterpreterObject',
+    'MesonInterpreterObject',
+    'ObjectHolder',
+    'RangeHolder',
+    'MesonVersionString',
+    'MutableInterpreterObject',
+
+    'Disabler',
+    'is_disabled',
+
+    'InterpreterException',
+    'InvalidCode',
+    'InvalidArguments',
+    'SubdirDoneRequest',
+    'ContinueRequest',
+    'BreakRequest',
+
+    'check_stringlist',
+    'default_resolve_key',
+    'flatten',
+    'resolve_second_level_holders',
+
+    'noPosargs',
+    'builtinMethodNoKwargs',
+    'noKwargs',
+    'stringArgs',
+    'noArgsFlattening',
+    'noSecondLevelHolderResolving',
+    'permissive_unholder_return',
+    'disablerIfNotFound',
+    'permittedKwargs',
+    'typed_pos_args',
+    'ContainerTypeInfo',
+    'KwargInfo',
+    'typed_kwargs',
+    'FeatureCheckBase',
+    'FeatureNew',
+    'FeatureDeprecated',
+    'FeatureNewKwargs',
+    'FeatureDeprecatedKwargs',
+
+    'InterpreterBase',
+
+    'TV_fw_var',
+    'TV_fw_args',
+    'TV_fw_kwargs',
+    'TV_func',
+    'TYPE_elementary',
+    'TYPE_var',
+    'TYPE_nvar',
+    'TYPE_kwargs',
+    'TYPE_nkwargs',
+    'TYPE_key_resolver',
+]
+
+from .baseobjects import (
+    InterpreterObject,
+    MesonInterpreterObject,
+    ObjectHolder,
+    RangeHolder,
+    MutableInterpreterObject,
+
+    TV_fw_var,
+    TV_fw_args,
+    TV_fw_kwargs,
+    TV_func,
+    TYPE_elementary,
+    TYPE_var,
+    TYPE_nvar,
+    TYPE_kwargs,
+    TYPE_nkwargs,
+    TYPE_key_resolver,
+)
+
+from .decorators import (
+    noPosargs,
+    builtinMethodNoKwargs,
+    noKwargs,
+    stringArgs,
+    noArgsFlattening,
+    noSecondLevelHolderResolving,
+    permissive_unholder_return,
+    disablerIfNotFound,
+    permittedKwargs,
+    typed_pos_args,
+    ContainerTypeInfo,
+    KwargInfo,
+    typed_kwargs,
+    FeatureCheckBase,
+    FeatureNew,
+    FeatureDeprecated,
+    FeatureNewKwargs,
+    FeatureDeprecatedKwargs,
+)
+
+from .exceptions import (
+    InterpreterException,
+    InvalidCode,
+    InvalidArguments,
+    SubdirDoneRequest,
+    ContinueRequest,
+    BreakRequest,
+)
+
+from .disabler import Disabler, is_disabled
+from .helpers import check_stringlist, default_resolve_key, flatten, resolve_second_level_holders
+from .interpreterbase import MesonVersionString, InterpreterBase
diff --git a/meson/mesonbuild/interpreterbase/_unholder.py b/meson/mesonbuild/interpreterbase/_unholder.py
new file mode 100644
index 000000000..10c7cfc8f
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/_unholder.py
@@ -0,0 +1,39 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .baseobjects import InterpreterObject, MesonInterpreterObject, ObjectHolder, TYPE_var
+from .exceptions import InvalidArguments
+from ..mesonlib import HoldableObject, MesonBugException
+
+import typing as T
+
+def _unholder(obj: T.Union[TYPE_var, InterpreterObject], *, permissive: bool = False) -> TYPE_var:
+    if isinstance(obj, (int, bool, str)):
+        return obj
+    elif isinstance(obj, list):
+        return [_unholder(x, permissive=permissive) for x in obj]
+    elif isinstance(obj, dict):
+        return {k: _unholder(v, permissive=permissive) for k, v in obj.items()}
+    elif isinstance(obj, ObjectHolder):
+        assert isinstance(obj.held_object, HoldableObject)
+        return obj.held_object
+    elif isinstance(obj, MesonInterpreterObject):
+        return obj
+    elif isinstance(obj, HoldableObject) and permissive:
+        return obj
+    elif isinstance(obj, HoldableObject):
+        raise MesonBugException(f'Argument {obj} of type {type(obj).__name__} is not held by an ObjectHolder.')
+    elif isinstance(obj, InterpreterObject):
+        raise InvalidArguments(f'Argument {obj} of type {type(obj).__name__} cannot be passed to a method or function')
+    raise MesonBugException(f'Unknown object {obj} of type {type(obj).__name__} in the parameters.')
diff --git a/meson/mesonbuild/interpreterbase/baseobjects.py b/meson/mesonbuild/interpreterbase/baseobjects.py
new file mode 100644
index 000000000..8b1293ca2
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/baseobjects.py
@@ -0,0 +1,96 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mparser
+from .exceptions import InvalidCode
+from .helpers import flatten, resolve_second_level_holders
+from ..mesonlib import HoldableObject
+
+import typing as T
+
+if T.TYPE_CHECKING:
+    # Object holders need the actual interpreter
+    from ..interpreter import Interpreter
+
+TV_fw_var = T.Union[str, int, bool, list, dict, 'InterpreterObject']
+TV_fw_args = T.List[T.Union[mparser.BaseNode, TV_fw_var]]
+TV_fw_kwargs = T.Dict[str, T.Union[mparser.BaseNode, TV_fw_var]]
+
+TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])
+
+TYPE_elementary = T.Union[str, int, bool, T.List[T.Any], T.Dict[str, T.Any]]
+TYPE_var = T.Union[TYPE_elementary, HoldableObject, 'MesonInterpreterObject']
+TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
+TYPE_kwargs = T.Dict[str, TYPE_var]
+TYPE_nkwargs = T.Dict[str, TYPE_nvar]
+TYPE_key_resolver = T.Callable[[mparser.BaseNode], str]
+
+class InterpreterObject:
+    def __init__(self, *, subproject: T.Optional[str] = None) -> None:
+        self.methods: T.Dict[
+            str,
+            T.Callable[[T.List[TYPE_var], TYPE_kwargs], TYPE_var]
+        ] = {}
+        # Current node set during a method call. This can be used as location
+        # when printing a warning message during a method call.
+        self.current_node: mparser.BaseNode = None
+        self.subproject: str = subproject or ''
+
+    def method_call(
+                self,
+                method_name: str,
+                args: T.List[TYPE_var],
+                kwargs: TYPE_kwargs
+            ) -> TYPE_var:
+        if method_name in self.methods:
+            method = self.methods[method_name]
+            if not getattr(method, 'no-args-flattening', False):
+                args = flatten(args)
+            if not getattr(method, 'no-second-level-holder-flattening', False):
+                args, kwargs = resolve_second_level_holders(args, kwargs)
+            return method(args, kwargs)
+        raise InvalidCode(f'Unknown method "{method_name}" in object {self} of type {type(self).__name__}.')
+
+class MesonInterpreterObject(InterpreterObject):
+    ''' All non-elementary objects and non-object-holders should be derived from this '''
+
+class MutableInterpreterObject:
+    ''' Dummy class to mark the object type as mutable '''
+
+InterpreterObjectTypeVar = T.TypeVar('InterpreterObjectTypeVar', bound=HoldableObject)
+
+class ObjectHolder(InterpreterObject, T.Generic[InterpreterObjectTypeVar]):
+    def __init__(self, obj: InterpreterObjectTypeVar, interpreter: 'Interpreter') -> None:
+        super().__init__(subproject=interpreter.subproject)
+        assert isinstance(obj, HoldableObject), f'This is a bug: Trying to hold object of type `{type(obj).__name__}` that is not a `HoldableObject`'
+        self.held_object = obj
+        self.interpreter = interpreter
+        self.env = self.interpreter.environment
+
+    def __repr__(self) -> str:
+        return f'<[{type(self).__name__}] holds [{type(self.held_object).__name__}]: {self.held_object!r}>'
+
+class RangeHolder(MesonInterpreterObject):
+    def __init__(self, start: int, stop: int, step: int, *, subproject: str) -> None:
+        super().__init__(subproject=subproject)
+        self.range = range(start, stop, step)
+
+    def __iter__(self) -> T.Iterator[int]:
+        return iter(self.range)
+
+    def __getitem__(self, key: int) -> int:
+        return self.range[key]
+
+    def __len__(self) -> int:
+        return len(self.range)
diff --git a/meson/mesonbuild/interpreterbase/decorators.py b/meson/mesonbuild/interpreterbase/decorators.py
new file mode 100644
index 000000000..eabc6d864
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/decorators.py
@@ -0,0 +1,650 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib, mlog
+from .baseobjects import TV_func, TYPE_var
+from .disabler import Disabler
+from .exceptions import InterpreterException, InvalidArguments
+from .helpers import check_stringlist, get_callee_args
+from ._unholder import _unholder
+
+from functools import wraps
+import abc
+import itertools
+import typing as T
+
+def noPosargs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        args = get_callee_args(wrapped_args)[2]
+        if args:
+            raise InvalidArguments('Function does not take positional arguments.')
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast(TV_func, wrapped)
+
+def builtinMethodNoKwargs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        node = wrapped_args[0].current_node
+        method_name = wrapped_args[2]
+        kwargs = wrapped_args[4]
+        if kwargs:
+            mlog.warning(f'Method {method_name!r} does not take keyword arguments.',
+                         'This will become a hard error in the future',
+                         location=node)
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast(TV_func, wrapped)
+
+def noKwargs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        kwargs = get_callee_args(wrapped_args)[3]
+        if kwargs:
+            raise InvalidArguments('Function does not take keyword arguments.')
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast(TV_func, wrapped)
+
+def stringArgs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        args = get_callee_args(wrapped_args)[2]
+        assert(isinstance(args, list))
+        check_stringlist(args)
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast(TV_func, wrapped)
+
+def noArgsFlattening(f: TV_func) -> TV_func:
+    setattr(f, 'no-args-flattening', True)  # noqa: B010
+    return f
+
+def noSecondLevelHolderResolving(f: TV_func) -> TV_func:
+    setattr(f, 'no-second-level-holder-flattening', True)  # noqa: B010
+    return f
+
+def permissive_unholder_return(f: TV_func) -> T.Callable[..., TYPE_var]:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        res = f(*wrapped_args, **wrapped_kwargs)
+        return _unholder(res, permissive=True)
+    return T.cast(T.Callable[..., TYPE_var], wrapped)
+
+def disablerIfNotFound(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        kwargs = get_callee_args(wrapped_args)[3]
+        disabler = kwargs.pop('disabler', False)
+        ret = f(*wrapped_args, **wrapped_kwargs)
+        if disabler and not ret.found():
+            return Disabler()
+        return ret
+    return T.cast(TV_func, wrapped)
+
+class permittedKwargs:
+
+    def __init__(self, permitted: T.Set[str]):
+        self.permitted = permitted  # type: T.Set[str]
+
+    def __call__(self, f: TV_func) -> TV_func:
+        @wraps(f)
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            s, node, args, kwargs, _ = get_callee_args(wrapped_args)
+            for k in kwargs:
+                if k not in self.permitted:
+                    mlog.warning(f'''Passed invalid keyword argument "{k}".''', location=node)
+                    mlog.warning('This will become a hard error in the future.')
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast(TV_func, wrapped)
+
+
+def typed_pos_args(name: str, *types: T.Union[T.Type, T.Tuple[T.Type, ...]],
+                   varargs: T.Optional[T.Union[T.Type, T.Tuple[T.Type, ...]]] = None,
+                   optargs: T.Optional[T.List[T.Union[T.Type, T.Tuple[T.Type, ...]]]] = None,
+                   min_varargs: int = 0, max_varargs: int = 0) -> T.Callable[..., T.Any]:
+    """Decorator that types type checking of positional arguments.
+
+    This supports two different models of optional arguments: the first is the
+    variadic argument model. Variadic arguments are a possibly bounded,
+    possibly unbounded number of arguments of the same type (unions are
+    supported). The second is the standard default value model, in this case
+    a number of optional arguments may be provided, but they are still
+    ordered, and they may have different types.
+
+    This function does not support mixing variadic and default arguments.
+
+    :name: The name of the decorated function (as displayed in error messages)
+    :varargs: The type(s) of any variadic arguments the function takes. If
+        None the function takes no variadic args
+    :min_varargs: the minimum number of variadic arguments taken
+    :max_varargs: the maximum number of variadic arguments taken. 0 means unlimited
+    :optargs: The types of any optional argument parameters taken. If None
+        then no optional parameters are taken.
+
+    Some examples of usage below:
+    >>> @typed_pos_args('mod.func', str, (str, int))
+    ... def func(self, state: ModuleState, args: T.Tuple[str, T.Union[str, int]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    >>> @typed_pos_args('method', str, varargs=str)
+    ... def method(self, node: BaseNode, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    >>> @typed_pos_args('method', varargs=str, min_varargs=1)
+    ... def method(self, node: BaseNode, args: T.Tuple[T.List[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    >>> @typed_pos_args('method', str, optargs=[(str, int), str])
+    ... def method(self, node: BaseNode, args: T.Tuple[str, T.Optional[T.Union[str, int]], T.Optional[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    When should you choose `typed_pos_args('name', varargs=str,
+    min_varargs=1)` vs `typed_pos_args('name', str, varargs=str)`?
+
+    The answer has to do with the semantics of the function: if all of the
+    inputs are the same type (such as with `files()`) then the former is
+    correct, as all of the arguments are string names of files. If the first
+    argument is something else then it should be separated.
+    """
+    def inner(f: TV_func) -> TV_func:
+
+        @wraps(f)
+        def wrapper(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            args = get_callee_args(wrapped_args)[2]
+
+            # These are implementation programming errors, end users should never see them.
+            assert isinstance(args, list), args
+            assert max_varargs >= 0, 'max_varargs cannot be negative'
+            assert min_varargs >= 0, 'min_varargs cannot be negative'
+            assert optargs is None or varargs is None, \
+                'varargs and optargs not supported together as this would be ambiguous'
+
+            num_args = len(args)
+            num_types = len(types)
+            a_types = types
+
+            if varargs:
+                min_args = num_types + min_varargs
+                max_args = num_types + max_varargs
+                if max_varargs == 0 and num_args < min_args:
+                    raise InvalidArguments(f'{name} takes at least {min_args} arguments, but got {num_args}.')
+                elif max_varargs != 0 and (num_args < min_args or num_args > max_args):
+                    raise InvalidArguments(f'{name} takes between {min_args} and {max_args} arguments, but got {num_args}.')
+            elif optargs:
+                if num_args < num_types:
+                    raise InvalidArguments(f'{name} takes at least {num_types} arguments, but got {num_args}.')
+                elif num_args > num_types + len(optargs):
+                    raise InvalidArguments(f'{name} takes at most {num_types + len(optargs)} arguments, but got {num_args}.')
+                # Add the number of positional arguments required
+                if num_args > num_types:
+                    diff = num_args - num_types
+                    a_types = tuple(list(types) + list(optargs[:diff]))
+            elif num_args != num_types:
+                raise InvalidArguments(f'{name} takes exactly {num_types} arguments, but got {num_args}.')
+
+            for i, (arg, type_) in enumerate(itertools.zip_longest(args, a_types, fillvalue=varargs), start=1):
+                if not isinstance(arg, type_):
+                    if isinstance(type_, tuple):
+                        shouldbe = 'one of: {}'.format(", ".join(f'"{t.__name__}"' for t in type_))
+                    else:
+                        shouldbe = f'"{type_.__name__}"'
+                    raise InvalidArguments(f'{name} argument {i} was of type "{type(arg).__name__}" but should have been {shouldbe}')
+
+            # Ensure that we're actually passing a tuple.
+            # Depending on what kind of function we're calling the length of
+            # wrapped_args can vary.
+            nargs = list(wrapped_args)
+            i = nargs.index(args)
+            if varargs:
+                # if we have varargs we need to split them into a separate
+                # tuple, as python's typing doesn't understand tuples with
+                # fixed elements and variadic elements, only one or the other.
+                # so in that case we need T.Tuple[int, str, float, T.Tuple[str, ...]]
+                pos = args[:len(types)]
+                var = list(args[len(types):])
+                pos.append(var)
+                nargs[i] = tuple(pos)
+            elif optargs:
+                if num_args < num_types + len(optargs):
+                    diff = num_types + len(optargs) - num_args
+                    nargs[i] = tuple(list(args) + [None] * diff)
+                else:
+                    nargs[i] = args
+            else:
+                nargs[i] = tuple(args)
+            return f(*nargs, **wrapped_kwargs)
+
+        return T.cast(TV_func, wrapper)
+    return inner
+
+
+class ContainerTypeInfo:
+
+    """Container information for keyword arguments.
+
+    For keyword arguments that are containers (list or dict), this class encodes
+    that information.
+
+    :param container: the type of container
+    :param contains: the types the container holds
+    :param pairs: if the container is supposed to be of even length.
+        This is mainly used for interfaces that predate the addition of dictionaries, and use
+        `[key, value, key2, value2]` format.
+    :param allow_empty: Whether this container is allowed to be empty
+        There are some cases where containers not only must be passed, but must
+        not be empty, and other cases where an empty container is allowed.
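+
+    An illustrative sketch (the file names are hypothetical):
+
+    >>> info = ContainerTypeInfo(list, str, allow_empty=False)
+    >>> info.check(['a.c', 'b.c'])   # valid: returns None, nothing is printed
+    >>> info.check([])
+    'container is empty, but not allowed to be'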
+    """
+
+    def __init__(self, container: T.Type, contains: T.Union[T.Type, T.Tuple[T.Type, ...]], *,
+                 pairs: bool = False, allow_empty: bool = True):
+        self.container = container
+        self.contains = contains
+        self.pairs = pairs
+        self.allow_empty = allow_empty
+
+    def check(self, value: T.Any) -> T.Optional[str]:
+        """Check that a value is valid.
+
+        :param value: A value to check
+        :return: If there is an error then a string message, otherwise None
+        """
+        if not isinstance(value, self.container):
+            return f'container type was "{type(value).__name__}", but should have been "{self.container.__name__}"'
+        iter_ = iter(value.values()) if isinstance(value, dict) else iter(value)
+        for each in iter_:
+            if not isinstance(each, self.contains):
+                if isinstance(self.contains, tuple):
+                    shouldbe = 'one of: {}'.format(", ".join(f'"{t.__name__}"' for t in self.contains))
+                else:
+                    shouldbe = f'"{self.contains.__name__}"'
+                return f'contained a value of type "{type(each).__name__}" but should have been {shouldbe}'
+        if self.pairs and len(value) % 2 != 0:
+            return 'container should be of even length, but is not'
+        if not value and not self.allow_empty:
+            return 'container is empty, but not allowed to be'
+        return None
+
+
+_T = T.TypeVar('_T')
+
+class _NULL_T:
+    """Special null type for evolution, this is an implementation detail."""
+
+
+_NULL = _NULL_T()
+
+class KwargInfo(T.Generic[_T]):
+
+    """A description of a keyword argument to a meson function
+
+    This is used to describe a value to the :func:typed_kwargs function.
+
+    :param name: the name of the parameter
+    :param types: A type or tuple of types that are allowed, or a :class:ContainerType
+    :param required: Whether this is a required keyword argument. defaults to False
+    :param listify: If true, then the argument will be listified before being
+        checked. This is useful for cases where the Meson DSL allows a scalar or
+        a container, but internally we only want to work with containers
+    :param default: A default value to use if this isn't set. defaults to None,
+        this may be safely set to a mutable type, as long as that type does not
+        itself contain mutable types, typed_kwargs will copy the default
+    :param since: Meson version in which this argument has been added. defaults to None
+    :param deprecated: Meson version in which this argument has been deprecated. defaults to None
+    :param validator: A callable that does additional validation. This is mainly
+        intended for cases where a string is expected, but only a few specific
+        values are accepted. Must return None if the input is valid, or a
+        message if the input is invalid
+    :param convertor: A callable that converts the raw input value into a
+        different type. This is intended for cases such as the meson DSL using a
+        string, but the implementation using an Enum. This should not do
+        validation, just conversion.
+    :param deprecated_values: a dictionary mapping a value to the version of
+        meson it was deprecated in.
+    :param since_values: a dictionary mapping a value to the version of meson it was
+        added in.
+    :param not_set_warning: A warning message that is logged if the kwarg is not
+        set by the user.
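+
+    An illustrative sketch (the keyword names "install" and "sources" are
+    hypothetical, chosen only to show the shape of a definition):
+
+    >>> install_kw = KwargInfo('install', bool, default=False, since='0.50.0')
+    >>> sources_kw = KwargInfo('sources', ContainerTypeInfo(list, str),
+    ...                        listify=True, default=[])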
+    """
+
+    def __init__(self, name: str, types: T.Union[T.Type[_T], T.Tuple[T.Type[_T], ...], ContainerTypeInfo],
+                 *, required: bool = False, listify: bool = False,
+                 default: T.Optional[_T] = None,
+                 since: T.Optional[str] = None,
+                 since_values: T.Optional[T.Dict[str, str]] = None,
+                 deprecated: T.Optional[str] = None,
+                 deprecated_values: T.Optional[T.Dict[str, str]] = None,
+                 validator: T.Optional[T.Callable[[_T], T.Optional[str]]] = None,
+                 convertor: T.Optional[T.Callable[[_T], TYPE_var]] = None,
+                 not_set_warning: T.Optional[str] = None):
+        self.name = name
+        self.types = types
+        self.required = required
+        self.listify = listify
+        self.default = default
+        self.since_values = since_values
+        self.since = since
+        self.deprecated = deprecated
+        self.deprecated_values = deprecated_values
+        self.validator = validator
+        self.convertor = convertor
+        self.not_set_warning = not_set_warning
+
+    def evolve(self, *,
+               required: T.Union[bool, _NULL_T] = _NULL,
+               listify: T.Union[bool, _NULL_T] = _NULL,
+               default: T.Union[_T, None, _NULL_T] = _NULL,
+               since: T.Union[str, None, _NULL_T] = _NULL,
+               since_values: T.Union[T.Dict[str, str], None, _NULL_T] = _NULL,
+               deprecated: T.Union[str, None, _NULL_T] = _NULL,
+               deprecated_values: T.Union[T.Dict[str, str], None, _NULL_T] = _NULL,
+               validator: T.Union[T.Callable[[_T], T.Optional[str]], None, _NULL_T] = _NULL,
+               convertor: T.Union[T.Callable[[_T], TYPE_var], None, _NULL_T] = _NULL) -> 'KwargInfo':
+        """Create a shallow copy of this KwargInfo, with modifications.
+
+        This allows a shared kwarg that implements complex logic to be reused
+        with slight differences in usage, such as being added to different
+        functions in different versions of Meson.
+
+        Using the _NULL special value here allows us to pass None, which has
+        meaning in many of these cases. _NULL itself is never stored; it is
+        always replaced by either the copy in self or the provided new version.
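+
+        An illustrative sketch (hypothetical reuse of a shared definition):
+
+        >>> base_kw = KwargInfo('install', bool, default=False)
+        >>> newer_kw = base_kw.evolve(default=True, since='0.60.0')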
+        """
+        return type(self)(
+            self.name,
+            self.types,
+            listify=listify if not isinstance(listify, _NULL_T) else self.listify,
+            required=required if not isinstance(required, _NULL_T) else self.required,
+            default=default if not isinstance(default, _NULL_T) else self.default,
+            since=since if not isinstance(since, _NULL_T) else self.since,
+            since_values=since_values if not isinstance(since_values, _NULL_T) else self.since_values,
+            deprecated=deprecated if not isinstance(deprecated, _NULL_T) else self.deprecated,
+            deprecated_values=deprecated_values if not isinstance(deprecated_values, _NULL_T) else self.deprecated_values,
+            validator=validator if not isinstance(validator, _NULL_T) else self.validator,
+            convertor=convertor if not isinstance(convertor, _NULL_T) else self.convertor,
+        )
+
+
+
+def typed_kwargs(name: str, *types: KwargInfo) -> T.Callable[..., T.Any]:
+    """Decorator for type checking keyword arguments.
+
+    Used to wrap a Meson DSL implementation function, validating the type and
+    other properties of each keyword argument. For non-required values it sets
+    the value to a default, which means the value will always be provided.
+
+    If the type is a :class:ContainerTypeInfo, then the default value will be
+    passed as an argument to the container initializer, making a shallow copy.
+
+    :param name: the name of the function, including the object it's attached to
+        (if applicable)
+    :param *types: KwargInfo entries for each keyword argument.
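+
+    An illustrative sketch (the function name and the "fruit" keyword are
+    hypothetical, chosen only to show the shape of a definition):
+
+    >>> @typed_kwargs('mod.func', KwargInfo('fruit', str, default='apple'))
+    ... def func(self, node, args, kwargs):
+    ...     assert 'fruit' in kwargs  # the default is filled in when not given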
+    """
+    def inner(f: TV_func) -> TV_func:
+
+        @wraps(f)
+        def wrapper(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            kwargs, subproject = get_callee_args(wrapped_args, want_subproject=True)[3:5]
+
+            all_names = {t.name for t in types}
+            unknowns = set(kwargs).difference(all_names)
+            if unknowns:
+                # Warn about unknown arguments, delete them and continue. This
+                # keeps the current behavior.
+                ustr = ', '.join([f'"{u}"' for u in sorted(unknowns)])
+                mlog.warning(f'{name} got unknown keyword arguments {ustr}')
+                for u in unknowns:
+                    del kwargs[u]
+
+            for info in types:
+                value = kwargs.get(info.name)
+                if value is not None:
+                    if info.since:
+                        feature_name = info.name + ' arg in ' + name
+                        FeatureNew.single_use(feature_name, info.since, subproject)
+                    if info.deprecated:
+                        feature_name = info.name + ' arg in ' + name
+                        FeatureDeprecated.single_use(feature_name, info.deprecated, subproject)
+                    if info.listify:
+                        kwargs[info.name] = value = mesonlib.listify(value)
+                    if isinstance(info.types, ContainerTypeInfo):
+                        msg = info.types.check(value)
+                        if msg is not None:
+                            raise InvalidArguments(f'{name} keyword argument "{info.name}" {msg}')
+                    else:
+                        if not isinstance(value, info.types):
+                            if isinstance(info.types, tuple):
+                                shouldbe = 'one of: {}'.format(", ".join(f'"{t.__name__}"' for t in info.types))
+                            else:
+                                shouldbe = f'"{info.types.__name__}"'
+                            raise InvalidArguments(f'{name} keyword argument "{info.name}" was of type "{type(value).__name__}" but should have been {shouldbe}')
+
+                    if info.validator is not None:
+                        msg = info.validator(value)
+                        if msg is not None:
+                            raise InvalidArguments(f'{name} keyword argument "{info.name}" {msg}')
+
+                    warn: bool
+                    if info.deprecated_values is not None:
+                        for n, version in info.deprecated_values.items():
+                            if isinstance(value, (dict, list)):
+                                warn = n in value
+                            else:
+                                warn = n == value
+
+                            if warn:
+                                FeatureDeprecated.single_use(f'"{name}" keyword argument "{info.name}" value "{n}"', version, subproject)
+
+                    if info.since_values is not None:
+                        for n, version in info.since_values.items():
+                            if isinstance(value, (dict, list)):
+                                warn = n in value
+                            else:
+                                warn = n == value
+
+                            if warn:
+                                FeatureNew.single_use(f'"{name}" keyword argument "{info.name}" value "{n}"', version, subproject)
+
+                elif info.required:
+                    raise InvalidArguments(f'{name} is missing required keyword argument "{info.name}"')
+                else:
+                    # Set the value to the default, thus ensuring all kwargs are present.
+                    # This simplifies both the type checking and the usage.
+                    # Create a shallow copy of the container (and do a type
+                    # conversion if necessary). This allows mutable types to
+                    # be used safely as default values
+                    if isinstance(info.types, ContainerTypeInfo):
+                        kwargs[info.name] = info.types.container(info.default)
+                    else:
+                        kwargs[info.name] = info.default
+                    if info.not_set_warning:
+                        mlog.warning(info.not_set_warning)
+
+                if info.convertor:
+                    kwargs[info.name] = info.convertor(kwargs[info.name])
+
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast(TV_func, wrapper)
+    return inner
+
+
+class FeatureCheckBase(metaclass=abc.ABCMeta):
+    "Base class for feature version checks"
+
+    # In python 3.6 we can just forward declare this, but in 3.5 we can't
+    # This will be overwritten by the subclasses by necessity
+    feature_registry = {}  # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+    def __init__(self, feature_name: str, version: str, extra_message: T.Optional[str] = None):
+        self.feature_name = feature_name  # type: str
+        self.feature_version = version    # type: str
+        self.extra_message = extra_message or ''  # type: str
+
+    @staticmethod
+    def get_target_version(subproject: str) -> str:
+        # Don't do any checks if project() has not been parsed yet
+        if subproject not in mesonlib.project_meson_versions:
+            return ''
+        return mesonlib.project_meson_versions[subproject]
+
+    @staticmethod
+    @abc.abstractmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        pass
+
+    def use(self, subproject: str) -> None:
+        tv = self.get_target_version(subproject)
+        # No target version
+        if tv == '':
+            return
+        # Target version is new enough
+        if self.check_version(tv, self.feature_version):
+            return
+        # Feature is too new for target version, register it
+        if subproject not in self.feature_registry:
+            self.feature_registry[subproject] = {self.feature_version: set()}
+        register = self.feature_registry[subproject]
+        if self.feature_version not in register:
+            register[self.feature_version] = set()
+        if self.feature_name in register[self.feature_version]:
+            # Don't warn about the same feature multiple times
+            # FIXME: This is needed to prevent duplicate warnings, but also
+            # means we won't warn about a feature used in multiple places.
+            return
+        register[self.feature_version].add(self.feature_name)
+        self.log_usage_warning(tv)
+
+    @classmethod
+    def report(cls, subproject: str) -> None:
+        if subproject not in cls.feature_registry:
+            return
+        warning_str = cls.get_warning_str_prefix(cls.get_target_version(subproject))
+        fv = cls.feature_registry[subproject]
+        for version in sorted(fv.keys()):
+            warning_str += '\n * {}: {}'.format(version, fv[version])
+        mlog.warning(warning_str)
+
+    def log_usage_warning(self, tv: str) -> None:
+        raise InterpreterException('log_usage_warning not implemented')
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        raise InterpreterException('get_warning_str_prefix not implemented')
+
+    def __call__(self, f: TV_func) -> TV_func:
+        @wraps(f)
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            subproject = get_callee_args(wrapped_args, want_subproject=True)[4]
+            if subproject is None:
+                raise AssertionError(f'{wrapped_args!r}')
+            self.use(subproject)
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast(TV_func, wrapped)
+
+    @classmethod
+    def single_use(cls, feature_name: str, version: str, subproject: str,
+                   extra_message: T.Optional[str] = None) -> None:
+        """Oneline version that instantiates and calls use()."""
+        cls(feature_name, version, extra_message).use(subproject)
+
+
+class FeatureNew(FeatureCheckBase):
+    """Checks for new features"""
+
+    # Class variable, shared across all instances
+    #
+    # Format: {subproject: {feature_version: set(feature_names)}}
+    feature_registry = {}  # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+    @staticmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        return f'Project specifies a minimum meson_version \'{tv}\' but uses features which were added in newer versions:'
+
+    def log_usage_warning(self, tv: str) -> None:
+        args = [
+            'Project targeting', f"'{tv}'",
+            'but tried to use feature introduced in',
+            f"'{self.feature_version}':",
+            f'{self.feature_name}.',
+        ]
+        if self.extra_message:
+            args.append(self.extra_message)
+        mlog.warning(*args)
+
+class FeatureDeprecated(FeatureCheckBase):
+    """Checks for deprecated features"""
+
+    # Class variable, shared across all instances
+    #
+    # Format: {subproject: {feature_version: set(feature_names)}}
+    feature_registry = {}  # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+    @staticmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        # For deprecation checks we need to return the inverse of FeatureNew checks
+        return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        return 'Deprecated features used:'
+
+    def log_usage_warning(self, tv: str) -> None:
+        args = [
+            'Project targeting', f"'{tv}'",
+            'but tried to use feature deprecated since',
+            f"'{self.feature_version}':",
+            f'{self.feature_name}.',
+        ]
+        if self.extra_message:
+            args.append(self.extra_message)
+        mlog.warning(*args)
+
+
+class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
+
+    @property
+    @abc.abstractmethod
+    def feature_check_class(self) -> T.Type[FeatureCheckBase]:
+        pass
+
+    def __init__(self, feature_name: str, feature_version: str,
+                 kwargs: T.List[str], extra_message: T.Optional[str] = None):
+        self.feature_name = feature_name
+        self.feature_version = feature_version
+        self.kwargs = kwargs
+        self.extra_message = extra_message
+
+    def __call__(self, f: TV_func) -> TV_func:
+        @wraps(f)
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            kwargs, subproject = get_callee_args(wrapped_args, want_subproject=True)[3:5]
+            if subproject is None:
+                raise AssertionError(f'{wrapped_args!r}')
+            for arg in self.kwargs:
+                if arg not in kwargs:
+                    continue
+                name = arg + ' arg in ' + self.feature_name
+                self.feature_check_class.single_use(
+                        name, self.feature_version, subproject, self.extra_message)
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast(TV_func, wrapped)
+
+class FeatureNewKwargs(FeatureCheckKwargsBase):
+    feature_check_class = FeatureNew
+
+class FeatureDeprecatedKwargs(FeatureCheckKwargsBase):
+    feature_check_class = FeatureDeprecated
diff --git a/meson/mesonbuild/interpreterbase/disabler.py b/meson/mesonbuild/interpreterbase/disabler.py
new file mode 100644
index 000000000..81f526466
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/disabler.py
@@ -0,0 +1,42 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .baseobjects import MesonInterpreterObject
+import typing as T
+
+class Disabler(MesonInterpreterObject):
+    def __init__(self) -> None:
+        super().__init__()
+        self.methods.update({'found': self.found_method})
+
+    def found_method(self, args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
+        return False
+
+def _is_arg_disabled(arg: T.Any) -> bool:
+    if isinstance(arg, Disabler):
+        return True
+    if isinstance(arg, list):
+        for i in arg:
+            if _is_arg_disabled(i):
+                return True
+    return False
+
+def is_disabled(args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
+    for i in args:
+        if _is_arg_disabled(i):
+            return True
+    for i in kwargs.values():
+        if _is_arg_disabled(i):
+            return True
+    return False
diff --git a/meson/mesonbuild/interpreterbase/exceptions.py b/meson/mesonbuild/interpreterbase/exceptions.py
new file mode 100644
index 000000000..cdbe0fb3b
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/exceptions.py
@@ -0,0 +1,33 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..mesonlib import MesonException
+
+class InterpreterException(MesonException):
+    pass
+
+class InvalidCode(InterpreterException):
+    pass
+
+class InvalidArguments(InterpreterException):
+    pass
+
+class SubdirDoneRequest(BaseException):
+    pass
+
+class ContinueRequest(BaseException):
+    pass
+
+class BreakRequest(BaseException):
+    pass
diff --git a/meson/mesonbuild/interpreterbase/helpers.py b/meson/mesonbuild/interpreterbase/helpers.py
new file mode 100644
index 000000000..235257795
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/helpers.py
@@ -0,0 +1,118 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib, mparser, mlog
+from .exceptions import InvalidArguments, InterpreterException
+
+import collections.abc
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .baseobjects import TYPE_var, TYPE_kwargs
+
+def flatten(args: T.Union['TYPE_var', T.List['TYPE_var']]) -> T.List['TYPE_var']:
+    if isinstance(args, mparser.StringNode):
+        assert isinstance(args.value, str)
+        return [args.value]
+    if not isinstance(args, collections.abc.Sequence):
+        return [args]
+    result: T.List['TYPE_var'] = []
+    for a in args:
+        if isinstance(a, list):
+            rest = flatten(a)
+            result = result + rest
+        elif isinstance(a, mparser.StringNode):
+            result.append(a.value)
+        else:
+            result.append(a)
+    return result
+
+def resolve_second_level_holders(args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.Tuple[T.List['TYPE_var'], 'TYPE_kwargs']:
+    def resolver(arg: 'TYPE_var') -> 'TYPE_var':
+        if isinstance(arg, list):
+            return [resolver(x) for x in arg]
+        if isinstance(arg, dict):
+            return {k: resolver(v) for k, v in arg.items()}
+        if isinstance(arg, mesonlib.SecondLevelHolder):
+            return arg.get_default_object()
+        return arg
+    return [resolver(x) for x in args], {k: resolver(v) for k, v in kwargs.items()}
+
+def check_stringlist(a: T.Any, msg: str = 'Arguments must be strings.') -> None:
+    if not isinstance(a, list):
+        mlog.debug('Not a list:', str(a))
+        raise InvalidArguments('Argument not a list.')
+    if not all(isinstance(s, str) for s in a):
+        mlog.debug('Element not a string:', str(a))
+        raise InvalidArguments(msg)
+
+def default_resolve_key(key: mparser.BaseNode) -> str:
+    if not isinstance(key, mparser.IdNode):
+        raise InterpreterException('Invalid kwargs format.')
+    return key.value
+
+def get_callee_args(wrapped_args: T.Sequence[T.Any], want_subproject: bool = False) -> T.Tuple[T.Any, mparser.BaseNode, T.List['TYPE_var'], 'TYPE_kwargs', T.Optional[str]]:
+    s = wrapped_args[0]
+    n = len(wrapped_args)
+    # Raise an error if the codepaths are not there
+    subproject = None  # type: T.Optional[str]
+    if want_subproject and n == 2:
+        if hasattr(s, 'subproject'):
+            # Interpreter base types have 2 args: self, node
+            node = wrapped_args[1]
+            # args and kwargs are inside the node
+            args = None
+            kwargs = None
+            subproject = s.subproject
+        elif hasattr(wrapped_args[1], 'subproject'):
+            # Module objects have 2 args: self, interpreter
+            node = wrapped_args[1].current_node
+            # args and kwargs are inside the node
+            args = None
+            kwargs = None
+            subproject = wrapped_args[1].subproject
+        else:
+            raise AssertionError(f'Unknown args: {wrapped_args!r}')
+    elif n == 3:
+        # Methods on objects (*Holder, MesonMain, etc) have 3 args: self, args, kwargs
+        node = s.current_node
+        args = wrapped_args[1]
+        kwargs = wrapped_args[2]
+        if want_subproject:
+            if hasattr(s, 'subproject'):
+                subproject = s.subproject
+            elif hasattr(s, 'interpreter'):
+                subproject = s.interpreter.subproject
+    elif n == 4:
+        # Meson functions have 4 args: self, node, args, kwargs
+        # Module functions have 4 args: self, state, args, kwargs
+        from .interpreterbase import InterpreterBase  # TODO: refactor to avoid this import
+        if isinstance(s, InterpreterBase):
+            node = wrapped_args[1]
+        else:
+            node = wrapped_args[1].current_node
+        args = wrapped_args[2]
+        kwargs = wrapped_args[3]
+        if want_subproject:
+            if isinstance(s, InterpreterBase):
+                subproject = s.subproject
+            else:
+                subproject = wrapped_args[1].subproject
+    else:
+        raise AssertionError(f'Unknown args: {wrapped_args!r}')
+    # Sometimes interpreter methods are called internally with None instead of
+    # empty list/dict
+    args = args if args is not None else []
+    kwargs = kwargs if kwargs is not None else {}
+    return s, node, args, kwargs, subproject
diff --git a/meson/mesonbuild/interpreterbase/interpreterbase.py b/meson/mesonbuild/interpreterbase/interpreterbase.py
new file mode 100644
index 000000000..115e24be0
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/interpreterbase.py
@@ -0,0 +1,959 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+from .. import mparser, mesonlib, mlog
+from .. import environment
+
+from .baseobjects import (
+    InterpreterObject,
+    MesonInterpreterObject,
+    MutableInterpreterObject,
+    InterpreterObjectTypeVar,
+    ObjectHolder,
+    RangeHolder,
+
+    TYPE_elementary,
+    TYPE_var,
+    TYPE_kwargs,
+)
+
+from .exceptions import (
+    InterpreterException,
+    InvalidCode,
+    InvalidArguments,
+    SubdirDoneRequest,
+    ContinueRequest,
+    BreakRequest
+)
+
+from .decorators import FeatureNew, builtinMethodNoKwargs
+from .disabler import Disabler, is_disabled
+from .helpers import check_stringlist, default_resolve_key, flatten, resolve_second_level_holders
+from ._unholder import _unholder
+
+import os, copy, re
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+
+HolderMapType = T.Dict[
+    T.Type[mesonlib.HoldableObject],
+    # For some reason, this has to be a callable and can't just be ObjectHolder[InterpreterObjectTypeVar]
+    T.Callable[[InterpreterObjectTypeVar, 'Interpreter'], ObjectHolder[InterpreterObjectTypeVar]]
+]
+
+FunctionType = T.Dict[
+    str,
+    T.Callable[[mparser.BaseNode, T.List[TYPE_var], T.Dict[str, TYPE_var]], TYPE_var]
+]
+
+class MesonVersionString(str):
+    pass
+
+class InterpreterBase:
+    elementary_types = (int, str, bool, list)
+
+    def __init__(self, source_root: str, subdir: str, subproject: str):
+        self.source_root = source_root
+        self.funcs: FunctionType = {}
+        self.builtin: T.Dict[str, InterpreterObject] = {}
+        # Holder maps store a mapping from a HoldableObject to an ObjectHolder class
+        self.holder_map: HolderMapType = {}
+        self.bound_holder_map: HolderMapType = {}
+        self.subdir = subdir
+        self.root_subdir = subdir
+        self.subproject = subproject
+        # TODO: This should actually be more strict: T.Union[TYPE_elementary, InterpreterObject]
+        self.variables: T.Dict[str, T.Union[TYPE_var, InterpreterObject]] = {}
+        self.argument_depth = 0
+        self.current_lineno = -1
+        # Current node set during a function call. This can be used as location
+        # when printing a warning message during a method call.
+        self.current_node = None  # type: mparser.BaseNode
+        # This is set to `version_string` when this statement is evaluated:
+        # meson.version().compare_version(version_string)
+        # If it was part of an if-clause, it is used to temporarily override the
+        # current meson version target within that if-block.
+        self.tmp_meson_version = None # type: T.Optional[str]
+
+    def load_root_meson_file(self) -> None:
+        mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
+        if not os.path.isfile(mesonfile):
+            raise InvalidArguments('Missing Meson file in %s' % mesonfile)
+        with open(mesonfile, encoding='utf-8') as mf:
+            code = mf.read()
+        if code.isspace():
+            raise InvalidCode('Builder file is empty.')
+        assert(isinstance(code, str))
+        try:
+            self.ast = mparser.Parser(code, mesonfile).parse()
+        except mesonlib.MesonException as me:
+            me.file = mesonfile
+            raise me
+
+    def join_path_strings(self, args: T.Sequence[str]) -> str:
+        return os.path.join(*args).replace('\\', '/')
+
+    def parse_project(self) -> None:
+        """
+        Parses project() and initializes languages, compilers etc. Do this
+        early because we need this before we parse the rest of the AST.
+        """
+        self.evaluate_codeblock(self.ast, end=1)
+
+    def sanity_check_ast(self) -> None:
+        if not isinstance(self.ast, mparser.CodeBlockNode):
+            raise InvalidCode('AST is of invalid type. Possibly a bug in the parser.')
+        if not self.ast.lines:
+            raise InvalidCode('No statements in code.')
+        first = self.ast.lines[0]
+        if not isinstance(first, mparser.FunctionNode) or first.func_name != 'project':
+            raise InvalidCode('First statement must be a call to project')
+
+    def run(self) -> None:
+        # Evaluate everything after the first line, which is project() because
+        # we already parsed that in self.parse_project()
+        try:
+            self.evaluate_codeblock(self.ast, start=1)
+        except SubdirDoneRequest:
+            pass
+
+    def evaluate_codeblock(self, node: mparser.CodeBlockNode, start: int = 0, end: T.Optional[int] = None) -> None:
+        if node is None:
+            return
+        if not isinstance(node, mparser.CodeBlockNode):
+            e = InvalidCode('Tried to execute a non-codeblock. Possibly a bug in the parser.')
+            e.lineno = node.lineno
+            e.colno = node.colno
+            raise e
+        statements = node.lines[start:end]
+        i = 0
+        while i < len(statements):
+            cur = statements[i]
+            try:
+                self.current_lineno = cur.lineno
+                self.evaluate_statement(cur)
+            except Exception as e:
+                if getattr(e, 'lineno', None) is None:
+                    # We are doing the equivalent of setattr here and mypy does not like it
+                    e.lineno = cur.lineno                                                             # type: ignore
+                    e.colno = cur.colno                                                               # type: ignore
+                    e.file = os.path.join(self.source_root, self.subdir, environment.build_filename)  # type: ignore
+                raise e
+            i += 1 # In THE FUTURE jump over blocks and stuff.
+
+    def evaluate_statement(self, cur: mparser.BaseNode) -> T.Optional[T.Union[TYPE_var, InterpreterObject]]:
+        self.current_node = cur
+        if isinstance(cur, mparser.FunctionNode):
+            return self.function_call(cur)
+        elif isinstance(cur, mparser.AssignmentNode):
+            self.assignment(cur)
+        elif isinstance(cur, mparser.MethodNode):
+            return self.method_call(cur)
+        elif isinstance(cur, mparser.StringNode):
+            return cur.value
+        elif isinstance(cur, mparser.BooleanNode):
+            return cur.value
+        elif isinstance(cur, mparser.IfClauseNode):
+            return self.evaluate_if(cur)
+        elif isinstance(cur, mparser.IdNode):
+            return self.get_variable(cur.value)
+        elif isinstance(cur, mparser.ComparisonNode):
+            return self.evaluate_comparison(cur)
+        elif isinstance(cur, mparser.ArrayNode):
+            return self.evaluate_arraystatement(cur)
+        elif isinstance(cur, mparser.DictNode):
+            return self.evaluate_dictstatement(cur)
+        elif isinstance(cur, mparser.NumberNode):
+            return cur.value
+        elif isinstance(cur, mparser.AndNode):
+            return self.evaluate_andstatement(cur)
+        elif isinstance(cur, mparser.OrNode):
+            return self.evaluate_orstatement(cur)
+        elif isinstance(cur, mparser.NotNode):
+            return self.evaluate_notstatement(cur)
+        elif isinstance(cur, mparser.UMinusNode):
+            return self.evaluate_uminusstatement(cur)
+        elif isinstance(cur, mparser.ArithmeticNode):
+            return self.evaluate_arithmeticstatement(cur)
+        elif isinstance(cur, mparser.ForeachClauseNode):
+            self.evaluate_foreach(cur)
+        elif isinstance(cur, mparser.PlusAssignmentNode):
+            self.evaluate_plusassign(cur)
+        elif isinstance(cur, mparser.IndexNode):
+            return self.evaluate_indexing(cur)
+        elif isinstance(cur, mparser.TernaryNode):
+            return self.evaluate_ternary(cur)
+        elif isinstance(cur, mparser.FormatStringNode):
+            return self.evaluate_fstring(cur)
+        elif isinstance(cur, mparser.ContinueNode):
+            raise ContinueRequest()
+        elif isinstance(cur, mparser.BreakNode):
+            raise BreakRequest()
+        elif isinstance(cur, self.elementary_types):
+            return cur
+        else:
+            raise InvalidCode("Unknown statement.")
+        return None
+
+    def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> T.List[T.Union[TYPE_var, InterpreterObject]]:
+        (arguments, kwargs) = self.reduce_arguments(cur.args)
+        if len(kwargs) > 0:
+            raise InvalidCode('Keyword arguments are invalid in array construction.')
+        return arguments
+
+    @FeatureNew('dict', '0.47.0')
+    def evaluate_dictstatement(self, cur: mparser.DictNode) -> T.Union[TYPE_var, InterpreterObject]:
+        def resolve_key(key: mparser.BaseNode) -> str:
+            if not isinstance(key, mparser.StringNode):
+                FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject)
+            str_key = self.evaluate_statement(key)
+            if not isinstance(str_key, str):
+                raise InvalidArguments('Key must be a string')
+            return str_key
+        arguments, kwargs = self.reduce_arguments(cur.args, key_resolver=resolve_key, duplicate_key_error='Duplicate dictionary key: {}')
+        assert not arguments
+        return kwargs
+
+    def evaluate_notstatement(self, cur: mparser.NotNode) -> T.Union[bool, Disabler]:
+        v = self.evaluate_statement(cur.value)
+        if isinstance(v, Disabler):
+            return v
+        if not isinstance(v, bool):
+            raise InterpreterException('Argument to "not" is not a boolean.')
+        return not v
+
+    def evaluate_if(self, node: mparser.IfClauseNode) -> T.Optional[Disabler]:
+        assert(isinstance(node, mparser.IfClauseNode))
+        for i in node.ifs:
+            # Reset self.tmp_meson_version to know if it gets set during this
+            # statement evaluation.
+            self.tmp_meson_version = None
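+            # Illustrative example: a condition such as
+            #   meson.version().version_compare('>=0.58')
+            # sets tmp_meson_version to '>=0.58' while it is evaluated, and the
+            # matching block below is then run with that version temporarily used
+            # as the meson version target (see the tmp_meson_version comment in __init__).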
+            result = self.evaluate_statement(i.condition)
+            if isinstance(result, Disabler):
+                return result
+            if not(isinstance(result, bool)):
+                raise InvalidCode(f'If clause {result!r} does not evaluate to true or false.')
+            if result:
+                prev_meson_version = mesonlib.project_meson_versions[self.subproject]
+                if self.tmp_meson_version:
+                    mesonlib.project_meson_versions[self.subproject] = self.tmp_meson_version
+                try:
+                    self.evaluate_codeblock(i.block)
+                finally:
+                    mesonlib.project_meson_versions[self.subproject] = prev_meson_version
+                return None
+        if not isinstance(node.elseblock, mparser.EmptyNode):
+            self.evaluate_codeblock(node.elseblock)
+        return None
+
+    def validate_comparison_types(self, val1: T.Any, val2: T.Any) -> bool:
+        if type(val1) != type(val2):
+            return False
+        return True
+
+    def evaluate_in(self, val1: T.Any, val2: T.Any) -> bool:
+        if not isinstance(val1, (str, int, float, mesonlib.HoldableObject)):
+            raise InvalidArguments('lvalue of "in" operator must be a string, integer, float, or object')
+        if not isinstance(val2, (list, dict)):
+            raise InvalidArguments('rvalue of "in" operator must be an array or a dict')
+        return val1 in val2
+
+    def evaluate_comparison(self, node: mparser.ComparisonNode) -> T.Union[bool, Disabler]:
+        val1 = self.evaluate_statement(node.left)
+        if isinstance(val1, Disabler):
+            return val1
+        val2 = self.evaluate_statement(node.right)
+        if isinstance(val2, Disabler):
+            return val2
+        # Do not compare the ObjectHolders but the actual held objects
+        val1 = _unholder(val1)
+        val2 = _unholder(val2)
+        if node.ctype == 'in':
+            return self.evaluate_in(val1, val2)
+        elif node.ctype == 'notin':
+            return not self.evaluate_in(val1, val2)
+        valid = self.validate_comparison_types(val1, val2)
+        # Ordering comparisons of different types isn't allowed since PR #1810
+        # (0.41.0).  Since PR #2884 we also warn about equality comparisons of
+        # different types, which will one day become an error.
+        if not valid and (node.ctype == '==' or node.ctype == '!='):
+            mlog.warning('''Trying to compare values of different types ({}, {}) using {}.
+The result of this is undefined and will become a hard error in a future Meson release.'''
+                         .format(type(val1).__name__, type(val2).__name__, node.ctype), location=node)
+        if node.ctype == '==':
+            return val1 == val2
+        elif node.ctype == '!=':
+            return val1 != val2
+        elif not valid:
+            raise InterpreterException(
+                'Values of different types ({}, {}) cannot be compared using {}.'.format(type(val1).__name__,
+                                                                                         type(val2).__name__,
+                                                                                         node.ctype))
+        elif not isinstance(val1, self.elementary_types):
+            raise InterpreterException('{} can only be compared for equality.'.format(getattr(node.left, 'value', '')))
+        elif not isinstance(val2, self.elementary_types):
+            raise InterpreterException('{} can only be compared for equality.'.format(getattr(node.right, 'value', '')))
+        # Use type: ignore because mypy will complain that we are comparing two Unions,
+        # but we actually guarantee earlier that both types are the same
+        elif node.ctype == '<':
+            return val1 < val2   # type: ignore
+        elif node.ctype == '<=':
+            return val1 <= val2  # type: ignore
+        elif node.ctype == '>':
+            return val1 > val2   # type: ignore
+        elif node.ctype == '>=':
+            return val1 >= val2  # type: ignore
+        else:
+            raise InvalidCode('You broke my compare eval.')
+
+    def evaluate_andstatement(self, cur: mparser.AndNode) -> T.Union[bool, Disabler]:
+        l = self.evaluate_statement(cur.left)
+        if isinstance(l, Disabler):
+            return l
+        if not isinstance(l, bool):
+            raise InterpreterException('First argument to "and" is not a boolean.')
+        if not l:
+            return False
+        r = self.evaluate_statement(cur.right)
+        if isinstance(r, Disabler):
+            return r
+        if not isinstance(r, bool):
+            raise InterpreterException('Second argument to "and" is not a boolean.')
+        return r
+
+    def evaluate_orstatement(self, cur: mparser.OrNode) -> T.Union[bool, Disabler]:
+        l = self.evaluate_statement(cur.left)
+        if isinstance(l, Disabler):
+            return l
+        if not isinstance(l, bool):
+            raise InterpreterException('First argument to "or" is not a boolean.')
+        if l:
+            return True
+        r = self.evaluate_statement(cur.right)
+        if isinstance(r, Disabler):
+            return r
+        if not isinstance(r, bool):
+            raise InterpreterException('Second argument to "or" is not a boolean.')
+        return r
+
+    def evaluate_uminusstatement(self, cur: mparser.UMinusNode) -> T.Union[int, Disabler]:
+        v = self.evaluate_statement(cur.value)
+        if isinstance(v, Disabler):
+            return v
+        if not isinstance(v, int):
+            raise InterpreterException('Argument to negation is not an integer.')
+        return -v
+
+    @FeatureNew('/ with string arguments', '0.49.0')
+    def evaluate_path_join(self, l: str, r: str) -> str:
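+        # Illustrative example: the Meson expression 'foo' / 'bar' is routed here
+        # from evaluate_division and yields 'foo/bar' (join_path_strings also
+        # normalises backslashes to forward slashes).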
+        if not isinstance(l, str):
+            raise InvalidCode('The division operator can only append to a string.')
+        if not isinstance(r, str):
+            raise InvalidCode('The division operator can only append a string.')
+        return self.join_path_strings((l, r))
+
+    def evaluate_division(self, l: T.Any, r: T.Any) -> T.Union[int, str]:
+        if isinstance(l, str) or isinstance(r, str):
+            return self.evaluate_path_join(l, r)
+        if isinstance(l, int) and isinstance(r, int):
+            if r == 0:
+                raise InvalidCode('Division by zero.')
+            return l // r
+        raise InvalidCode('Division works only with strings or integers.')
+
+    def evaluate_arithmeticstatement(self, cur: mparser.ArithmeticNode) -> T.Union[int, str, dict, list, Disabler]:
+        l = self.evaluate_statement(cur.left)
+        if isinstance(l, Disabler):
+            return l
+        r = self.evaluate_statement(cur.right)
+        if isinstance(r, Disabler):
+            return r
+
+        if cur.operation == 'add':
+            if isinstance(l, dict) and isinstance(r, dict):
+                return {**l, **r}
+            try:
+                # MyPy error due to handling two Unions (we are catching all exceptions anyway)
+                return l + r  # type: ignore
+            except Exception as e:
+                raise InvalidCode('Invalid use of addition: ' + str(e))
+        elif cur.operation == 'sub':
+            if not isinstance(l, int) or not isinstance(r, int):
+                raise InvalidCode('Subtraction works only with integers.')
+            return l - r
+        elif cur.operation == 'mul':
+            if not isinstance(l, int) or not isinstance(r, int):
+                raise InvalidCode('Multiplication works only with integers.')
+            return l * r
+        elif cur.operation == 'div':
+            return self.evaluate_division(l, r)
+        elif cur.operation == 'mod':
+            if not isinstance(l, int) or not isinstance(r, int):
+                raise InvalidCode('Modulo works only with integers.')
+            return l % r
+        else:
+            raise InvalidCode('You broke me.')
+
+    def evaluate_ternary(self, node: mparser.TernaryNode) -> T.Union[TYPE_var, InterpreterObject]:
+        assert(isinstance(node, mparser.TernaryNode))
+        result = self.evaluate_statement(node.condition)
+        if isinstance(result, Disabler):
+            return result
+        if not isinstance(result, bool):
+            raise InterpreterException('Ternary condition is not boolean.')
+        if result:
+            return self.evaluate_statement(node.trueblock)
+        else:
+            return self.evaluate_statement(node.falseblock)
+
+    @FeatureNew('format strings', '0.58.0')
+    def evaluate_fstring(self, node: mparser.FormatStringNode) -> TYPE_var:
+        assert(isinstance(node, mparser.FormatStringNode))
+
+        def replace(match: T.Match[str]) -> str:
+            var = str(match.group(1))
+            try:
+                val = self.variables[var]
+                if not isinstance(val, (str, int, float, bool)):
+                    raise InvalidCode(f'Identifier "{var}" does not name a formattable variable ' +
+                        '(has to be an integer, a string, a floating point number or a boolean).')
+
+                return str(val)
+            except KeyError:
+                raise InvalidCode(f'Identifier "{var}" does not name a variable.')
+
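+        # Illustrative example: with a variable `name = 'meson'` in scope, the
+        # format string f'Hello @name@' evaluates to 'Hello meson'; an unknown or
+        # non-formattable identifier raises InvalidCode in replace() above.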
+        return re.sub(r'@([_a-zA-Z][_0-9a-zA-Z]*)@', replace, node.value)
+
+    def evaluate_foreach(self, node: mparser.ForeachClauseNode) -> None:
+        assert(isinstance(node, mparser.ForeachClauseNode))
+        items = self.evaluate_statement(node.items)
+
+        if isinstance(items, (list, RangeHolder)):
+            if len(node.varnames) != 1:
+                raise InvalidArguments('Foreach on array does not unpack')
+            varname = node.varnames[0]
+            for item in items:
+                self.set_variable(varname, item)
+                try:
+                    self.evaluate_codeblock(node.block)
+                except ContinueRequest:
+                    continue
+                except BreakRequest:
+                    break
+        elif isinstance(items, dict):
+            if len(node.varnames) != 2:
+                raise InvalidArguments('Foreach on dict unpacks key and value')
+            for key, value in sorted(items.items()):
+                self.set_variable(node.varnames[0], key)
+                self.set_variable(node.varnames[1], value)
+                try:
+                    self.evaluate_codeblock(node.block)
+                except ContinueRequest:
+                    continue
+                except BreakRequest:
+                    break
+        else:
+            raise InvalidArguments('Items of foreach loop must be an array or a dict')
+
+    def evaluate_plusassign(self, node: mparser.PlusAssignmentNode) -> None:
+        assert(isinstance(node, mparser.PlusAssignmentNode))
+        varname = node.var_name
+        addition = self.evaluate_statement(node.value)
+
+        # Remember that all variables are immutable. We must always create a
+        # full new variable and then assign it.
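+        # Illustrative example: if `x` currently holds the list [1], then `x += 2`
+        # builds the new list [1, 2] and rebinds `x`; the old list is not mutated.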
+        old_variable = self.get_variable(varname)
+        new_value = None  # type: T.Union[str, int, float, bool, dict, list]
+        if isinstance(old_variable, str):
+            if not isinstance(addition, str):
+                raise InvalidArguments('The += operator requires a string on the right hand side if the variable on the left is a string')
+            new_value = old_variable + addition
+        elif isinstance(old_variable, int):
+            if not isinstance(addition, int):
+                raise InvalidArguments('The += operator requires an int on the right hand side if the variable on the left is an int')
+            new_value = old_variable + addition
+        elif isinstance(old_variable, list):
+            if isinstance(addition, list):
+                new_value = old_variable + addition
+            else:
+                new_value = old_variable + [addition]
+        elif isinstance(old_variable, dict):
+            if not isinstance(addition, dict):
+                raise InvalidArguments('The += operator requires a dict on the right hand side if the variable on the left is a dict')
+            new_value = {**old_variable, **addition}
+        # Add other data types here.
+        else:
+            raise InvalidArguments('The += operator currently only works with arrays, dicts, strings or ints')
+        self.set_variable(varname, new_value)
+
+    def evaluate_indexing(self, node: mparser.IndexNode) -> T.Union[TYPE_elementary, InterpreterObject]:
+        assert(isinstance(node, mparser.IndexNode))
+        iobject = self.evaluate_statement(node.iobject)
+        if isinstance(iobject, Disabler):
+            return iobject
+        if not hasattr(iobject, '__getitem__'):
+            raise InterpreterException(
+                'Tried to index an object that doesn\'t support indexing.')
+        index = self.evaluate_statement(node.index)
+
+        if isinstance(iobject, dict):
+            if not isinstance(index, str):
+                raise InterpreterException('Key is not a string')
+            try:
+                # The cast is required because we don't have recursive types...
+                return T.cast(T.Union[TYPE_elementary, InterpreterObject], iobject[index])
+            except KeyError:
+                raise InterpreterException('Key %s is not in dict' % index)
+        else:
+            if not isinstance(index, int):
+                raise InterpreterException('Index value is not an integer.')
+            try:
+                # Ignore the MyPy error, since we don't know all indexable types here
+                # and we handle non indexable types with an exception
+                # TODO maybe find a better solution
+                res = iobject[index]  # type: ignore
+                # Only holderify if we are dealing with `InterpreterObject`, since raw
+                # lists already store ObjectHolders
+                if isinstance(iobject, InterpreterObject):
+                    return self._holderify(res)
+                else:
+                    return res
+            except IndexError:
+                # We are already checking for the existence of __getitem__, so this should be safe
+                raise InterpreterException('Index %d out of bounds of array of size %d.' % (index, len(iobject)))  # type: ignore
+
+    def function_call(self, node: mparser.FunctionNode) -> T.Optional[T.Union[TYPE_elementary, InterpreterObject]]:
+        func_name = node.func_name
+        (h_posargs, h_kwargs) = self.reduce_arguments(node.args)
+        (posargs, kwargs) = self._unholder_args(h_posargs, h_kwargs)
+        if is_disabled(posargs, kwargs) and func_name not in {'get_variable', 'set_variable', 'is_disabler'}:
+            return Disabler()
+        if func_name in self.funcs:
+            func = self.funcs[func_name]
+            func_args = posargs
+            if not getattr(func, 'no-args-flattening', False):
+                func_args = flatten(posargs)
+            if not getattr(func, 'no-second-level-holder-flattening', False):
+                func_args, kwargs = resolve_second_level_holders(func_args, kwargs)
+            res = func(node, func_args, kwargs)
+            return self._holderify(res)
+        else:
+            self.unknown_function_called(func_name)
+            return None
+
+    def method_call(self, node: mparser.MethodNode) -> T.Optional[T.Union[TYPE_var, InterpreterObject]]:
+        invokable = node.source_object
+        obj: T.Union[TYPE_var, InterpreterObject]
+        if isinstance(invokable, mparser.IdNode):
+            object_name = invokable.value
+            obj = self.get_variable(object_name)
+        else:
+            obj = self.evaluate_statement(invokable)
+        method_name = node.name
+        (h_args, h_kwargs) = self.reduce_arguments(node.args)
+        (args, kwargs) = self._unholder_args(h_args, h_kwargs)
+        if is_disabled(args, kwargs):
+            return Disabler()
+        if isinstance(obj, str):
+            return self.string_method_call(obj, method_name, args, kwargs)
+        if isinstance(obj, bool):
+            return self.bool_method_call(obj, method_name, args, kwargs)
+        if isinstance(obj, int):
+            return self.int_method_call(obj, method_name, args, kwargs)
+        if isinstance(obj, list):
+            return self.array_method_call(obj, method_name, args, kwargs)
+        if isinstance(obj, dict):
+            return self.dict_method_call(obj, method_name, args, kwargs)
+        if not isinstance(obj, InterpreterObject):
+            raise InvalidArguments('Variable "%s" is not callable.' % object_name)
+        # Special case. This is the only thing you can do with a disabler
+        # object. Every other use immediately returns the disabler object.
+        if isinstance(obj, Disabler):
+            if method_name == 'found':
+                return False
+            else:
+                return Disabler()
+        # TODO: InterpreterBase **really** shouldn't be in charge of checking this
+        if method_name == 'extract_objects':
+            if not isinstance(obj, ObjectHolder):
+                raise InvalidArguments(f'Invalid operation "extract_objects" on variable "{object_name}" of type {type(obj).__name__}')
+            self.validate_extraction(obj.held_object)
+        obj.current_node = node
+        return self._holderify(obj.method_call(method_name, args, kwargs))
+
+    def _holderify(self, res: T.Union[TYPE_var, InterpreterObject, None]) -> T.Union[TYPE_elementary, InterpreterObject]:
+        if res is None:
+            return None
+        if isinstance(res, (int, bool, str)):
+            return res
+        elif isinstance(res, list):
+            return [self._holderify(x) for x in res]
+        elif isinstance(res, dict):
+            return {k: self._holderify(v) for k, v in res.items()}
+        elif isinstance(res, mesonlib.HoldableObject):
+            # Always check for an exact match first.
+            cls = self.holder_map.get(type(res), None)
+            if cls is not None:
+                # Casts to Interpreter are required here since an assertion would
+                # not work for the `ast` module.
+                return cls(res, T.cast('Interpreter', self))
+            # Try the boundary types next.
+            for typ, cls in self.bound_holder_map.items():
+                if isinstance(res, typ):
+                    return cls(res, T.cast('Interpreter', self))
+            raise mesonlib.MesonBugException(f'Object {res} of type {type(res).__name__} is neither in self.holder_map nor self.bound_holder_map.')
+        elif isinstance(res, ObjectHolder):
+            raise mesonlib.MesonBugException(f'Returned object {res} of type {type(res).__name__} is an object holder.')
+        elif isinstance(res, MesonInterpreterObject):
+            return res
+        raise mesonlib.MesonBugException(f'Unknown returned object {res} of type {type(res).__name__} in the parameters.')
+
+    def _unholder_args(self,
+                       args: T.List[T.Union[TYPE_var, InterpreterObject]],
+                       kwargs: T.Dict[str, T.Union[TYPE_var, InterpreterObject]]) -> T.Tuple[T.List[TYPE_var], TYPE_kwargs]:
+        return [_unholder(x) for x in args], {k: _unholder(v) for k, v in kwargs.items()}
+
+    @builtinMethodNoKwargs
+    def bool_method_call(self, obj: bool, method_name: str, posargs: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int]:
+        if method_name == 'to_string':
+            if not posargs:
+                if obj:
+                    return 'true'
+                else:
+                    return 'false'
+            elif len(posargs) == 2 and isinstance(posargs[0], str) and isinstance(posargs[1], str):
+                if obj:
+                    return posargs[0]
+                else:
+                    return posargs[1]
+            else:
+                raise InterpreterException('bool.to_string() must have either no arguments or exactly two string arguments that signify what values to return for true and false.')
+        elif method_name == 'to_int':
+            if obj:
+                return 1
+            else:
+                return 0
+        else:
+            raise InterpreterException('Unknown method "%s" for a boolean.' % method_name)
+
+    @builtinMethodNoKwargs
+    def int_method_call(self, obj: int, method_name: str, posargs: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, bool]:
+        if method_name == 'is_even':
+            if not posargs:
+                return obj % 2 == 0
+            else:
+                raise InterpreterException('int.is_even() must have no arguments.')
+        elif method_name == 'is_odd':
+            if not posargs:
+                return obj % 2 != 0
+            else:
+                raise InterpreterException('int.is_odd() must have no arguments.')
+        elif method_name == 'to_string':
+            if not posargs:
+                return str(obj)
+            else:
+                raise InterpreterException('int.to_string() must have no arguments.')
+        else:
+            raise InterpreterException('Unknown method "%s" for an integer.' % method_name)
+
+    @staticmethod
+    def _get_one_string_posarg(posargs: T.List[TYPE_var], method_name: str) -> str:
+        if len(posargs) > 1:
+            raise InterpreterException(f'{method_name}() must have zero or one arguments')
+        elif len(posargs) == 1:
+            s = posargs[0]
+            if not isinstance(s, str):
+                raise InterpreterException(f'{method_name}() argument must be a string')
+            return s
+        return None
+
+    @builtinMethodNoKwargs
+    def string_method_call(self, obj: str, method_name: str, posargs: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int, bool, T.List[str]]:
+        if method_name == 'strip':
+            s1 = self._get_one_string_posarg(posargs, 'strip')
+            if s1 is not None:
+                return obj.strip(s1)
+            return obj.strip()
+        elif method_name == 'format':
+            return self.format_string(obj, posargs)
+        elif method_name == 'to_upper':
+            return obj.upper()
+        elif method_name == 'to_lower':
+            return obj.lower()
+        elif method_name == 'underscorify':
+            return re.sub(r'[^a-zA-Z0-9]', '_', obj)
+        elif method_name == 'split':
+            s2 = self._get_one_string_posarg(posargs, 'split')
+            if s2 is not None:
+                return obj.split(s2)
+            return obj.split()
+        elif method_name == 'startswith' or method_name == 'contains' or method_name == 'endswith':
+            s3 = posargs[0]
+            if not isinstance(s3, str):
+                raise InterpreterException('Argument must be a string.')
+            if method_name == 'startswith':
+                return obj.startswith(s3)
+            elif method_name == 'contains':
+                return obj.find(s3) >= 0
+            return obj.endswith(s3)
+        elif method_name == 'to_int':
+            try:
+                return int(obj)
+            except Exception:
+                raise InterpreterException(f'String {obj!r} cannot be converted to int')
+        elif method_name == 'join':
+            if len(posargs) != 1:
+                raise InterpreterException('Join() takes exactly one argument.')
+            strlist = posargs[0]
+            check_stringlist(strlist)
+            assert isinstance(strlist, list)  # Required for mypy
+            return obj.join(strlist)
+        elif method_name == 'version_compare':
+            if len(posargs) != 1:
+                raise InterpreterException('Version_compare() takes exactly one argument.')
+            cmpr = posargs[0]
+            if not isinstance(cmpr, str):
+                raise InterpreterException('Version_compare() argument must be a string.')
+            if isinstance(obj, MesonVersionString):
+                self.tmp_meson_version = cmpr
+            return mesonlib.version_compare(obj, cmpr)
+        elif method_name == 'substring':
+            if len(posargs) > 2:
+                raise InterpreterException('substring() takes maximum two arguments.')
+            start = 0
+            end = len(obj)
+            if len(posargs) > 0:
+                if not isinstance(posargs[0], int):
+                    raise InterpreterException('substring() argument must be an int')
+                start = posargs[0]
+            if len(posargs) > 1:
+                if not isinstance(posargs[1], int):
+                    raise InterpreterException('substring() argument must be an int')
+                end = posargs[1]
+            return obj[start:end]
+        elif method_name == 'replace':
+            FeatureNew.single_use('str.replace', '0.58.0', self.subproject)
+            if len(posargs) != 2:
+                raise InterpreterException('replace() takes exactly two arguments.')
+            if not isinstance(posargs[0], str) or not isinstance(posargs[1], str):
+                raise InterpreterException('replace() requires that both arguments be strings')
+            return obj.replace(posargs[0], posargs[1])
+        raise InterpreterException('Unknown method "%s" for a string.' % method_name)
+
+    def format_string(self, templ: str, args: T.List[TYPE_var]) -> str:
+        arg_strings = []
+        for arg in args:
+            if isinstance(arg, mparser.BaseNode):
+                arg = self.evaluate_statement(arg)
+            if isinstance(arg, bool): # Python boolean is upper case.
+                arg = str(arg).lower()
+            arg_strings.append(str(arg))
+
+        def arg_replace(match: T.Match[str]) -> str:
+            idx = int(match.group(1))
+            if idx >= len(arg_strings):
+                raise InterpreterException(f'Format placeholder @{idx}@ out of range.')
+            return arg_strings[idx]
+
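+        # Illustrative example: format_string('@0@ and @1@', [3, True]) yields
+        # '3 and true' (Python booleans are lower-cased above); a placeholder index
+        # with no matching argument raises InterpreterException in arg_replace().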
+        return re.sub(r'@(\d+)@', arg_replace, templ)
+
+    def unknown_function_called(self, func_name: str) -> None:
+        raise InvalidCode('Unknown function "%s".' % func_name)
+
+    @builtinMethodNoKwargs
+    def array_method_call(self,
+                          obj: T.List[T.Union[TYPE_elementary, InterpreterObject]],
+                          method_name: str,
+                          posargs: T.List[TYPE_var],
+                          kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+        if method_name == 'contains':
+            def check_contains(el: T.List[TYPE_var]) -> bool:
+                if len(posargs) != 1:
+                    raise InterpreterException('Contains method takes exactly one argument.')
+                item = posargs[0]
+                for element in el:
+                    if isinstance(element, list):
+                        found = check_contains(element)
+                        if found:
+                            return True
+                    if element == item:
+                        return True
+                return False
+            return check_contains([_unholder(x) for x in obj])
+        elif method_name == 'length':
+            return len(obj)
+        elif method_name == 'get':
+            index = posargs[0]
+            fallback = None
+            if len(posargs) == 2:
+                fallback = self._holderify(posargs[1])
+            elif len(posargs) > 2:
+                m = 'Array method \'get()\' only takes two arguments: the ' \
+                    'index and an optional fallback value if the index is ' \
+                    'out of range.'
+                raise InvalidArguments(m)
+            if not isinstance(index, int):
+                raise InvalidArguments('Array index must be a number.')
+            if index < -len(obj) or index >= len(obj):
+                if fallback is None:
+                    m = 'Array index {!r} is out of bounds for array of size {!r}.'
+                    raise InvalidArguments(m.format(index, len(obj)))
+                if isinstance(fallback, mparser.BaseNode):
+                    return self.evaluate_statement(fallback)
+                return fallback
+            return obj[index]
+        raise InterpreterException(f'Arrays do not have a method called {method_name!r}.')
+
+    @builtinMethodNoKwargs
+    def dict_method_call(self,
+                         obj: T.Dict[str, T.Union[TYPE_elementary, InterpreterObject]],
+                         method_name: str,
+                         posargs: T.List[TYPE_var],
+                         kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+        if method_name in ('has_key', 'get'):
+            if method_name == 'has_key':
+                if len(posargs) != 1:
+                    raise InterpreterException('has_key() takes exactly one argument.')
+            else:
+                if len(posargs) not in (1, 2):
+                    raise InterpreterException('get() takes one or two arguments.')
+
+            key = posargs[0]
+            if not isinstance(key, (str)):
+                raise InvalidArguments('Dictionary key must be a string.')
+
+            has_key = key in obj
+
+            if method_name == 'has_key':
+                return has_key
+
+            if has_key:
+                return obj[key]
+
+            if len(posargs) == 2:
+                fallback = self._holderify(posargs[1])
+                if isinstance(fallback, mparser.BaseNode):
+                    return self.evaluate_statement(fallback)
+                return fallback
+
+            raise InterpreterException(f'Key {key!r} is not in the dictionary.')
+
+        if method_name == 'keys':
+            if len(posargs) != 0:
+                raise InterpreterException('keys() takes no arguments.')
+            return sorted(obj.keys())
+
+        raise InterpreterException('Dictionaries do not have a method called "%s".' % method_name)
+
+    def reduce_arguments(
+                self,
+                args: mparser.ArgumentNode,
+                key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key,
+                duplicate_key_error: T.Optional[str] = None,
+            ) -> T.Tuple[
+                T.List[T.Union[TYPE_var, InterpreterObject]],
+                T.Dict[str, T.Union[TYPE_var, InterpreterObject]]
+            ]:
+        assert(isinstance(args, mparser.ArgumentNode))
+        if args.incorrect_order():
+            raise InvalidArguments('All keyword arguments must be after positional arguments.')
+        self.argument_depth += 1
+        reduced_pos: T.List[T.Union[TYPE_var, InterpreterObject]] = [self.evaluate_statement(arg) for arg in args.arguments]
+        reduced_kw: T.Dict[str, T.Union[TYPE_var, InterpreterObject]] = {}
+        for key, val in args.kwargs.items():
+            reduced_key = key_resolver(key)
+            assert isinstance(val, mparser.BaseNode)
+            reduced_val = self.evaluate_statement(val)
+            if duplicate_key_error and reduced_key in reduced_kw:
+                raise InvalidArguments(duplicate_key_error.format(reduced_key))
+            reduced_kw[reduced_key] = reduced_val
+        self.argument_depth -= 1
+        final_kw = self.expand_default_kwargs(reduced_kw)
+        return reduced_pos, final_kw
+
+    def expand_default_kwargs(self, kwargs: T.Dict[str, T.Union[TYPE_var, InterpreterObject]]) -> T.Dict[str, T.Union[TYPE_var, InterpreterObject]]:
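+        # Illustrative example: {'a': 1, 'kwargs': {'b': 2}} is expanded into
+        # {'a': 1, 'b': 2}; a key present both directly and inside 'kwargs'
+        # raises InterpreterException below.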
+        if 'kwargs' not in kwargs:
+            return kwargs
+        to_expand = kwargs.pop('kwargs')
+        if not isinstance(to_expand, dict):
+            raise InterpreterException('Value of "kwargs" must be dictionary.')
+        if 'kwargs' in to_expand:
+            raise InterpreterException('Kwargs argument must not contain a "kwargs" entry. Points for thinking meta, though. :P')
+        for k, v in to_expand.items():
+            if k in kwargs:
+                raise InterpreterException(f'Entry "{k}" defined both as a keyword argument and in a "kwarg" entry.')
+            kwargs[k] = v
+        return kwargs
+
+    def assignment(self, node: mparser.AssignmentNode) -> None:
+        assert(isinstance(node, mparser.AssignmentNode))
+        if self.argument_depth != 0:
+            raise InvalidArguments('''Tried to assign values inside an argument list.
+To specify a keyword argument, use : instead of =.''')
+        var_name = node.var_name
+        if not isinstance(var_name, str):
+            raise InvalidArguments('Tried to assign value to a non-variable.')
+        value = self.evaluate_statement(node.value)
+        if not self.is_assignable(value):
+            raise InvalidCode(f'Tried to assign the invalid value "{value}" of type {type(value).__name__} to variable.')
+        # For mutable objects we need to make a copy on assignment
+        if isinstance(value, MutableInterpreterObject):
+            value = copy.deepcopy(value)
+        self.set_variable(var_name, value)
+        return None
+
+    def set_variable(self, varname: str, variable: T.Union[TYPE_var, InterpreterObject], *, holderify: bool = False) -> None:
+        if variable is None:
+            raise InvalidCode('Can not assign None to variable.')
+        if holderify:
+            variable = self._holderify(variable)
+        else:
+            # Ensure that we are never storing a HoldableObject
+            def check(x: T.Union[TYPE_var, InterpreterObject]) -> None:
+                if isinstance(x, mesonlib.HoldableObject):
+                    raise mesonlib.MesonBugException(f'set_variable in InterpreterBase called with a HoldableObject {x} of type {type(x).__name__}')
+                elif isinstance(x, list):
+                    for y in x:
+                        check(y)
+                elif isinstance(x, dict):
+                    for v in x.values():
+                        check(v)
+            check(variable)
+        if not isinstance(varname, str):
+            raise InvalidCode('First argument to set_variable must be a string.')
+        if not self.is_assignable(variable):
+            raise InvalidCode(f'Assigned value "{variable}" of type {type(variable).__name__} is not an assignable type.')
+        if re.match('[_a-zA-Z][_0-9a-zA-Z]*$', varname) is None:
+            raise InvalidCode('Invalid variable name: ' + varname)
+        if varname in self.builtin:
+            raise InvalidCode('Tried to overwrite internal variable "%s"' % varname)
+        self.variables[varname] = variable
+
+    def get_variable(self, varname: str) -> T.Union[TYPE_var, InterpreterObject]:
+        if varname in self.builtin:
+            return self.builtin[varname]
+        if varname in self.variables:
+            return self.variables[varname]
+        raise InvalidCode('Unknown variable "%s".' % varname)
+
+    def is_assignable(self, value: T.Any) -> bool:
+        return isinstance(value, (InterpreterObject, str, int, list, dict))
+
+    def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
+        raise InterpreterException('validate_extraction is not implemented in this context (please file a bug)')
diff --git a/meson/mesonbuild/linkers/__init__.py b/meson/mesonbuild/linkers/__init__.py
new file mode 100644
index 000000000..9182fa1a8
--- /dev/null
+++ b/meson/mesonbuild/linkers/__init__.py
@@ -0,0 +1,126 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .detect import (
+    defaults,
+    guess_win_linker,
+    guess_nix_linker,
+)
+from .linkers import (
+    RSPFileSyntax,
+
+    StaticLinker,
+    VisualStudioLikeLinker,
+    VisualStudioLinker,
+    IntelVisualStudioLinker,
+    ArLinker,
+    ArmarLinker,
+    DLinker,
+    CcrxLinker,
+    Xc16Linker,
+    CompCertLinker,
+    C2000Linker,
+    AIXArLinker,
+    PGIStaticLinker,
+    NvidiaHPC_StaticLinker,
+
+    DynamicLinker,
+    PosixDynamicLinkerMixin,
+    GnuLikeDynamicLinkerMixin,
+    AppleDynamicLinker,
+    GnuDynamicLinker,
+    GnuGoldDynamicLinker,
+    GnuBFDDynamicLinker,
+    LLVMDynamicLinker,
+    WASMDynamicLinker,
+    CcrxDynamicLinker,
+    Xc16DynamicLinker,
+    CompCertDynamicLinker,
+    C2000DynamicLinker,
+    ArmDynamicLinker,
+    ArmClangDynamicLinker,
+    QualcommLLVMDynamicLinker,
+    PGIDynamicLinker,
+    NvidiaHPC_DynamicLinker,
+
+    VisualStudioLikeLinkerMixin,
+    MSVCDynamicLinker,
+    ClangClDynamicLinker,
+    XilinkDynamicLinker,
+    SolarisDynamicLinker,
+    AIXDynamicLinker,
+    OptlinkDynamicLinker,
+    CudaLinker,
+
+    prepare_rpaths,
+    order_rpaths,
+    evaluate_rpath,
+)
+
+__all__ = [
+    # detect.py
+    'defaults',
+    'guess_win_linker',
+    'guess_nix_linker',
+
+    # linkers.py
+    'RSPFileSyntax',
+
+    'StaticLinker',
+    'VisualStudioLikeLinker',
+    'VisualStudioLinker',
+    'IntelVisualStudioLinker',
+    'ArLinker',
+    'ArmarLinker',
+    'DLinker',
+    'CcrxLinker',
+    'Xc16Linker',
+    'CompCertLinker',
+    'C2000Linker',
+    'AIXArLinker',
+    'PGIStaticLinker',
+    'NvidiaHPC_StaticLinker',
+
+    'DynamicLinker',
+    'PosixDynamicLinkerMixin',
+    'GnuLikeDynamicLinkerMixin',
+    'AppleDynamicLinker',
+    'GnuDynamicLinker',
+    'GnuGoldDynamicLinker',
+    'GnuBFDDynamicLinker',
+    'LLVMDynamicLinker',
+    'WASMDynamicLinker',
+    'CcrxDynamicLinker',
+    'Xc16DynamicLinker',
+    'CompCertDynamicLinker',
+    'C2000DynamicLinker',
+    'ArmDynamicLinker',
+    'ArmClangDynamicLinker',
+    'QualcommLLVMDynamicLinker',
+    'PGIDynamicLinker',
+    'NvidiaHPC_DynamicLinker',
+
+    'VisualStudioLikeLinkerMixin',
+    'MSVCDynamicLinker',
+    'ClangClDynamicLinker',
+    'XilinkDynamicLinker',
+    'SolarisDynamicLinker',
+    'AIXDynamicLinker',
+    'OptlinkDynamicLinker',
+    'CudaLinker',
+
+    'prepare_rpaths',
+    'order_rpaths',
+    'evaluate_rpath',
+]
diff --git a/meson/mesonbuild/linkers/detect.py b/meson/mesonbuild/linkers/detect.py
new file mode 100644
index 000000000..43dccbbcb
--- /dev/null
+++ b/meson/mesonbuild/linkers/detect.py
@@ -0,0 +1,216 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..mesonlib import (
+    EnvironmentException, MachineChoice, OptionKey,
+    Popen_safe, search_version
+)
+from .linkers import (
+    DynamicLinker,
+    AppleDynamicLinker,
+    GnuDynamicLinker,
+    GnuGoldDynamicLinker,
+    GnuBFDDynamicLinker,
+    LLVMDynamicLinker,
+    QualcommLLVMDynamicLinker,
+    MSVCDynamicLinker,
+    ClangClDynamicLinker,
+    SolarisDynamicLinker,
+    AIXDynamicLinker,
+    OptlinkDynamicLinker,
+)
+
+import re
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from ..compilers import Compiler
+
+defaults: T.Dict[str, T.List[str]] = {}
+defaults['static_linker'] = ['ar', 'gar']
+defaults['vs_static_linker'] = ['lib']
+defaults['clang_cl_static_linker'] = ['llvm-lib']
+defaults['cuda_static_linker'] = ['nvlink']
+defaults['gcc_static_linker'] = ['gcc-ar']
+defaults['clang_static_linker'] = ['llvm-ar']
+
+def __failed_to_detect_linker(compiler: T.List[str], args: T.List[str], stdout: str, stderr: str) -> 'T.NoReturn':
+    msg = 'Unable to detect linker for compiler "{} {}"\nstdout: {}\nstderr: {}'.format(
+        ' '.join(compiler), ' '.join(args), stdout, stderr)
+    raise EnvironmentException(msg)
+
+
+def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Type['Compiler'],
+                        for_machine: MachineChoice, *,
+                        use_linker_prefix: bool = True, invoked_directly: bool = True,
+                        extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
+    env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+
+    # Explicitly pass /logo here so that we can get the version of link.exe
+    if not use_linker_prefix or comp_class.LINKER_PREFIX is None:
+        check_args = ['/logo', '--version']
+    elif isinstance(comp_class.LINKER_PREFIX, str):
+        check_args = [comp_class.LINKER_PREFIX + '/logo', comp_class.LINKER_PREFIX + '--version']
+    elif isinstance(comp_class.LINKER_PREFIX, list):
+        check_args = comp_class.LINKER_PREFIX + ['/logo'] + comp_class.LINKER_PREFIX + ['--version']
+
+    check_args += env.coredata.options[OptionKey('args', lang=comp_class.language, machine=for_machine)].value
+
+    override = []  # type: T.List[str]
+    value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
+    if value is not None:
+        override = comp_class.use_linker_args(value[0])
+        check_args += override
+
+    if extra_args is not None:
+        check_args.extend(extra_args)
+
+    p, o, _ = Popen_safe(compiler + check_args)
+    if 'LLD' in o.split('\n')[0]:
+        if '(compatible with GNU linkers)' in o:
+            return LLVMDynamicLinker(
+                compiler, for_machine, comp_class.LINKER_PREFIX,
+                override, version=search_version(o))
+        elif not invoked_directly:
+            return ClangClDynamicLinker(
+                for_machine, override, exelist=compiler, prefix=comp_class.LINKER_PREFIX,
+                version=search_version(o), direct=False, machine=None)
+
+    if value is not None and invoked_directly:
+        compiler = value
+        # We've already handled the non-direct case above
+
+    p, o, e = Popen_safe(compiler + check_args)
+    if 'LLD' in o.split('\n')[0]:
+        return ClangClDynamicLinker(
+            for_machine, [],
+            prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
+            exelist=compiler, version=search_version(o), direct=invoked_directly)
+    elif 'OPTLINK' in o:
+        # Optlink's stdout *may* begin with a \r character.
+        return OptlinkDynamicLinker(compiler, for_machine, version=search_version(o))
+    elif o.startswith('Microsoft') or e.startswith('Microsoft'):
+        out = o or e
+        match = re.search(r'.*(X86|X64|ARM|ARM64).*', out)
+        if match:
+            target = str(match.group(1))
+        else:
+            target = 'x86'
+
+        return MSVCDynamicLinker(
+            for_machine, [], machine=target, exelist=compiler,
+            prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
+            version=search_version(out), direct=invoked_directly)
+    elif 'GNU coreutils' in o:
+        raise EnvironmentException(
+            "Found GNU link.exe instead of MSVC link.exe. This link.exe "
+            "is not a linker. You may need to reorder entries to your "
+            "%PATH% variable to resolve this.")
+    __failed_to_detect_linker(compiler, check_args, o, e)
+
+def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Type['Compiler'],
+                        for_machine: MachineChoice, *,
+                        extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
+    """Helper for guessing what linker to use on Unix-Like OSes.
+
+    :compiler: Invocation to use to get linker
+    :comp_class: The Compiler Type (uninstantiated)
+    :for_machine: which machine this linker targets
+    :extra_args: Any additional arguments required (such as a source file)
+    """
+    env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+    extra_args = extra_args or []
+    extra_args += env.coredata.options[OptionKey('args', lang=comp_class.language, machine=for_machine)].value
+
+    if isinstance(comp_class.LINKER_PREFIX, str):
+        check_args = [comp_class.LINKER_PREFIX + '--version'] + extra_args
+    else:
+        check_args = comp_class.LINKER_PREFIX + ['--version'] + extra_args
+
+    override = []  # type: T.List[str]
+    value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
+    if value is not None:
+        override = comp_class.use_linker_args(value[0])
+        check_args += override
+
+    _, o, e = Popen_safe(compiler + check_args)
+    v = search_version(o + e)
+    linker: DynamicLinker
+    if 'LLD' in o.split('\n')[0]:
+        linker = LLVMDynamicLinker(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif 'Snapdragon' in e and 'LLVM' in e:
+        linker = QualcommLLVMDynamicLinker(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif e.startswith('lld-link: '):
+        # The LLD MinGW frontend didn't respond to --version before version 9.0.0,
+        # and produced an error message about failing to link (when no object
+        # files were specified), instead of printing the version number.
+        # Let's try to extract the linker invocation command to grab the version.
+
+        _, o, e = Popen_safe(compiler + check_args + ['-v'])
+
+        try:
+            linker_cmd = re.match(r'.*\n(.*?)\nlld-link: ', e, re.DOTALL).group(1)
+            linker_cmd = shlex.split(linker_cmd)[0]
+        except (AttributeError, IndexError, ValueError):
+            pass
+        else:
+            _, o, e = Popen_safe([linker_cmd, '--version'])
+            v = search_version(o)
+
+        linker = LLVMDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    # The first check is for Apple clang, the second for real gcc, the third for icc.
+    elif e.endswith('(use -v to see invocation)\n') or 'macosx_version' in e or 'ld: unknown option:' in e:
+        if isinstance(comp_class.LINKER_PREFIX, str):
+            _, _, e = Popen_safe(compiler + [comp_class.LINKER_PREFIX + '-v'] + extra_args)
+        else:
+            _, _, e = Popen_safe(compiler + comp_class.LINKER_PREFIX + ['-v'] + extra_args)
+        for line in e.split('\n'):
+            if 'PROJECT:ld' in line:
+                v = line.split('-')[1]
+                break
+        else:
+            v = 'unknown version'
+        linker = AppleDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif 'GNU' in o or 'GNU' in e:
+        cls: T.Type[GnuDynamicLinker]
+        if 'gold' in o or 'gold' in e:
+            cls = GnuGoldDynamicLinker
+        else:
+            cls = GnuBFDDynamicLinker
+        linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif 'Solaris' in e or 'Solaris' in o:
+        for line in (o+e).split('\n'):
+            if 'ld: Software Generation Utilities' in line:
+                v = line.split(':')[2].lstrip()
+                break
+        else:
+            v = 'unknown version'
+        linker = SolarisDynamicLinker(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override,
+            version=v)
+    elif 'ld: 0706-012 The -- flag is not recognized' in e:
+        if isinstance(comp_class.LINKER_PREFIX, str):
+            _, _, e = Popen_safe(compiler + [comp_class.LINKER_PREFIX + '-V'] + extra_args)
+        else:
+            _, _, e = Popen_safe(compiler + comp_class.LINKER_PREFIX + ['-V'] + extra_args)
+        linker = AIXDynamicLinker(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override,
+            version=search_version(e))
+    else:
+        __failed_to_detect_linker(compiler, check_args, o, e)
+    return linker
diff --git a/meson/mesonbuild/linkers/linkers.py b/meson/mesonbuild/linkers/linkers.py
new file mode 100644
index 000000000..bcd5355b8
--- /dev/null
+++ b/meson/mesonbuild/linkers/linkers.py
@@ -0,0 +1,1433 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import enum
+import os
+import typing as T
+
+from .. import mesonlib
+from ..mesonlib import EnvironmentException, MesonException
+from ..arglist import CompilerArgs
+
+if T.TYPE_CHECKING:
+    from ..coredata import KeyedOptionDictType
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+
+
+@enum.unique
+class RSPFileSyntax(enum.Enum):
+
+    """Which RSP file syntax the compiler supports."""
+
+    MSVC = enum.auto()
+    GCC = enum.auto()
+
+
+class StaticLinker:
+
+    id: str
+
+    def __init__(self, exelist: T.List[str]):
+        self.exelist = exelist
+
+    def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+        return CompilerArgs(self, args)
+
+    def can_linker_accept_rsp(self) -> bool:
+        """
+        Determines whether the linker can accept arguments using the @rsp syntax.
+        """
+        return mesonlib.is_windows()
+
+    def get_base_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        """Like compilers.get_base_link_args, but for the static linker."""
+        return []
+
+    def get_exelist(self) -> T.List[str]:
+        return self.exelist.copy()
+
+    def get_std_link_args(self) -> T.List[str]:
+        return []
+
+    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+        return []
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return []
+
+    def get_coverage_link_args(self) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def openmp_flags(self) -> T.List[str]:
+        return []
+
+    def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return []
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+        return args[:]
+
+    @classmethod
+    def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+        return args[:]
+
+    def get_link_debugfile_name(self, targetfile: str) -> str:
+        return None
+
+    def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+        # Static libraries do not have PDB files
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return []
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        """The format of the RSP file that this compiler supports.
+
+        If `self.can_linker_accept_rsp()` returns True, then this needs to
+        be implemented
+        """
+        assert not self.can_linker_accept_rsp(), f'{self.id} linker accepts RSP, but doesn\'t provide a supported format; this is a bug'
+        raise EnvironmentException(f'{self.id} does not implement rsp format, this shouldn\'t be called')
+
+
+class VisualStudioLikeLinker:
+    always_args = ['/NOLOGO']
+
+    def __init__(self, machine: str):
+        self.machine = machine
+
+    def get_always_args(self) -> T.List[str]:
+        return self.always_args.copy()
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return self.always_args.copy()
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        args = []  # type: T.List[str]
+        if self.machine:
+            args += ['/MACHINE:' + self.machine]
+        args += ['/OUT:' + target]
+        return args
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+        from ..compilers import VisualStudioCCompiler
+        return VisualStudioCCompiler.unix_args_to_native(args)
+
+    @classmethod
+    def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+        from ..compilers import VisualStudioCCompiler
+        return VisualStudioCCompiler.native_args_to_unix(args)
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.MSVC
+
+
+class VisualStudioLinker(VisualStudioLikeLinker, StaticLinker):
+
+    """Microsoft's lib static linker."""
+
+    def __init__(self, exelist: T.List[str], machine: str):
+        StaticLinker.__init__(self, exelist)
+        VisualStudioLikeLinker.__init__(self, machine)
+
+
+class IntelVisualStudioLinker(VisualStudioLikeLinker, StaticLinker):
+
+    """Intel's xilib static linker."""
+
+    def __init__(self, exelist: T.List[str], machine: str):
+        StaticLinker.__init__(self, exelist)
+        VisualStudioLikeLinker.__init__(self, machine)
+
+
+class ArLinker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ar'
+        pc, stdo = mesonlib.Popen_safe(self.exelist + ['-h'])[0:2]
+        # Enable deterministic builds if they are available.
+        if '[D]' in stdo:
+            self.std_args = ['csrD']
+        else:
+            self.std_args = ['csr']
+        self.can_rsp = '@<' in stdo
+
+    def can_linker_accept_rsp(self) -> bool:
+        return self.can_rsp
+
+    def get_std_link_args(self) -> T.List[str]:
+        return self.std_args
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [target]
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.GCC
+
+
+class ArmarLinker(ArLinker):  # lgtm [py/missing-call-to-init]
+
+    def __init__(self, exelist: T.List[str]):
+        StaticLinker.__init__(self, exelist)
+        self.id = 'armar'
+        self.std_args = ['-csr']
+
+    def can_linker_accept_rsp(self) -> bool:
+        # armar can't accept arguments using the @rsp syntax
+        return False
+
+
+class DLinker(StaticLinker):
+    def __init__(self, exelist: T.List[str], arch: str, *, rsp_syntax: RSPFileSyntax = RSPFileSyntax.GCC):
+        super().__init__(exelist)
+        self.id = exelist[0]
+        self.arch = arch
+        self.__rsp_syntax = rsp_syntax
+
+    def get_std_link_args(self) -> T.List[str]:
+        return ['-lib']
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['-of=' + target]
+
+    def get_linker_always_args(self) -> T.List[str]:
+        if mesonlib.is_windows():
+            if self.arch == 'x86_64':
+                return ['-m64']
+            elif self.arch == 'x86_mscoff' and self.id == 'dmd':
+                return ['-m32mscoff']
+            return ['-m32']
+        return []
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return self.__rsp_syntax
+
+
+class CcrxLinker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'rlink'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'-output={target}']
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['-nologo', '-form=library']
+
+
+class Xc16Linker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'xc16-ar'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'{target}']
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['rcs']
+
+class CompCertLinker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ccomp'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'-o{target}']
+
+
+class C2000Linker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ar2000'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'{target}']
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['-r']
+
+
+class AIXArLinker(ArLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        StaticLinker.__init__(self, exelist)
+        self.id = 'aixar'
+        self.std_args = ['-csr', '-Xany']
+
+    def can_linker_accept_rsp(self) -> bool:
+        # AIXAr can't accept arguments using the @rsp syntax
+        return False
+
+
+def prepare_rpaths(raw_rpaths: str, build_dir: str, from_dir: str) -> T.List[str]:
+    # The rpaths we write must be relative if they point to the build dir,
+    # because otherwise they have different length depending on the build
+    # directory. This breaks reproducible builds.
+    internal_format_rpaths = [evaluate_rpath(p, build_dir, from_dir) for p in raw_rpaths]
+    ordered_rpaths = order_rpaths(internal_format_rpaths)
+    return ordered_rpaths
+
+
+def order_rpaths(rpath_list: T.List[str]) -> T.List[str]:
+    # We want rpaths that point inside our build dir to always override
+    # those pointing to other places in the file system. This is so built
+    # binaries prefer our libraries to the ones that may lie somewhere
+    # in the file system, such as /lib/x86_64-linux-gnu.
+    #
+    # The correct thing to do here would be C++'s std::stable_partition.
+    # Python standard library does not have it, so replicate it with
+    # sort, which is guaranteed to be stable.
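+    # Illustrative example (relative paths keep their order and move ahead of
+    # absolute ones):
+    #   ['/usr/lib', 'sub/dir', '/opt/lib'] -> ['sub/dir', '/usr/lib', '/opt/lib']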
+    return sorted(rpath_list, key=os.path.isabs)
+
+
+def evaluate_rpath(p: str, build_dir: str, from_dir: str) -> str:
+    if p == from_dir:
+        return '' # relpath errors out in this case
+    elif os.path.isabs(p):
+        return p # These can be outside of build dir.
+    else:
+        return os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir))
+
+class DynamicLinker(metaclass=abc.ABCMeta):
+
+    """Base class for dynamic linkers."""
+
+    _BUILDTYPE_ARGS = {
+        'plain': [],
+        'debug': [],
+        'debugoptimized': [],
+        'release': [],
+        'minsize': [],
+        'custom': [],
+    }  # type: T.Dict[str, T.List[str]]
+
+    @abc.abstractproperty
+    def id(self) -> str:
+        pass
+
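+    # Illustrative prefix application (see _apply_prefix below):
+    #   prefix_arg None          : '--as-needed' -> ['--as-needed']
+    #   prefix_arg '-Wl,'        : '--as-needed' -> ['-Wl,--as-needed']
+    #   prefix_arg ['-Xlinker']  : '--as-needed' -> ['-Xlinker', '--as-needed']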
+    def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]:
+        args = [arg] if isinstance(arg, str) else arg
+        if self.prefix_arg is None:
+            return args
+        elif isinstance(self.prefix_arg, str):
+            return [self.prefix_arg + arg for arg in args]
+        ret = []
+        for arg in args:
+            ret += self.prefix_arg + [arg]
+        return ret
+
+    def __init__(self, exelist: T.List[str],
+                 for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
+                 always_args: T.List[str], *, version: str = 'unknown version'):
+        self.exelist = exelist
+        self.for_machine = for_machine
+        self.version = version
+        self.prefix_arg = prefix_arg
+        self.always_args = always_args
+        self.machine = None  # type: T.Optional[str]
+
+    def __repr__(self) -> str:
+        return '<{}: v{} `{}`>'.format(type(self).__name__, self.version, ' '.join(self.exelist))
+
+    def get_id(self) -> str:
+        return self.id
+
+    def get_version_string(self) -> str:
+        return f'({self.id} {self.version})'
+
+    def get_exelist(self) -> T.List[str]:
+        return self.exelist.copy()
+
+    def get_accepts_rsp(self) -> bool:
+        # rsp files are only used when building on Windows because we want to
+        # avoid issues with quoting and max argument length
+        return mesonlib.is_windows()
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        """The format of the RSP file that this compiler supports.
+
+        If `self.can_linker_accept_rsp()` returns True, then this needs to
+        be implemented
+        """
+        return RSPFileSyntax.GCC
+
+    def get_always_args(self) -> T.List[str]:
+        return self.always_args.copy()
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    # XXX: is use_ldflags a compiler or a linker attribute?
+
+    def get_option_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return []
+
+    def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+        raise EnvironmentException(f'Language {self.id} does not support has_multi_link_arguments.')
+
+    def get_debugfile_name(self, targetfile: str) -> str:
+        '''Name of debug file written out (see below)'''
+        return None
+
+    def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+        """Some compilers (MSVC) write debug into a separate file.
+
+        This method takes the target object path and returns a list of
+        commands to append to the linker invocation to control where that
+        file is written.
+        """
+        return []
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return self.get_std_shared_lib_args()
+
+    def get_pie_args(self) -> T.List[str]:
+        # TODO: this really needs to take a boolean and return the args to
+        # disable pie, otherwise it only acts to enable pie if pie *isn't* the
+        # default.
+        raise EnvironmentException(f'Linker {self.id} does not support position-independent executable')
+
+    def get_lto_args(self) -> T.List[str]:
+        return []
+
+    def sanitizer_args(self, value: str) -> T.List[str]:
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        # We can override these in children by just overriding the
+        # _BUILDTYPE_ARGS value.
+        return self._BUILDTYPE_ARGS[buildtype]
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return []
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        raise EnvironmentException(
+            f'Linker {self.id} does not support link_whole')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        raise EnvironmentException(
+            f'Linker {self.id} does not support allow undefined')
+
+    @abc.abstractmethod
+    def get_output_args(self, outname: str) -> T.List[str]:
+        pass
+
+    def get_coverage_args(self) -> T.List[str]:
+        raise EnvironmentException(f"Linker {self.id} doesn't implement coverage data generation.")
+
+    @abc.abstractmethod
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        pass
+
+    def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        """The name of the outputted import library.
+
+        This implementation is used only on Windows by compilers that use GNU ld
+        """
+        return []
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def no_undefined_args(self) -> T.List[str]:
+        """Arguments to error if there are any undefined symbols at link time.
+
+        This is the inverse of get_allow_undefined_args().
+
+        TODO: A future cleanup might merge this and
+              get_allow_undefined_args() into a single method taking a
+              boolean
+        """
+        return []
+
+    def fatal_warnings(self) -> T.List[str]:
+        """Arguments to make all warnings errors."""
+        return []
+
+    def headerpad_args(self) -> T.List[str]:
+        # Only used by the Apple linker
+        return []
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        # Only used by VisualStudioLikeLinkers
+        return []
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        # Only used if supported by the dynamic linker and
+        # only when targeting Windows
+        return []
+
+    def bitcode_args(self) -> T.List[str]:
+        raise MesonException('This linker does not support bitcode bundles')
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        return []
+
+
+class PosixDynamicLinkerMixin:
+
+    """Mixin class for POSIX-ish linkers.
+
+    This is obviously a pretty small subset of the linker interface, but
+    enough of the dynamic linkers that Meson supports are POSIX-like without
+    being GNU-like that it makes sense to split this out.
+    """
+
+    def get_output_args(self, outname: str) -> T.List[str]:
+        return ['-o', outname]
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return ['-shared']
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return ['-L' + dirname]
+
+
+class GnuLikeDynamicLinkerMixin:
+
+    """Mixin class for dynamic linkers that provides gnu-like interface.
+
+    This acts as a base for the GNU linkers (bfd and gold), LLVM's lld, and
+    other linkers like GNU-ld.
+    """
+
+    if T.TYPE_CHECKING:
+        for_machine = MachineChoice.HOST
+        def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
+    _BUILDTYPE_ARGS = {
+        'plain': [],
+        'debug': [],
+        'debugoptimized': [],
+        'release': ['-O1'],
+        'minsize': [],
+        'custom': [],
+    }  # type: T.Dict[str, T.List[str]]
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        # We can override these in children by just overriding the
+        # _BUILDTYPE_ARGS value.
+        return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
+
+    def get_pie_args(self) -> T.List[str]:
+        return ['-pie']
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return self._apply_prefix('--as-needed')
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('--allow-shlib-undefined')
+
+    def get_lto_args(self) -> T.List[str]:
+        return ['-flto']
+
+    def sanitizer_args(self, value: str) -> T.List[str]:
+        if value == 'none':
+            return []
+        return ['-fsanitize=' + value]
+
+    def get_coverage_args(self) -> T.List[str]:
+        return ['--coverage']
+
+    def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+        m = env.machines[self.for_machine]
+        if m.is_windows() or m.is_cygwin():
+            return self._apply_prefix('--export-all-symbols')
+        return self._apply_prefix('-export-dynamic')
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        return self._apply_prefix('--out-implib=' + implibname)
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        if env.machines[self.for_machine].is_haiku():
+            return []
+        return ['-pthread']
+
+    def no_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('--no-undefined')
+
+    def fatal_warnings(self) -> T.List[str]:
+        return self._apply_prefix('--fatal-warnings')
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        m = env.machines[self.for_machine]
+        if m.is_windows() or m.is_cygwin():
+            # For PE/COFF the soname argument has no effect
+            return []
+        sostr = '' if soversion is None else '.' + soversion
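+        # e.g. prefix='lib', shlib_name='foo', suffix='so', soversion='1'
+        # gives '-soname,libfoo.so.1' (before the linker prefix is applied).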
+        return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        m = env.machines[self.for_machine]
+        if m.is_windows() or m.is_cygwin():
+            return ([], set())
+        if not rpath_paths and not install_rpath and not build_rpath:
+            return ([], set())
+        args = []
+        origin_placeholder = '$ORIGIN'
+        processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+        # Need to deduplicate rpaths, as macOS's install_name_tool
+        # is *very* allergic to duplicate -delete_rpath arguments
+        # when calling depfixer on installation.
+        all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+        rpath_dirs_to_remove = set()
+        for p in all_paths:
+            rpath_dirs_to_remove.add(p.encode('utf8'))
+        # Build_rpath is used as-is (it is usually absolute).
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+            for p in build_rpath.split(':'):
+                rpath_dirs_to_remove.add(p.encode('utf8'))
+
+        # TODO: should this actually be "for (dragonfly|open)bsd"?
+        if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
+            # This argument instructs the compiler to record the value of
+            # ORIGIN in the .dynamic section of the elf. On Linux this is done
+            # by default, but is not on dragonfly/openbsd for some reason. Without this
+            # $ORIGIN in the runtime path will be undefined and any binaries
+            # linked against local libraries will fail to resolve them.
+            args.extend(self._apply_prefix('-z,origin'))
+
+        # In order to avoid relinking for RPATH removal, the binary needs to contain just
+        # enough space in the ELF header to hold the final installation RPATH.
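+        # Illustrative example: with a build-time rpath of '$ORIGIN/sub' (11 chars)
+        # and an install_rpath of '/opt/myproj/lib' (15 chars), the written value is
+        # padded to '$ORIGIN/sub:XXXX' so it can later be overwritten in place.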
+        paths = ':'.join(all_paths)
+        if len(paths) < len(install_rpath):
+            padding = 'X' * (len(install_rpath) - len(paths))
+            if not paths:
+                paths = padding
+            else:
+                paths = paths + ':' + padding
+        args.extend(self._apply_prefix('-rpath,' + paths))
+
+        # TODO: should this actually be "for solaris/sunos"?
+        if mesonlib.is_sunos():
+            return (args, rpath_dirs_to_remove)
+
+        # Rpaths to use while linking must be absolute. These are not
+        # written to the binary. Needed only with GNU ld:
+        # https://sourceware.org/bugzilla/show_bug.cgi?id=16936
+        # Not needed on Windows or other platforms that don't use RPATH
+        # https://github.com/mesonbuild/meson/issues/1897
+        #
+        # In addition, this linker option tends to be quite long and some
+        # compilers have trouble dealing with it. That's why we will include
+        # one option per folder, like this:
+        #
+        #   -Wl,-rpath-link,/path/to/folder1 -Wl,-rpath,/path/to/folder2 ...
+        #
+        # ...instead of just one single looooong option, like this:
+        #
+        #   -Wl,-rpath-link,/path/to/folder1:/path/to/folder2:...
+        for p in rpath_paths:
+            args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
+
+        return (args, rpath_dirs_to_remove)
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        if 'windows' in value:
+            args = ['--subsystem,windows']
+        elif 'console' in value:
+            args = ['--subsystem,console']
+        else:
+            raise MesonException(f'Only "windows" and "console" are supported for win_subsystem with MinGW, not "{value}".')
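+        # e.g. value 'windows,6.0' yields '--subsystem,windows:6.0'
+        # (before the linker prefix is applied).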
+        if ',' in value:
+            args[-1] = args[-1] + ':' + value.split(',')[1]
+
+        return self._apply_prefix(args)
+
+
+class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Apple's ld implementation."""
+
+    id = 'ld64'
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return self._apply_prefix('-dead_strip_dylibs')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('-undefined,dynamic_lookup')
+
+    def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return ['-bundle'] + self._apply_prefix('-undefined,dynamic_lookup')
+
+    def get_pie_args(self) -> T.List[str]:
+        return []
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        result = []  # type: T.List[str]
+        for a in args:
+            result.extend(self._apply_prefix('-force_load'))
+            result.append(a)
+        return result
+
+    def get_coverage_args(self) -> T.List[str]:
+        return ['--coverage']
+
+    def sanitizer_args(self, value: str) -> T.List[str]:
+        if value == 'none':
+            return []
+        return ['-fsanitize=' + value]
+
+    def no_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('-undefined,error')
+
+    def headerpad_args(self) -> T.List[str]:
+        return self._apply_prefix('-headerpad_max_install_names')
+
+    def bitcode_args(self) -> T.List[str]:
+        return self._apply_prefix('-bitcode_bundle')
+
+    def fatal_warnings(self) -> T.List[str]:
+        return self._apply_prefix('-fatal_warnings')
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        if is_shared_module:
+            return []
+        install_name = ['@rpath/', prefix, shlib_name]
+        if soversion is not None:
+            install_name.append('.' + soversion)
+        install_name.append('.dylib')
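+        # e.g. prefix='lib', shlib_name='foo', soversion='1' -> '@rpath/libfoo.1.dylib'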
+        args = ['-install_name', ''.join(install_name)]
+        if darwin_versions:
+            args.extend(['-compatibility_version', darwin_versions[0],
+                         '-current_version', darwin_versions[1]])
+        return args
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if not rpath_paths and not install_rpath and not build_rpath:
+            return ([], set())
+        args = []
+        # @loader_path is the equivalent of $ORIGIN on macOS
+        # https://stackoverflow.com/q/26280738
+        origin_placeholder = '@loader_path'
+        processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+        all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+        for rp in all_paths:
+            args.extend(self._apply_prefix('-rpath,' + rp))
+
+        return (args, set())
+
+
+class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Representation of GNU ld.bfd and ld.gold."""
+
+    def get_accepts_rsp(self) -> bool:
+        return True
+
+
+class GnuGoldDynamicLinker(GnuDynamicLinker):
+
+    id = 'ld.gold'
+
+
+class GnuBFDDynamicLinker(GnuDynamicLinker):
+
+    id = 'ld.bfd'
+
+
+class LLVMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Representation of LLVM's ld.lld linker.
+
+    This is only the GNU-like linker, not the Apple-like or link.exe-like
+    linkers.
+    """
+
+    id = 'ld.lld'
+
+    def __init__(self, exelist: T.List[str],
+                 for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
+                 always_args: T.List[str], *, version: str = 'unknown version'):
+        super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)
+
+        # Some targets don't seem to support this argument (windows, wasm, ...)
+        _, _, e = mesonlib.Popen_safe(self.exelist + self._apply_prefix('--allow-shlib-undefined'))
+        self.has_allow_shlib_undefined = 'unknown argument: --allow-shlib-undefined' not in e
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        if self.has_allow_shlib_undefined:
+            return self._apply_prefix('--allow-shlib-undefined')
+        return []
+
+
+class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Emscripten's wasm-ld."""
+
+    id = 'ld.wasm'
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0']
+
+    def no_undefined_args(self) -> T.List[str]:
+        return ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=1']
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        raise MesonException(f'{self.id} does not support shared libraries.')
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+
+class CcrxDynamicLinker(DynamicLinker):
+
+    """Linker for Renesis CCrx compiler."""
+
+    id = 'rlink'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['rlink.exe'], for_machine, '', [],
+                         version=version)
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return '-lib='
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [f'-output={outputname}']
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise OSError('rlink.exe does not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        return []
+
+
+class Xc16DynamicLinker(DynamicLinker):
+
+    """Linker for Microchip XC16 compiler."""
+
+    id = 'xc16-gcc'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['xc16-gcc.exe'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [f'-o{outputname}']
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise OSError('xc16-gcc.exe does not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+class CompCertDynamicLinker(DynamicLinker):
+
+    """Linker for CompCert C compiler."""
+
+    id = 'ccomp'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['ccomp'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('-Wl,--whole-archive') + args + self._apply_prefix('-Wl,--no-whole-archive')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [f'-o{outputname}']
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return [f'-L{dirname}']
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        raise MesonException(f'{self.id} does not support shared libraries.')
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+class C2000DynamicLinker(DynamicLinker):
+
+    """Linker for Texas Instruments C2000 compiler."""
+
+    id = 'cl2000'
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(exelist or ['cl2000.exe'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return '-l='
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-z', f'--output_file={outputname}']
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise OSError('cl2000.exe does not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+
+class ArmDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Linker for the ARM compiler."""
+
+    id = 'armlink'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['armlink'], for_machine, '', [],
+                         version=version)
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_std_shared_lib_args(self) -> 'T.NoReturn':
+        raise MesonException('The Arm Linkers do not support shared libraries')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+
+class ArmClangDynamicLinker(ArmDynamicLinker):
+
+    """Linker used with ARM's clang fork.
+
+    The interface is similar enough to the old ARM ld that it inherits and
+    extends a few things as needed.
+    """
+
+    def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+        return ['--export_dynamic']
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        return ['--symdefs=' + implibname]
+
+class QualcommLLVMDynamicLinker(LLVMDynamicLinker):
+
+    """ARM Linker from Snapdragon LLVM ARM Compiler."""
+
+    id = 'ld.qcld'
+
+
+class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """PGI linker."""
+
+    id = 'pgi'
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        return []
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        # PGI -shared is Linux only.
+        if mesonlib.is_windows():
+            return ['-Bdynamic', '-Mmakedll']
+        elif mesonlib.is_linux():
+            return ['-shared']
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if not env.machines[self.for_machine].is_windows():
+            return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
+        return ([], set())
+
+NvidiaHPC_DynamicLinker = PGIDynamicLinker
+
+
+class PGIStaticLinker(StaticLinker):
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ar'
+        self.std_args = ['-r']
+
+    def get_std_link_args(self) -> T.List[str]:
+        return self.std_args
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [target]
+
+NvidiaHPC_StaticLinker = PGIStaticLinker
+
+
+class VisualStudioLikeLinkerMixin:
+
+    """Mixin class for for dynamic linkers that act like Microsoft's link.exe."""
+
+    if T.TYPE_CHECKING:
+        for_machine = MachineChoice.HOST
+        def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
+    _BUILDTYPE_ARGS = {
+        'plain': [],
+        'debug': [],
+        'debugoptimized': [],
+        # The otherwise implicit REF and ICF linker optimisations are disabled by
+        # /DEBUG. REF implies ICF.
+        'release': ['/OPT:REF'],
+        'minsize': ['/INCREMENTAL:NO', '/OPT:REF'],
+        'custom': [],
+    }  # type: T.Dict[str, T.List[str]]
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 prefix_arg: T.Union[str, T.List[str]], always_args: T.List[str], *,
+                 version: str = 'unknown version', direct: bool = True, machine: str = 'x86'):
+        # There's no way I can find to make mypy understand what's going on here
+        super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)  # type: ignore
+        self.machine = machine
+        self.direct = direct
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
+
+    def invoked_by_compiler(self) -> bool:
+        return not self.direct
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname])
+
+    def get_always_args(self) -> T.List[str]:
+        parent = super().get_always_args() # type: ignore
+        return self._apply_prefix('/nologo') + T.cast(T.List[str], parent)
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return self._apply_prefix('/LIBPATH:' + dirname)
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return self._apply_prefix('/DLL')
+
+    def get_debugfile_name(self, targetfile: str) -> str:
+        basename = targetfile.rsplit('.', maxsplit=1)[0]
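+        # e.g. 'foo.dll' -> 'foo.pdb'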
+        return basename + '.pdb'
+
+    def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+        return self._apply_prefix(['/DEBUG', '/PDB:' + self.get_debugfile_name(targetfile)])
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        # Only since VS2015
+        args = mesonlib.listify(args)
+        l = []  # type: T.List[str]
+        for a in args:
+            l.extend(self._apply_prefix('/WHOLEARCHIVE:' + a))
+        return l
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        return []
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        """The command to generate the import library."""
+        return self._apply_prefix(['/IMPLIB:' + implibname])
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.MSVC
+
+
+class MSVCDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Microsoft's Link.exe."""
+
+    id = 'link'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+                 exelist: T.Optional[T.List[str]] = None,
+                 prefix: T.Union[str, T.List[str]] = '',
+                 machine: str = 'x86', version: str = 'unknown version',
+                 direct: bool = True):
+        super().__init__(exelist or ['link.exe'], for_machine,
+                         prefix, always_args, machine=machine, version=version, direct=direct)
+
+    def get_always_args(self) -> T.List[str]:
+        return self._apply_prefix(['/nologo', '/release']) + super().get_always_args()
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return self.get_win_subsystem_args("windows" if value else "console")
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class ClangClDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Clang's lld-link.exe."""
+
+    id = 'lld-link'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+                 exelist: T.Optional[T.List[str]] = None,
+                 prefix: T.Union[str, T.List[str]] = '',
+                 machine: str = 'x86', version: str = 'unknown version',
+                 direct: bool = True):
+        super().__init__(exelist or ['lld-link.exe'], for_machine,
+                         prefix, always_args, machine=machine, version=version, direct=direct)
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        # If we're being driven indirectly by clang just skip /MACHINE
+        # as clang's target triple will handle the machine selection
+        if self.machine is None:
+            return self._apply_prefix([f"/OUT:{outputname}"])
+
+        return super().get_output_args(outputname)
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return self.get_win_subsystem_args("windows" if value else "console")
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class XilinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Intel's Xilink.exe."""
+
+    id = 'xilink'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+                 exelist: T.Optional[T.List[str]] = None,
+                 prefix: T.Union[str, T.List[str]] = '',
+                 machine: str = 'x86', version: str = 'unknown version',
+                 direct: bool = True):
+        super().__init__(['xilink.exe'], for_machine, '', always_args, version=version)
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return self.get_win_subsystem_args("windows" if value else "console")
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Sys-V derived linker used on Solaris and OpenSolaris."""
+
+    id = 'ld.solaris'
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+
+    def get_pie_args(self) -> T.List[str]:
+        # Available in Solaris 11.2 and later
+        pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp'))
+        for line in (stdo + stde).split('\n'):
+            if '-z type' in line:
+                if 'pie' in line:
+                    return ['-z', 'type=pie']
+                break
+        return []
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return self._apply_prefix(['-z', 'ignore'])
+
+    def no_undefined_args(self) -> T.List[str]:
+        return ['-z', 'defs']
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return ['-z', 'nodefs']
+
+    def fatal_warnings(self) -> T.List[str]:
+        return ['-z', 'fatal-warnings']
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if not rpath_paths and not install_rpath and not build_rpath:
+            return ([], set())
+        processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+        all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
+        rpath_dirs_to_remove = set()
+        for p in all_paths:
+            rpath_dirs_to_remove.add(p.encode('utf8'))
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+            for p in build_rpath.split(':'):
+                rpath_dirs_to_remove.add(p.encode('utf8'))
+
+        # In order to avoid relinking for RPATH removal, the binary needs to contain just
+        # enough space in the ELF header to hold the final installation RPATH.
+        paths = ':'.join(all_paths)
+        if len(paths) < len(install_rpath):
+            padding = 'X' * (len(install_rpath) - len(paths))
+            if not paths:
+                paths = padding
+            else:
+                paths = paths + ':' + padding
+        return (self._apply_prefix(f'-rpath,{paths}'), rpath_dirs_to_remove)
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        sostr = '' if soversion is None else '.' + soversion
+        return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
+
+
+class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Sys-V derived linker used on AIX"""
+
+    id = 'ld.aix'
+
+    def get_always_args(self) -> T.List[str]:
+        return self._apply_prefix(['-bnoipath', '-bbigtoc']) + super().get_always_args()
+
+    def no_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix(['-bernotok'])
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix(['-berok'])
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: str, build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        all_paths = mesonlib.OrderedSet() # type: mesonlib.OrderedSet[str]
+        # install_rpath first, followed by other paths, and the system path last
+        if install_rpath != '':
+            all_paths.add(install_rpath)
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+        for p in rpath_paths:
+            all_paths.add(os.path.join(build_dir, p))
+        # We should consider allowing the $LIBPATH environment variable
+        # to override sys_path.
+        sys_path = env.get_compiler_system_dirs(self.for_machine)
+        if len(sys_path) == 0:
+            # get_compiler_system_dirs doesn't support our compiler.
+            # Use the default system library path
+            all_paths.update(['/usr/lib','/lib'])
+        else:
+            # Include the compiler's default library paths, but filter out paths that don't exist
+            for p in sys_path:
+                if os.path.isdir(p):
+                    all_paths.add(p)
+        return (self._apply_prefix('-blibpath:' + ':'.join(all_paths)), set())
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return ['-pthread']
+
+
+class OptlinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Digital Mars dynamic linker for windows."""
+
+    id = 'optlink'
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        # Use optlink instead of link so we don't interfere with other link.exe
+        # implementations.
+        super().__init__(exelist, for_machine, '', [], version=version)
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+        # Optlink does not generate pdb files.
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+
+class CudaLinker(PosixDynamicLinkerMixin, DynamicLinker):
+    """Cuda linker (nvlink)"""
+
+    id = 'nvlink'
+
+    @staticmethod
+    def parse_version() -> str:
+        version_cmd = ['nvlink', '--version']
+        try:
+            _, out, _ = mesonlib.Popen_safe(version_cmd)
+        except OSError:
+            return 'unknown version'
+        # Output example:
+        # nvlink: NVIDIA (R) Cuda linker
+        # Copyright (c) 2005-2018 NVIDIA Corporation
+        # Built on Sun_Sep_30_21:09:22_CDT_2018
+        # Cuda compilation tools, release 10.0, V10.0.166
+        # The version we want is the part after the final 'V' (e.g. '10.0.166' above).
+        return out.strip().split('V')[-1]
+
+    def get_accepts_rsp(self) -> bool:
+        # nvcc does not support response files
+        return False
+
+    def get_lib_prefix(self) -> str:
+        if not mesonlib.is_windows():
+            return ''
+        # nvcc doesn't recognize Meson's default .a extension for static libraries on
+        # Windows and passes it to cl as an object file, resulting in 'warning D9024 :
+        # unrecognized source file type 'xxx.a', object file assumed'.
+        #
+        # nvcc's --library= option doesn't help: it takes the library name without the
+        # extension and assumes that the extension on Windows is .lib; prefixing the
+        # library with -Xlinker= seems to work.
+        from ..compilers import CudaCompiler
+        return CudaCompiler.LINKER_PREFIX
+
+    def fatal_warnings(self) -> T.List[str]:
+        return ['--warning-as-error']
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+                        is_shared_module: bool) -> T.List[str]:
+        return []
diff --git a/meson/mesonbuild/mcompile.py b/meson/mesonbuild/mcompile.py
new file mode 100644
index 000000000..bb7ecae9b
--- /dev/null
+++ b/meson/mesonbuild/mcompile.py
@@ -0,0 +1,358 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Entrypoint script for backend agnostic compile."""
+
+import os
+import json
+import re
+import sys
+import shutil
+import typing as T
+from collections import defaultdict
+from pathlib import Path
+
+from . import mlog
+from . import mesonlib
+from . import coredata
+from .mesonlib import MesonException
+from mesonbuild.environment import detect_ninja
+from mesonbuild.coredata import UserArrayOption
+
+if T.TYPE_CHECKING:
+    import argparse
+
+def array_arg(value: str) -> T.List[str]:
+    return UserArrayOption(None, value, allow_dups=True, user_input=True).value
+
+def validate_builddir(builddir: Path) -> None:
+    if not (builddir / 'meson-private' / 'coredata.dat').is_file():
+        raise MesonException(f'Current directory is not a meson build directory: `{builddir}`.\n'
+                             'Please specify a valid build dir or change the working directory to it.\n'
+                             'It is also possible that the build directory was generated with an old\n'
+                             'meson version. Please regenerate it in this case.')
+
+def get_backend_from_coredata(builddir: Path) -> str:
+    """
+    Gets `backend` option value from coredata
+    """
+    backend = coredata.load(str(builddir)).get_option(mesonlib.OptionKey('backend'))
+    assert isinstance(backend, str)
+    return backend
+
+def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
+    """
+    Converts the flat list of target dicts into a dict mapping target name to a list of dicts (since names are not unique)
+    """
+    path_to_intro = builddir / 'meson-info' / 'intro-targets.json'
+    if not path_to_intro.exists():
+        raise MesonException(f'`{path_to_intro.name}` is missing! Directory is not configured yet?')
+    with path_to_intro.open(encoding='utf-8') as f:
+        schema = json.load(f)
+
+    parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+    for target in schema:
+        parsed_data[target['name']] += [target]
+    return parsed_data
+
+class ParsedTargetName:
+    full_name = ''
+    name = ''
+    type = ''
+    path = ''
+
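+    # For example, a target string like 'sub/dir/foo:shared_library' parses to
+    # path='sub/dir', name='foo', type='shared_library'; a bare 'foo' leaves
+    # path and type empty.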
+    def __init__(self, target: str):
+        self.full_name = target
+        split = target.rsplit(':', 1)
+        if len(split) > 1:
+            self.type = split[1]
+            if not self._is_valid_type(self.type):
+                raise MesonException(f'Can\'t invoke target `{target}`: unknown target type: `{self.type}`')
+
+        split = split[0].rsplit('/', 1)
+        if len(split) > 1:
+            self.path = split[0]
+            self.name = split[1]
+        else:
+            self.name = split[0]
+
+    @staticmethod
+    def _is_valid_type(type: str) -> bool:
+        # Amend docs in Commands.md when editing this list
+        allowed_types = {
+            'executable',
+            'static_library',
+            'shared_library',
+            'shared_module',
+            'custom',
+            'run',
+            'jar',
+        }
+        return type in allowed_types
+
+def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]:
+    if target.name not in introspect_data:
+        raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
+
+    intro_targets = introspect_data[target.name]
+    found_targets = []  # type: T.List[T.Dict[str, T.Any]]
+
+    resolved_bdir = builddir.resolve()
+
+    if not target.type and not target.path:
+        found_targets = intro_targets
+    else:
+        for intro_target in intro_targets:
+            if (intro_target['subproject'] or
+                    (target.type and target.type != intro_target['type'].replace(' ', '_')) or
+                    (target.path
+                         and intro_target['filename'] != 'no_name'
+                         and Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)):
+                continue
+            found_targets += [intro_target]
+
+    if not found_targets:
+        raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
+    elif len(found_targets) > 1:
+        raise MesonException(f'Can\'t invoke target `{target.full_name}`: ambiguous name. Add target type and/or path: `PATH/NAME:TYPE`')
+
+    return found_targets[0]
+
+def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]:
+    intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+    if intro_target['type'] == 'run':
+        return [target.name]
+    else:
+        return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']]
+
+def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    runner = detect_ninja()
+    if runner is None:
+        raise MesonException('Cannot find ninja.')
+
+    cmd = runner
+    if not builddir.samefile('.'):
+        cmd.extend(['-C', builddir.as_posix()])
+
+    # If the value is set to < 1 then don't set anything, which lets
+    # ninja/samu decide what to do.
+    if options.jobs > 0:
+        cmd.extend(['-j', str(options.jobs)])
+    if options.load_average > 0:
+        cmd.extend(['-l', str(options.load_average)])
+
+    if options.verbose:
+        cmd.append('-v')
+
+    cmd += options.ninja_args
+
+    # operands must be processed after options/option-arguments
+    if options.targets:
+        intro_data = parse_introspect_data(builddir)
+        for t in options.targets:
+            cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data))
+    if options.clean:
+        cmd.append('clean')
+
+    return cmd, None
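+
+# Illustrative sketch (not from the original source): for a hypothetical
+# invocation `meson compile -C builddir -j 4 mytarget`, the assembled command
+# would typically look like
+#
+#     ['ninja', '-C', 'builddir', '-j', '4', '<output(s) of mytarget>']
+#
+# where the trailing entries come from generate_target_names_ninja() and the
+# runner (`ninja` or `samu`) is whatever detect_ninja() returned.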
+
+def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str:
+    intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+    assert intro_target['type'] != 'run', 'Should not reach here: `run` targets must be handled above'
+
+    # Normalize project name
+    # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
+    target_name = re.sub(r"[\%\$\@\;\.\(\)']", '_', intro_target['id'])  # type: str
+    rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
+    if rel_path != Path('.'):
+        target_name = str(rel_path / target_name)
+    return target_name
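+
+# Illustrative example (an assumption about typical introspection ids, not a
+# statement from the original code): an id such as `foo@exe` is normalized to
+# `foo_exe`, and if the target's outputs live under `sub/`, the returned name
+# becomes `sub/foo_exe` (with the host OS path separator).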
+
+def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    slns = list(builddir.glob('*.sln'))
+    assert len(slns) == 1, 'More than one solution in a project?'
+    sln = slns[0]
+
+    cmd = ['msbuild']
+
+    if options.targets:
+        intro_data = parse_introspect_data(builddir)
+        has_run_target = any(map(
+            lambda t:
+                get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run',
+            options.targets
+        ))
+
+        if has_run_target:
+            # `run` targets can't be used the same way as other targets on the `vs`
+            # backend. They are defined as disabled projects, which can't be invoked
+            # as a `.sln` target and have to be invoked directly as a project instead.
+            # Issue: https://github.com/microsoft/msbuild/issues/4772
+
+            if len(options.targets) > 1:
+                raise MesonException('Only one target may be specified when `run` target type is used on this backend.')
+            intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data)
+            proj_dir = Path(intro_target['filename'][0]).parent
+            proj = proj_dir/'{}.vcxproj'.format(intro_target['id'])
+            cmd += [str(proj.resolve())]
+        else:
+            cmd += [str(sln.resolve())]
+            cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets])
+    else:
+        cmd += [str(sln.resolve())]
+
+    if options.clean:
+        cmd.extend(['-target:Clean'])
+
+    # In msbuild `-maxCpuCount` with no number means "detect cpus"; the default is `-maxCpuCount:1`.
+    if options.jobs > 0:
+        cmd.append(f'-maxCpuCount:{options.jobs}')
+    else:
+        cmd.append('-maxCpuCount')
+
+    if options.load_average:
+        mlog.warning('Msbuild does not have a load-average switch, ignoring.')
+
+    if not options.verbose:
+        cmd.append('-verbosity:minimal')
+
+    cmd += options.vs_args
+
+    # Remove PLATFORM from the environment so that msbuild does not pick the
+    # x86 platform when the solution platform is Win32. Use pop() so a missing
+    # variable does not raise KeyError.
+    env = os.environ.copy()
+    env.pop('PLATFORM', None)
+
+    return cmd, env
+
+def get_parsed_args_xcode(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    runner = 'xcodebuild'
+    if not shutil.which(runner):
+        raise MesonException('Cannot find xcodebuild, did you install Xcode?')
+
+    # xcodebuild has no argument for switching the build directory, so chdir into it
+    os.chdir(str(builddir))
+
+    cmd = [runner, '-parallelizeTargets']
+
+    if options.targets:
+        for t in options.targets:
+            cmd += ['-target', t]
+
+    if options.clean:
+        if options.targets:
+            cmd += ['clean']
+        else:
+            cmd += ['-alltargets', 'clean']
+        # Otherwise xcodebuild tries to delete the builddir and fails
+        cmd += ['-UseNewBuildSystem=FALSE']
+
+    if options.jobs > 0:
+        cmd.extend(['-jobs', str(options.jobs)])
+
+    if options.load_average > 0:
+        mlog.warning('xcodebuild does not have a load-average switch, ignoring')
+
+    if options.verbose:
+        # xcodebuild is already quite verbose, and -quiet doesn't print any
+        # status messages
+        pass
+
+    cmd += options.xcode_args
+    return cmd, None
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    """Add compile specific arguments."""
+    parser.add_argument(
+        'targets',
+        metavar='TARGET',
+        nargs='*',
+        default=None,
+        help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].')
+    parser.add_argument(
+        '--clean',
+        action='store_true',
+        help='Clean the build directory.'
+    )
+    parser.add_argument(
+        '-C',
+        action='store',
+        dest='builddir',
+        type=Path,
+        default='.',
+        help='The directory containing build files to be built.'
+    )
+    parser.add_argument(
+        '-j', '--jobs',
+        action='store',
+        default=0,
+        type=int,
+        help='The number of worker jobs to run (if supported). If the value is less than 1 the build program will guess.'
+    )
+    parser.add_argument(
+        '-l', '--load-average',
+        action='store',
+        default=0,
+        type=int,
+        help='The system load average to try to maintain (if supported).'
+    )
+    parser.add_argument(
+        '-v', '--verbose',
+        action='store_true',
+        help='Show more verbose output.'
+    )
+    parser.add_argument(
+        '--ninja-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `ninja` (applied only on `ninja` backend).'
+    )
+    parser.add_argument(
+        '--vs-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `msbuild` (applied only on `vs` backend).'
+    )
+    parser.add_argument(
+        '--xcode-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `xcodebuild` (applied only on `xcode` backend).'
+    )
+
+def run(options: 'argparse.Namespace') -> int:
+    bdir = options.builddir  # type: Path
+    validate_builddir(bdir.resolve())
+
+    cmd = []    # type: T.List[str]
+    env = None  # type: T.Optional[T.Dict[str, str]]
+
+    if options.targets and options.clean:
+        raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously')
+
+    backend = get_backend_from_coredata(bdir)
+    if backend == 'ninja':
+        cmd, env = get_parsed_args_ninja(options, bdir)
+    elif backend.startswith('vs'):
+        cmd, env = get_parsed_args_vs(options, bdir)
+    elif backend == 'xcode':
+        cmd, env = get_parsed_args_xcode(options, bdir)
+    else:
+        raise MesonException(
+            f'Backend `{backend}` is not yet supported by `compile`. Use generated project files directly instead.')
+
+    p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer, env=env)
+
+    return p.returncode
diff --git a/meson/mesonbuild/mconf.py b/meson/mesonbuild/mconf.py
new file mode 100644
index 000000000..4b3f33179
--- /dev/null
+++ b/meson/mesonbuild/mconf.py
@@ -0,0 +1,334 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import shutil
+import os
+import textwrap
+import typing as T
+
+from . import build
+from . import coredata
+from . import environment
+from . import mesonlib
+from . import mintro
+from . import mlog
+from .ast import AstIDGenerator
+from .mesonlib import MachineChoice, OptionKey
+
+if T.TYPE_CHECKING:
+    import argparse
+    from .coredata import UserOption
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    coredata.register_builtin_arguments(parser)
+    parser.add_argument('builddir', nargs='?', default='.')
+    parser.add_argument('--clearcache', action='store_true', default=False,
+                        help='Clear cached state (e.g. found dependencies)')
+
+def make_lower_case(val: T.Any) -> T.Union[str, T.List[T.Any]]:  # T.Any because of recursion...
+    if isinstance(val, bool):
+        return str(val).lower()
+    elif isinstance(val, list):
+        return [make_lower_case(i) for i in val]
+    else:
+        return str(val)
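+
+# For illustration (added comment, not in the original file):
+#
+#     make_lower_case(True)        -> 'true'
+#     make_lower_case([1, False])  -> ['1', 'false']
+#     make_lower_case('MixedCase') -> 'MixedCase'
+#
+# i.e. only booleans are lower-cased; everything else is merely stringified.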
+
+
+class ConfException(mesonlib.MesonException):
+    pass
+
+
+class Conf:
+    def __init__(self, build_dir):
+        self.build_dir = os.path.abspath(os.path.realpath(build_dir))
+        if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
+            self.build_dir = os.path.dirname(self.build_dir)
+        self.build = None
+        self.max_choices_line_length = 60
+        self.name_col = []
+        self.value_col = []
+        self.choices_col = []
+        self.descr_col = []
+        # XXX: is there a case where this can actually remain false?
+        self.has_choices = False
+        self.all_subprojects: T.Set[str] = set()
+        self.yielding_options: T.Set[OptionKey] = set()
+
+        if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
+            self.build = build.load(self.build_dir)
+            self.source_dir = self.build.environment.get_source_dir()
+            self.coredata = coredata.load(self.build_dir)
+            self.default_values_only = False
+        elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
+            # Make sure that log entries in other parts of meson don't interfere with the JSON output
+            mlog.disable()
+            self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
+            intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
+            intr.analyze()
+            # Re-enable logging just in case
+            mlog.enable()
+            self.coredata = intr.coredata
+            self.default_values_only = True
+        else:
+            raise ConfException(f'Directory {build_dir} is neither a Meson build directory nor a project source directory.')
+
+    def clear_cache(self):
+        self.coredata.clear_deps_cache()
+
+    def set_options(self, options):
+        self.coredata.set_options(options)
+
+    def save(self):
+        # Do nothing when using introspection
+        if self.default_values_only:
+            return
+        # Only called if something has changed so overwrite unconditionally.
+        coredata.save(self.coredata, self.build_dir)
+        # We don't write the build file because any changes to it
+        # are erased when Meson is executed the next time, i.e. when
+        # Ninja is run.
+
+    def print_aligned(self) -> None:
+        """Do the actual printing.
+
+        This prints the generated output in an aligned, pretty form. It aims
+        for a total width of 160 characters, but will use whatever width the
+        tty reports. Though this is much wider than the standard 80 columns
+        of terminals, and even than the newer 120, compressing the output to
+        those lengths makes it hard to read.
+
+        Each column will have a specific width, and will be line wrapped.
+        """
+        total_width = shutil.get_terminal_size(fallback=(160, 0))[0]
+        _col = max(total_width // 5, 20)
+        four_column = (_col, _col, _col, total_width - (3 * _col))
+        # In this case we don't have the choices field, so we can redistribute
+        # the extra 40 characters to val and desc
+        three_column = (_col, _col * 2, total_width // 2)
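+        # Worked example (comment added for clarity): on a 160-column terminal
+        # _col = max(160 // 5, 20) = 32, so
+        #     four_column  = (32, 32, 32, 64)
+        #     three_column = (32, 64, 80)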
+
+        for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
+            if not any(line):
+                print('')
+                continue
+
+            # This is a header, like `Subproject foo:`,
+            # We just want to print that and get on with it
+            if line[0] and not any(line[1:]):
+                print(line[0])
+                continue
+
+            # textwrap.wrap will take a long string and create a list of strings
+            # no longer than the size given. Those lists can then be zipped
+            # together to print each line of the output, such that the columns
+            # are printed to the right width, row by row.
+            if self.has_choices:
+                name = textwrap.wrap(line[0], four_column[0])
+                val = textwrap.wrap(line[1], four_column[1])
+                choice = textwrap.wrap(line[2], four_column[2])
+                desc = textwrap.wrap(line[3], four_column[3])
+                for l in itertools.zip_longest(name, val, choice, desc, fillvalue=''):
+                    # We must use the length modifier here to get even rows, as
+                    # `textwrap.wrap` will only shorten, not lengthen each item
+                    print('{:{widths[0]}} {:{widths[1]}} {:{widths[2]}} {}'.format(*l, widths=four_column))
+            else:
+                name = textwrap.wrap(line[0], three_column[0])
+                val = textwrap.wrap(line[1], three_column[1])
+                desc = textwrap.wrap(line[3], three_column[2])
+                for l in itertools.zip_longest(name, val, desc, fillvalue=''):
+                    print('{:{widths[0]}} {:{widths[1]}} {}'.format(*l, widths=three_column))
+
+    def split_options_per_subproject(self, options: 'coredata.KeyedOptionDictType') -> T.Dict[str, T.Dict[str, 'UserOption']]:
+        result: T.Dict[str, T.Dict[str, 'UserOption']] = {}
+        for k, o in options.items():
+            subproject = k.subproject
+            if k.subproject:
+                k = k.as_root()
+                if o.yielding and k in options:
+                    self.yielding_options.add(k)
+                self.all_subprojects.add(subproject)
+            result.setdefault(subproject, {})[str(k)] = o
+        return result
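+
+    # Illustrative example (added, not upstream): given options keyed by
+    # OptionKey('warning_level') and OptionKey('warning_level', subproject='sub'),
+    # the result would be {'': {'warning_level': ...}, 'sub': {'warning_level': ...}}.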
+
+    def _add_line(self, name: OptionKey, value, choices, descr) -> None:
+        self.name_col.append(' ' * self.print_margin + str(name))
+        self.value_col.append(value)
+        self.choices_col.append(choices)
+        self.descr_col.append(descr)
+
+    def add_option(self, name, descr, value, choices):
+        if isinstance(value, list):
+            value = '[{}]'.format(', '.join(make_lower_case(value)))
+        else:
+            value = make_lower_case(value)
+
+        if choices:
+            self.has_choices = True
+            if isinstance(choices, list):
+                choices_list = make_lower_case(choices)
+                current = '['
+                while choices_list:
+                    i = choices_list.pop(0)
+                    if len(current) + len(i) >= self.max_choices_line_length:
+                        self._add_line(name, value, current + ',', descr)
+                        name = ''
+                        value = ''
+                        descr = ''
+                        current = ' '
+                    if len(current) > 1:
+                        current += ', '
+                    current += i
+                choices = current + ']'
+            else:
+                choices = make_lower_case(choices)
+        else:
+            choices = ''
+
+        self._add_line(name, value, choices, descr)
+
+    def add_title(self, title):
+        titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
+        if self.default_values_only:
+            titles['value'] = 'Default Value'
+        self._add_line('', '', '', '')
+        self._add_line(title, titles['value'], titles['choices'], titles['descr'])
+        self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))
+
+    def add_section(self, section):
+        self.print_margin = 0
+        self._add_line('', '', '', '')
+        self._add_line(section + ':', '', '', '')
+        self.print_margin = 2
+
+    def print_options(self, title: str, options: 'coredata.KeyedOptionDictType') -> None:
+        if not options:
+            return
+        if title:
+            self.add_title(title)
+        for k, o in sorted(options.items()):
+            printable_value = o.printable_value()
+            if k in self.yielding_options:
+                printable_value = ''
+            self.add_option(k, o.description, printable_value, o.choices)
+
+    def print_conf(self):
+        def print_default_values_warning():
+            mlog.warning('The source directory instead of the build directory was specified.')
+            mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
+
+        if self.default_values_only:
+            print_default_values_warning()
+            print('')
+
+        print('Core properties:')
+        print('  Source dir', self.source_dir)
+        if not self.default_values_only:
+            print('  Build dir ', self.build_dir)
+
+        dir_option_names = set(coredata.BUILTIN_DIR_OPTIONS)
+        test_option_names = {OptionKey('errorlogs'),
+                            OptionKey('stdsplit')}
+
+        dir_options: 'coredata.KeyedOptionDictType' = {}
+        test_options: 'coredata.KeyedOptionDictType' = {}
+        core_options: 'coredata.KeyedOptionDictType' = {}
+        for k, v in self.coredata.options.items():
+            if k in dir_option_names:
+                dir_options[k] = v
+            elif k in test_option_names:
+                test_options[k] = v
+            elif k.is_builtin():
+                core_options[k] = v
+
+        host_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.HOST})
+        build_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.BUILD})
+        host_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.HOST})
+        build_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.BUILD})
+        project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_project()})
+        show_build_options = self.default_values_only or self.build.environment.is_cross_build()
+
+        self.add_section('Main project options')
+        self.print_options('Core options', host_core_options[''])
+        if show_build_options:
+            self.print_options('', build_core_options[''])
+        self.print_options('Backend options', {str(k): v for k, v in self.coredata.options.items() if k.is_backend()})
+        self.print_options('Base options', {str(k): v for k, v in self.coredata.options.items() if k.is_base()})
+        self.print_options('Compiler options', host_compiler_options.get('', {}))
+        if show_build_options:
+            self.print_options('', build_compiler_options.get('', {}))
+        self.print_options('Directories', dir_options)
+        self.print_options('Testing options', test_options)
+        self.print_options('Project options', project_options.get('', {}))
+        for subproject in sorted(self.all_subprojects):
+            if subproject == '':
+                continue
+            self.add_section('Subproject ' + subproject)
+            if subproject in host_core_options:
+                self.print_options('Core options', host_core_options[subproject])
+            if subproject in build_core_options and show_build_options:
+                self.print_options('', build_core_options[subproject])
+            if subproject in host_compiler_options:
+                self.print_options('Compiler options', host_compiler_options[subproject])
+            if subproject in build_compiler_options and show_build_options:
+                self.print_options('', build_compiler_options[subproject])
+            if subproject in project_options:
+                self.print_options('Project options', project_options[subproject])
+        self.print_aligned()
+
+        # Print the warning twice so that the user is less likely to miss it
+        if self.default_values_only:
+            print('')
+            print_default_values_warning()
+
+        self.print_nondefault_buildtype_options()
+
+    def print_nondefault_buildtype_options(self):
+        mismatching = self.coredata.get_nondefault_buildtype_args()
+        if not mismatching:
+            return
+        print("\nThe following option(s) have a different value than the build type default\n")
+        print('               current   default')
+        for m in mismatching:
+            print(f'{m[0]:21}{m[1]:10}{m[2]:10}')
+
+def run(options):
+    coredata.parse_cmd_line_options(options)
+    builddir = os.path.abspath(os.path.realpath(options.builddir))
+    c = None
+    try:
+        c = Conf(builddir)
+        if c.default_values_only:
+            c.print_conf()
+            return 0
+
+        save = False
+        if options.cmd_line_options:
+            c.set_options(options.cmd_line_options)
+            coredata.update_cmd_line_file(builddir, options)
+            save = True
+        elif options.clearcache:
+            c.clear_cache()
+            save = True
+        else:
+            c.print_conf()
+        if save:
+            c.save()
+            mintro.update_build_options(c.coredata, c.build.environment.info_dir)
+            mintro.write_meson_info_file(c.build, [])
+    except ConfException as e:
+        print('Meson configurator encountered an error:')
+        if c is not None and c.build is not None:
+            mintro.write_meson_info_file(c.build, [e])
+        raise e
+    return 0
diff --git a/meson/mesonbuild/mdevenv.py b/meson/mesonbuild/mdevenv.py
new file mode 100644
index 000000000..c304cbb52
--- /dev/null
+++ b/meson/mesonbuild/mdevenv.py
@@ -0,0 +1,78 @@
+import os, subprocess
+import argparse
+import tempfile
+
+from pathlib import Path
+from . import build
+from .mesonlib import MesonException, RealPathAction, is_windows
+
+import typing as T
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        help='directory to cd into before running')
+    parser.add_argument('command', nargs=argparse.REMAINDER,
+                        help='Command to run in developer environment (default: interactive shell)')
+
+def get_windows_shell() -> str:
+    mesonbuild = Path(__file__).parent
+    script = mesonbuild / 'scripts' / 'cmd_or_ps.ps1'
+    command = ['powershell.exe', '-noprofile', '-executionpolicy', 'bypass', '-file', str(script)]
+    result = subprocess.check_output(command)
+    return result.decode().strip()
+
+def get_env(b: build.Build, build_dir: str) -> T.Dict[str, str]:
+    env = os.environ.copy()
+    for i in b.devenv:
+        env = i.get_env(env)
+
+    extra_env = build.EnvironmentVariables()
+    extra_env.set('MESON_DEVENV', ['1'])
+    extra_env.set('MESON_PROJECT_NAME', [b.project_name])
+
+    meson_uninstalled = Path(build_dir) / 'meson-uninstalled'
+    if meson_uninstalled.is_dir():
+        extra_env.prepend('PKG_CONFIG_PATH', [str(meson_uninstalled)])
+
+    return extra_env.get_env(env)
+
+def run(options: argparse.Namespace) -> int:
+    buildfile = Path(options.wd) / 'meson-private' / 'build.dat'
+    if not buildfile.is_file():
+        raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.')
+    b = build.load(options.wd)
+
+    devenv = get_env(b, options.wd)
+
+    args = options.command
+    if not args:
+        prompt_prefix = f'[{b.project_name}]'
+        if is_windows():
+            shell = get_windows_shell()
+            if shell == 'powershell.exe':
+                args = ['powershell.exe']
+                args += ['-NoLogo', '-NoExit']
+                prompt = f'function global:prompt {{  "{prompt_prefix} PS " + $PWD + "> "}}'
+                args += ['-Command', prompt]
+            else:
+                args = [os.environ.get("COMSPEC", r"C:\WINDOWS\system32\cmd.exe")]
+                args += ['/k', f'prompt {prompt_prefix} $P$G']
+        else:
+            args = [os.environ.get("SHELL", os.path.realpath("/bin/sh"))]
+        if "bash" in args[0] and not os.environ.get("MESON_DISABLE_PS1_OVERRIDE"):
+            tmprc = tempfile.NamedTemporaryFile(mode='w')
+            bashrc = os.path.expanduser('~/.bashrc')
+            if os.path.exists(bashrc):
+                tmprc.write(f'. {bashrc}\n')
+            tmprc.write(f'export PS1="{prompt_prefix} $PS1"')
+            tmprc.flush()
+            # Let the GC remove the tmp file
+            args.append("--rcfile")
+            args.append(tmprc.name)
+
+    try:
+        return subprocess.call(args, close_fds=False,
+                               env=devenv,
+                               cwd=options.wd)
+    except subprocess.CalledProcessError as e:
+        return e.returncode
diff --git a/meson/mesonbuild/mdist.py b/meson/mesonbuild/mdist.py
new file mode 100644
index 000000000..afa1b4c56
--- /dev/null
+++ b/meson/mesonbuild/mdist.py
@@ -0,0 +1,319 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import gzip
+import os
+import sys
+import shutil
+import subprocess
+import hashlib
+import json
+from glob import glob
+from pathlib import Path
+from mesonbuild.environment import detect_ninja
+from mesonbuild.mesonlib import (MesonException, RealPathAction, quiet_git,
+                                 windows_proof_rmtree)
+from mesonbuild.wrap import wrap
+from mesonbuild import mlog, build
+from .scripts.meson_exe import run_exe
+
+archive_choices = ['gztar', 'xztar', 'zip']
+
+archive_extension = {'gztar': '.tar.gz',
+                     'xztar': '.tar.xz',
+                     'zip': '.zip'}
+
+def add_arguments(parser):
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        help='directory to cd into before running')
+    parser.add_argument('--formats', default='xztar',
+                        help='Comma separated list of archive types to create. Supports xztar (default), gztar, and zip.')
+    parser.add_argument('--include-subprojects', action='store_true',
+                        help='Include source code of subprojects that have been used for the build.')
+    parser.add_argument('--no-tests', action='store_true',
+                        help='Do not build and test generated packages.')
+
+
+def create_hash(fname):
+    hashname = fname + '.sha256sum'
+    m = hashlib.sha256()
+    with open(fname, 'rb') as f:
+        m.update(f.read())
+    with open(hashname, 'w', encoding='utf-8') as f:
+        # A space and an asterisk because that is the format defined by GNU coreutils
+        # and accepted by busybox and the Perl shasum tool.
+        f.write('{} *{}\n'.format(m.hexdigest(), os.path.basename(fname)))
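+
+# For example (illustrative): for `foo-1.0.tar.xz` this writes a sibling file
+# `foo-1.0.tar.xz.sha256sum` containing a single line of the form
+#
+#     <64 hex digits> *foo-1.0.tar.xz
+#
+# which `sha256sum -c` and compatible tools can verify.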
+
+
+def del_gitfiles(dirname):
+    gitfiles = ('.git', '.gitattributes', '.gitignore', '.gitmodules')
+    for f in glob(os.path.join(dirname, '.git*')):
+        if os.path.split(f)[1] in gitfiles:
+            if os.path.isdir(f) and not os.path.islink(f):
+                windows_proof_rmtree(f)
+            else:
+                os.unlink(f)
+
+def process_submodules(dirname):
+    module_file = os.path.join(dirname, '.gitmodules')
+    if not os.path.exists(module_file):
+        return
+    subprocess.check_call(['git', 'submodule', 'update', '--init', '--recursive'], cwd=dirname)
+    for line in open(module_file, encoding='utf-8'):
+        line = line.strip()
+        if '=' not in line:
+            continue
+        k, v = line.split('=', 1)
+        k = k.strip()
+        v = v.strip()
+        if k != 'path':
+            continue
+        del_gitfiles(os.path.join(dirname, v))
+
+
+def run_dist_scripts(src_root, bld_root, dist_root, dist_scripts, subprojects):
+    assert os.path.isabs(dist_root)
+    env = {}
+    env['MESON_DIST_ROOT'] = dist_root
+    env['MESON_SOURCE_ROOT'] = src_root
+    env['MESON_BUILD_ROOT'] = bld_root
+    for d in dist_scripts:
+        if d.subproject and d.subproject not in subprojects:
+            continue
+        subdir = subprojects.get(d.subproject, '')
+        env['MESON_PROJECT_DIST_ROOT'] = os.path.join(dist_root, subdir)
+        env['MESON_PROJECT_SOURCE_ROOT'] = os.path.join(src_root, subdir)
+        env['MESON_PROJECT_BUILD_ROOT'] = os.path.join(bld_root, subdir)
+        name = ' '.join(d.cmd_args)
+        print(f'Running custom dist script {name!r}')
+        try:
+            rc = run_exe(d, env)
+            if rc != 0:
+                sys.exit('Dist script errored out')
+        except OSError:
+            print(f'Failed to run dist script {name!r}')
+            sys.exit(1)
+
+def git_root(src_root):
+    # Cannot use --show-toplevel here because git in our CI prints cygwin paths
+    # that python cannot resolve. Work around this by taking the parent of src_root.
+    prefix = quiet_git(['rev-parse', '--show-prefix'], src_root, check=True)[1].strip()
+    if not prefix:
+        return Path(src_root)
+    prefix_level = len(Path(prefix).parents)
+    return Path(src_root).parents[prefix_level - 1]
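+
+# Worked example (comment added for clarity): if src_root were, say,
+# /work/repo/subprojects/foo and `git rev-parse --show-prefix` printed
+# 'subprojects/foo/', then prefix_level = len(Path('subprojects/foo').parents) = 2
+# and Path(src_root).parents[1] would be /work/repo, i.e. the repository root.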
+
+def is_git(src_root):
+    '''
+    Checks whether the meson.build file at the root of the source directory is
+    tracked by git. The source directory could be a subproject that is part of
+    the parent project's git repository.
+    '''
+    return quiet_git(['ls-files', '--error-unmatch', 'meson.build'], src_root)[0]
+
+def git_have_dirty_index(src_root):
+    '''Check whether there are uncommitted changes in git'''
+    ret = subprocess.call(['git', '-C', src_root, 'diff-index', '--quiet', 'HEAD'])
+    return ret == 1
+
+def git_clone(src_root, distdir):
+    if git_have_dirty_index(src_root):
+        mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball')
+    if os.path.exists(distdir):
+        windows_proof_rmtree(distdir)
+    repo_root = git_root(src_root)
+    if repo_root.samefile(src_root):
+        os.makedirs(distdir)
+        subprocess.check_call(['git', 'clone', '--shared', src_root, distdir])
+    else:
+        subdir = Path(src_root).relative_to(repo_root)
+        tmp_distdir = distdir + '-tmp'
+        if os.path.exists(tmp_distdir):
+            windows_proof_rmtree(tmp_distdir)
+        os.makedirs(tmp_distdir)
+        subprocess.check_call(['git', 'clone', '--shared', '--no-checkout', str(repo_root), tmp_distdir])
+        subprocess.check_call(['git', 'checkout', 'HEAD', '--', str(subdir)], cwd=tmp_distdir)
+        Path(tmp_distdir, subdir).rename(distdir)
+        windows_proof_rmtree(tmp_distdir)
+    process_submodules(distdir)
+    del_gitfiles(distdir)
+
+def create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, dist_scripts, subprojects):
+    distdir = os.path.join(dist_sub, dist_name)
+    git_clone(src_root, distdir)
+    for path in subprojects.values():
+        sub_src_root = os.path.join(src_root, path)
+        sub_distdir = os.path.join(distdir, path)
+        if os.path.exists(sub_distdir):
+            continue
+        if is_git(sub_src_root):
+            git_clone(sub_src_root, sub_distdir)
+        else:
+            shutil.copytree(sub_src_root, sub_distdir)
+    run_dist_scripts(src_root, bld_root, distdir, dist_scripts, subprojects)
+    output_names = []
+    for a in archives:
+        compressed_name = distdir + archive_extension[a]
+        shutil.make_archive(distdir, a, root_dir=dist_sub, base_dir=dist_name)
+        output_names.append(compressed_name)
+    windows_proof_rmtree(distdir)
+    return output_names
+
+def is_hg(src_root):
+    return os.path.isdir(os.path.join(src_root, '.hg'))
+
+def hg_have_dirty_index(src_root):
+    '''Check whether there are uncommitted changes in hg'''
+    out = subprocess.check_output(['hg', '-R', src_root, 'summary'])
+    return b'commit: (clean)' not in out
+
+def create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, dist_scripts):
+    if hg_have_dirty_index(src_root):
+        mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball')
+    if dist_scripts:
+        mlog.warning('dist scripts are not supported in Mercurial projects')
+
+    os.makedirs(dist_sub, exist_ok=True)
+    tarname = os.path.join(dist_sub, dist_name + '.tar')
+    xzname = tarname + '.xz'
+    gzname = tarname + '.gz'
+    zipname = os.path.join(dist_sub, dist_name + '.zip')
+    # Note that -X interprets relative paths using the current working
+    # directory, not the repository root, so this must be an absolute path:
+    # https://bz.mercurial-scm.org/show_bug.cgi?id=6267
+    #
+    # .hg[a-z]* is used instead of .hg* to keep .hg_archival.txt, which may
+    # be useful to link the tarball to the Mercurial revision for either
+    # manual inspection or in case any code interprets it for a --version or
+    # similar.
+    subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'tar',
+                           '-X', src_root + '/.hg[a-z]*', tarname])
+    output_names = []
+    if 'xztar' in archives:
+        import lzma
+        with lzma.open(xzname, 'wb') as xf, open(tarname, 'rb') as tf:
+            shutil.copyfileobj(tf, xf)
+        output_names.append(xzname)
+    if 'gztar' in archives:
+        with gzip.open(gzname, 'wb') as zf, open(tarname, 'rb') as tf:
+            shutil.copyfileobj(tf, zf)
+        output_names.append(gzname)
+    os.unlink(tarname)
+    if 'zip' in archives:
+        subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'zip', zipname])
+        output_names.append(zipname)
+    return output_names
+
+def run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args):
+    if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0:
+        print('Running Meson on distribution package failed')
+        return 1
+    if subprocess.call(ninja_args, cwd=builddir) != 0:
+        print('Compiling the distribution package failed')
+        return 1
+    if subprocess.call(ninja_args + ['test'], cwd=builddir) != 0:
+        print('Running unit tests on the distribution package failed')
+        return 1
+    myenv = os.environ.copy()
+    myenv['DESTDIR'] = installdir
+    if subprocess.call(ninja_args + ['install'], cwd=builddir, env=myenv) != 0:
+        print('Installing the distribution package failed')
+        return 1
+    return 0
+
+def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
+    print(f'Testing distribution package {packagename}')
+    unpackdir = os.path.join(privdir, 'dist-unpack')
+    builddir = os.path.join(privdir, 'dist-build')
+    installdir = os.path.join(privdir, 'dist-install')
+    for p in (unpackdir, builddir, installdir):
+        if os.path.exists(p):
+            windows_proof_rmtree(p)
+        os.mkdir(p)
+    ninja_args = detect_ninja()
+    shutil.unpack_archive(packagename, unpackdir)
+    unpacked_files = glob(os.path.join(unpackdir, '*'))
+    assert len(unpacked_files) == 1
+    unpacked_src_dir = unpacked_files[0]
+    with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json'), encoding='utf-8') as boptions:
+        meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
+                          if o['name'] not in ['backend', 'install_umask', 'buildtype']]
+    meson_command += extra_meson_args
+
+    ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args)
+    if ret > 0:
+        print(f'Dist check build directory was {builddir}')
+    else:
+        windows_proof_rmtree(unpackdir)
+        windows_proof_rmtree(builddir)
+        windows_proof_rmtree(installdir)
+        print(f'Distribution package {packagename} tested')
+    return ret
+
+def determine_archives_to_generate(options):
+    result = []
+    for i in options.formats.split(','):
+        if i not in archive_choices:
+            sys.exit(f'Value "{i}" not one of permitted values {archive_choices}.')
+        result.append(i)
+    if len(result) == 0:
+        sys.exit('No archive types specified.')
+    return result
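+
+# For illustration (added comment): `--formats xztar,zip` yields
+# ['xztar', 'zip'], while any name outside archive_choices aborts with an error.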
+
+def run(options):
+    buildfile = Path(options.wd) / 'meson-private' / 'build.dat'
+    if not buildfile.is_file():
+        raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.')
+    b = build.load(options.wd)
+    # This import must be delayed, otherwise it would pick up the default
+    # value of None.
+    from mesonbuild.mesonlib import get_meson_command
+    src_root = b.environment.source_dir
+    bld_root = b.environment.build_dir
+    priv_dir = os.path.join(bld_root, 'meson-private')
+    dist_sub = os.path.join(bld_root, 'meson-dist')
+
+    dist_name = b.project_name + '-' + b.project_version
+
+    archives = determine_archives_to_generate(options)
+
+    subprojects = {}
+    extra_meson_args = []
+    if options.include_subprojects:
+        subproject_dir = os.path.join(src_root, b.subproject_dir)
+        for sub in b.subprojects:
+            directory = wrap.get_directory(subproject_dir, sub)
+            subprojects[sub] = os.path.join(b.subproject_dir, directory)
+        extra_meson_args.append('-Dwrap_mode=nodownload')
+
+    if is_git(src_root):
+        names = create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, b.dist_scripts, subprojects)
+    elif is_hg(src_root):
+        if subprojects:
+            print('--include-subprojects option currently not supported with Mercurial')
+            return 1
+        names = create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, b.dist_scripts)
+    else:
+        print('Dist currently only works with Git or Mercurial repos')
+        return 1
+    if names is None:
+        return 1
+    rc = 0
+    if not options.no_tests:
+        # Check only one.
+        rc = check_dist(names[0], get_meson_command(), extra_meson_args, bld_root, priv_dir)
+    if rc == 0:
+        for name in names:
+            create_hash(name)
+            print('Created', name)
+    return rc
diff --git a/meson/mesonbuild/mesondata.py b/meson/mesonbuild/mesondata.py
new file mode 100644
index 000000000..43b7bde7a
--- /dev/null
+++ b/meson/mesonbuild/mesondata.py
@@ -0,0 +1,394 @@
+# Copyright 2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+####
+####  WARNING: This is an automatically generated file! Do not edit!
+####           Generated by tools/gen_data.py
+####
+
+
+# TODO: Remember to remove this also from tools/gen_data.py
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .environment import Environment
+
+######################
+# BEGIN Data section #
+######################
+
+file_0_data_preload_cmake = '''\
+if(MESON_PS_LOADED)
+  return()
+endif()
+
+set(MESON_PS_LOADED ON)
+
+cmake_policy(PUSH)
+cmake_policy(SET CMP0054 NEW) # https://cmake.org/cmake/help/latest/policy/CMP0054.html
+
+# Dummy macros that have a special meaning in the meson code
+macro(meson_ps_execute_delayed_calls)
+endmacro()
+
+macro(meson_ps_reload_vars)
+endmacro()
+
+macro(meson_ps_disabled_function)
+  message(WARNING "The function '${ARGV0}' is disabled in the context of CMake subprojects.\n"
+                  "This should not be an issue but may lead to compilation errors.")
+endmacro()
+
+# Helper macro to inspect the current CMake state
+macro(meson_ps_inspect_vars)
+  set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+  set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+  meson_ps_execute_delayed_calls()
+endmacro()
+
+
+# Override some system functions with custom code and forward the args
+# to the original function
+macro(add_custom_command)
+  meson_ps_inspect_vars()
+  _add_custom_command(${ARGV})
+endmacro()
+
+macro(add_custom_target)
+  meson_ps_inspect_vars()
+  _add_custom_target(${ARGV})
+endmacro()
+
+macro(set_property)
+  meson_ps_inspect_vars()
+  _set_property(${ARGV})
+endmacro()
+
+function(set_source_files_properties)
+  set(FILES)
+  set(I 0)
+  set(PROPERTIES OFF)
+
+  while(I LESS ARGC)
+    if(NOT PROPERTIES)
+      if("${ARGV${I}}" STREQUAL "PROPERTIES")
+        set(PROPERTIES ON)
+      else()
+        list(APPEND FILES "${ARGV${I}}")
+      endif()
+
+      math(EXPR I "${I} + 1")
+    else()
+      set(ID_IDX ${I})
+      math(EXPR PROP_IDX "${ID_IDX} + 1")
+
+      set(ID   "${ARGV${ID_IDX}}")
+      set(PROP "${ARGV${PROP_IDX}}")
+
+      set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}")
+      math(EXPR I "${I} + 2")
+    endif()
+  endwhile()
+endfunction()
+
+# Disable some functions that would mess up the CMake meson integration
+macro(target_precompile_headers)
+  meson_ps_disabled_function(target_precompile_headers)
+endmacro()
+
+set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property)
+meson_ps_reload_vars()
+
+cmake_policy(POP)
+'''
+
+file_1_data_CMakeLists_txt = '''\
+# fail noisily if attempt to use this file without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+while(TRUE)
+  if ("${VERSION}" STREQUAL "")
+    find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+  else()
+    find_package("${NAME}" "${VERSION}" QUIET COMPONENTS ${COMPS})
+  endif()
+
+  # ARCHS has to be set via the CMD interface
+  if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND  OR  ${PACKAGE_NAME}_FOUND)
+  set(PACKAGE_FOUND TRUE)
+
+  # Check the following variables:
+  # FOO_VERSION
+  # Foo_VERSION
+  # FOO_VERSION_STRING
+  # Foo_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED ${_packageName}_VERSION)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+    elseif(DEFINED ${_packageName}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # FOO_LIBRARIES
+  # Foo_LIBRARIES
+  # FOO_LIBS
+  # Foo_LIBS
+  set(libs)
+  if(DEFINED ${_packageName}_LIBRARIES)
+    set(libs ${_packageName}_LIBRARIES)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+    set(libs ${PACKAGE_NAME}_LIBRARIES)
+  elseif(DEFINED ${_packageName}_LIBS)
+    set(libs ${_packageName}_LIBS)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+    set(libs ${PACKAGE_NAME}_LIBS)
+  endif()
+
+  # Check the following variables:
+  # FOO_INCLUDE_DIRS
+  # Foo_INCLUDE_DIRS
+  # FOO_INCLUDES
+  # Foo_INCLUDES
+  # FOO_INCLUDE_DIR
+  # Foo_INCLUDE_DIR
+  set(includes)
+  if(DEFINED ${_packageName}_INCLUDE_DIRS)
+    set(includes ${_packageName}_INCLUDE_DIRS)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+  elseif(DEFINED ${_packageName}_INCLUDES)
+    set(includes ${_packageName}_INCLUDES)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+    set(includes ${PACKAGE_NAME}_INCLUDES)
+  elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+    set(includes ${_packageName}_INCLUDE_DIR)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # FOO_DEFINITIONS
+  # Foo_DEFINITIONS
+  set(definitions)
+  if(DEFINED ${_packageName}_DEFINITIONS)
+    set(definitions ${_packageName}_DEFINITIONS)
+  elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+    set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
+'''
+
+file_2_data_CMakeListsLLVM_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
+
+set(PACKAGE_FOUND FALSE)
+
+while(TRUE)
+  find_package(LLVM REQUIRED CONFIG QUIET)
+
+  # ARCHS has to be set via the CMD interface
+  if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(LLVM_FOUND)
+  set(PACKAGE_FOUND TRUE)
+
+  foreach(mod IN LISTS LLVM_MESON_MODULES)
+    # Reset variables
+    set(out_mods)
+    set(real_mods)
+
+    # Generate a lower and upper case version
+    string(TOLOWER "${mod}" mod_L)
+    string(TOUPPER "${mod}" mod_U)
+
+    # Get the mapped components
+    llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+    list(SORT              out_mods)
+    list(REMOVE_DUPLICATES out_mods)
+
+    # Make sure that the modules exist
+    foreach(i IN LISTS out_mods)
+      if(TARGET ${i})
+        list(APPEND real_mods ${i})
+      endif()
+    endforeach()
+
+    # Set the output variables
+    set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+    foreach(i IN LISTS real_mods)
+      set(MESON_TARGET_TO_LLVM_${i} ${mod})
+    endforeach()
+  endforeach()
+
+  # Check the following variables:
+  # LLVM_PACKAGE_VERSION
+  # LLVM_VERSION
+  # LLVM_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED LLVM_PACKAGE_VERSION)
+      set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+    elseif(DEFINED LLVM_VERSION)
+      set(PACKAGE_VERSION "${LLVM_VERSION}")
+    elseif(DEFINED LLVM_VERSION_STRING)
+      set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # LLVM_LIBRARIES
+  # LLVM_LIBS
+  set(libs)
+  if(DEFINED LLVM_LIBRARIES)
+    set(libs LLVM_LIBRARIES)
+  elseif(DEFINED LLVM_LIBS)
+    set(libs LLVM_LIBS)
+  endif()
+
+  # Check the following variables:
+  # LLVM_INCLUDE_DIRS
+  # LLVM_INCLUDES
+  # LLVM_INCLUDE_DIR
+  set(includes)
+  if(DEFINED LLVM_INCLUDE_DIRS)
+    set(includes LLVM_INCLUDE_DIRS)
+  elseif(DEFINED LLVM_INCLUDES)
+    set(includes LLVM_INCLUDES)
+  elseif(DEFINED LLVM_INCLUDE_DIR)
+    set(includes LLVM_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # LLVM_DEFINITIONS
+  set(definitions)
+  if(DEFINED LLVM_DEFINITIONS)
+    set(definitions LLVM_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
+'''
+
+file_3_data_CMakePathInfo_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+  file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+  foreach(dir ${implicit_dirs})
+    if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+      list(APPEND LIB_ARCH_LIST "${dir}")
+    endif()
+  endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
+'''
+
+
+####################
+# END Data section #
+####################
+
+class DataFile:
+    def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+        self.path = path
+        self.sha256sum = sha256sum
+        self.data = data
+
+    def write_once(self, path: Path) -> None:
+        if not path.exists():
+            path.write_text(self.data, encoding='utf-8')
+
+    def write_to_private(self, env: 'Environment') -> Path:
+        out_file = Path(env.scratch_dir) / 'data' / self.path.name
+        out_file.parent.mkdir(exist_ok=True)
+        self.write_once(out_file)
+        return out_file
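+
+# Usage sketch (illustrative, not present upstream): code holding an Environment
+# instance `env` can materialize one of the embedded files on demand, e.g.
+#
+#     path = mesondata['cmake/data/preload.cmake'].write_to_private(env)
+#
+# which writes the data under `env.scratch_dir`/data/ the first time only.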
+
+
+mesondata = {
+    'cmake/data/preload.cmake': DataFile(
+        Path('cmake/data/preload.cmake'),
+        'ce8f30159aab25b92c26c58a219a427d47838bfa0739475221d6c8993b4946e5',
+        file_0_data_preload_cmake,
+    ),
+    'dependencies/data/CMakeLists.txt': DataFile(
+        Path('dependencies/data/CMakeLists.txt'),
+        '4dca24afa13e9311f0598a6ac29690490819bd7d82cfdaa0a2fe5eea3c0fa0d5',
+        file_1_data_CMakeLists_txt,
+    ),
+    'dependencies/data/CMakeListsLLVM.txt': DataFile(
+        Path('dependencies/data/CMakeListsLLVM.txt'),
+        '412cec3315597041a978d018cdaca282dcd47693793540da88ae2f80d0cbd7cd',
+        file_2_data_CMakeListsLLVM_txt,
+    ),
+    'dependencies/data/CMakePathInfo.txt': DataFile(
+        Path('dependencies/data/CMakePathInfo.txt'),
+        '90da8b443982d9c87139b7dc84228eb58cab4315764949637208f25e2bda7db2',
+        file_3_data_CMakePathInfo_txt,
+    ),
+}
diff --git a/meson/mesonbuild/mesonlib/__init__.py b/meson/mesonbuild/mesonlib/__init__.py
new file mode 100644
index 000000000..5b646b549
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/__init__.py
@@ -0,0 +1,30 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions and classes."""
+
+import os
+
+from .universal import *
+
+# Here we import either the posix implementations, the windows implementations,
+# or a generic no-op implementation
+if os.name == 'posix':
+    from .posix import *
+elif os.name == 'nt':
+    from .win32 import *
+else:
+    from .platform import *
diff --git a/meson/mesonbuild/mesonlib/platform.py b/meson/mesonbuild/mesonlib/platform.py
new file mode 100644
index 000000000..cdd42b102
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/platform.py
@@ -0,0 +1,37 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""base classes providing no-op functionality.."""
+
+import os
+import typing as T
+
+from .. import mlog
+
+__all__ = ['BuildDirLock']
+
+# This needs to be inherited by the specific implementations to make type
+# checking happy
+class BuildDirLock:
+
+    def __init__(self, builddir: str) -> None:
+        self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
+
+    def __enter__(self) -> None:
+        mlog.debug('Calling the no-op version of BuildDirLock')
+
+    def __exit__(self, *args: T.Any) -> None:
+        pass
diff --git a/meson/mesonbuild/mesonlib/posix.py b/meson/mesonbuild/mesonlib/posix.py
new file mode 100644
index 000000000..67f9a442b
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/posix.py
@@ -0,0 +1,39 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Posix specific implementations of mesonlib functionality."""
+
+import fcntl
+import typing as T
+
+from .universal import MesonException
+from .platform import BuildDirLock as BuildDirLockBase
+
+__all__ = ['BuildDirLock']
+
+class BuildDirLock(BuildDirLockBase):
+
+    def __enter__(self) -> None:
+        self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+        try:
+            fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        except (BlockingIOError, PermissionError):
+            self.lockfile.close()
+            raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+
+    def __exit__(self, *args: T.Any) -> None:
+        fcntl.flock(self.lockfile, fcntl.LOCK_UN)
+        self.lockfile.close()
diff --git a/meson/mesonbuild/mesonlib/universal.py b/meson/mesonbuild/mesonlib/universal.py
new file mode 100644
index 000000000..d670d04c7
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/universal.py
@@ -0,0 +1,2190 @@
+# Copyright 2012-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library of random helper functionality."""
+from pathlib import Path
+import argparse
+import enum
+import sys
+import stat
+import time
+import abc
+import platform, subprocess, operator, os, shlex, shutil, re
+import collections
+from functools import lru_cache, wraps, total_ordering
+from itertools import tee, filterfalse
+from tempfile import TemporaryDirectory
+import typing as T
+import uuid
+import textwrap
+
+from mesonbuild import mlog
+
+if T.TYPE_CHECKING:
+    from .._typing import ImmutableListProtocol
+    from ..build import ConfigurationData
+    from ..coredata import KeyedOptionDictType, UserOption
+    from ..compilers.compilers import CompilerType
+    from ..interpreterbase import ObjectHolder
+
+FileOrString = T.Union['File', str]
+
+_T = T.TypeVar('_T')
+_U = T.TypeVar('_U')
+
+__all__ = [
+    'GIT',
+    'an_unpicklable_object',
+    'python_command',
+    'project_meson_versions',
+    'HoldableObject',
+    'SecondLevelHolder',
+    'File',
+    'FileMode',
+    'GitException',
+    'LibType',
+    'MachineChoice',
+    'MesonException',
+    'MesonBugException',
+    'EnvironmentException',
+    'FileOrString',
+    'GitException',
+    'OptionKey',
+    'dump_conf_header',
+    'OptionOverrideProxy',
+    'OptionProxy',
+    'OptionType',
+    'OrderedSet',
+    'PerMachine',
+    'PerMachineDefaultable',
+    'PerThreeMachine',
+    'PerThreeMachineDefaultable',
+    'ProgressBar',
+    'RealPathAction',
+    'TemporaryDirectoryWinProof',
+    'Version',
+    'check_direntry_issues',
+    'classify_unity_sources',
+    'current_vs_supports_modules',
+    'darwin_get_object_archs',
+    'default_libdir',
+    'default_libexecdir',
+    'default_prefix',
+    'detect_subprojects',
+    'detect_vcs',
+    'do_conf_file',
+    'do_conf_str',
+    'do_define',
+    'do_replacement',
+    'exe_exists',
+    'expand_arguments',
+    'extract_as_list',
+    'get_compiler_for_source',
+    'get_filenames_templates_dict',
+    'get_library_dirs',
+    'get_variable_regex',
+    'get_wine_shortpath',
+    'git',
+    'has_path_sep',
+    'is_aix',
+    'is_android',
+    'is_ascii_string',
+    'is_cygwin',
+    'is_debianlike',
+    'is_dragonflybsd',
+    'is_freebsd',
+    'is_haiku',
+    'is_hurd',
+    'is_irix',
+    'is_linux',
+    'is_netbsd',
+    'is_openbsd',
+    'is_osx',
+    'is_qnx',
+    'is_sunos',
+    'is_windows',
+    'is_wsl',
+    'iter_regexin_iter',
+    'join_args',
+    'listify',
+    'partition',
+    'path_is_in_root',
+    'Popen_safe',
+    'quiet_git',
+    'quote_arg',
+    'relative_to_if_possible',
+    'relpath',
+    'replace_if_different',
+    'run_once',
+    'get_meson_command',
+    'set_meson_command',
+    'split_args',
+    'stringlistify',
+    'substitute_values',
+    'substring_is_in_list',
+    'typeslistify',
+    'verbose_git',
+    'version_compare',
+    'version_compare_condition_with_min',
+    'version_compare_many',
+    'search_version',
+    'windows_proof_rm',
+    'windows_proof_rmtree',
+]
+
+
+# TODO: this is such a hack, this really should be either in coredata or in the
+# interpreter
+# {subproject: project_meson_version}
+project_meson_versions = collections.defaultdict(str)  # type: T.DefaultDict[str, str]
+
+
+from glob import glob
+
+if os.path.basename(sys.executable) == 'meson.exe':
+    # In Windows and using the MSI installed executable.
+    python_command = [sys.executable, 'runpython']
+else:
+    python_command = [sys.executable]
+_meson_command = None
+
+class MesonException(Exception):
+    '''Exceptions thrown by Meson'''
+
+    def __init__(self, *args: object, file: T.Optional[str] = None,
+                 lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
+        super().__init__(*args)
+        self.file = file
+        self.lineno = lineno
+        self.colno = colno
+
+
+class MesonBugException(MesonException):
+    '''Exceptions thrown when there is a clear Meson bug that should be reported'''
+
+    def __init__(self, msg: str, file: T.Optional[str] = None,
+                 lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
+        super().__init__(msg + '\n\n    This is a Meson bug and should be reported!',
+                         file=file, lineno=lineno, colno=colno)
+
+class EnvironmentException(MesonException):
+    '''Exceptions thrown while processing and creating the build environment'''
+
+class GitException(MesonException):
+    def __init__(self, msg: str, output: T.Optional[str] = None):
+        super().__init__(msg)
+        self.output = output.strip() if output else ''
+
+GIT = shutil.which('git')
+def git(cmd: T.List[str], workingdir: str, check: bool = False, **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
+    cmd = [GIT] + cmd
+    p, o, e = Popen_safe(cmd, cwd=workingdir, **kwargs)
+    if check and p.returncode != 0:
+        raise GitException('Git command failed: ' + str(cmd), e)
+    return p, o, e
+
+def quiet_git(cmd: T.List[str], workingdir: str, check: bool = False) -> T.Tuple[bool, str]:
+    if not GIT:
+        m = 'Git program not found.'
+        if check:
+            raise GitException(m)
+        return False, m
+    p, o, e = git(cmd, workingdir, check)
+    if p.returncode != 0:
+        return False, e
+    return True, o
+
+def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
+    if not GIT:
+        m = 'Git program not found.'
+        if check:
+            raise GitException(m)
+        return False
+    p, _, _ = git(cmd, workingdir, check, stdout=None, stderr=None)
+    return p.returncode == 0
+
+def set_meson_command(mainfile: str) -> None:
+    global python_command
+    global _meson_command
+    # On UNIX-like systems `meson` is a Python script
+    # On Windows `meson` and `meson.exe` are wrapper exes
+    if not mainfile.endswith('.py'):
+        _meson_command = [mainfile]
+    elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'):
+        # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain
+        _meson_command = python_command + ['-m', 'mesonbuild.mesonmain']
+    else:
+        # Either run uninstalled, or full path to meson-script.py
+        _meson_command = python_command + [mainfile]
+    # We print this value for unit tests.
+    if 'MESON_COMMAND_TESTS' in os.environ:
+        mlog.log(f'meson_command is {_meson_command!r}')
+
+
+def get_meson_command() -> T.Optional[T.List[str]]:
+    return _meson_command
+
+
+def is_ascii_string(astring: T.Union[str, bytes]) -> bool:
+    try:
+        if isinstance(astring, str):
+            astring.encode('ascii')
+        elif isinstance(astring, bytes):
+            astring.decode('ascii')
+    except (UnicodeDecodeError, UnicodeEncodeError):
+        return False
+    return True
+
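+# Illustrative usage (not part of the upstream module):
+#   >>> is_ascii_string('hello')
+#   True
+#   >>> is_ascii_string(b'\xc3\xa9')
+#   False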
+
+def check_direntry_issues(direntry_array: T.Union[T.List[T.Union[str, bytes]], str, bytes]) -> None:
+    import locale
+    # Warn if the locale is not UTF-8. This can cause various unfixable issues
+    # such as os.stat not being able to decode filenames with unicode in them.
+    # There is no way to reset both the preferred encoding and the filesystem
+    # encoding, so we can just warn about it.
+    e = locale.getpreferredencoding()
+    if e.upper() != 'UTF-8' and not is_windows():
+        if not isinstance(direntry_array, list):
+            direntry_array = [direntry_array]
+        for de in direntry_array:
+            if is_ascii_string(de):
+                continue
+            mlog.warning(textwrap.dedent(f'''
+                You are using {e!r} which is not a Unicode-compatible
+                locale but you are trying to access a file system entry called {de!r} which is
+                not pure ASCII. This may cause problems.
+                '''), file=sys.stderr)
+
+
+# Put this in objects that should not get dumped to pickle files
+# by accident.
+import threading
+an_unpicklable_object = threading.Lock()
+
+class HoldableObject(metaclass=abc.ABCMeta):
+    ''' Dummy base class for all objects that can be
+        held by an interpreter.baseobjects.ObjectHolder '''
+
+class SecondLevelHolder(HoldableObject, metaclass=abc.ABCMeta):
+    ''' A second level object holder. The primary purpose
+        of such objects is to hold multiple objects with one
+        default option. '''
+
+    @abc.abstractmethod
+    def get_default_object(self) -> HoldableObject: ...
+
+class FileMode:
+    # The first triad is for owner permissions, the second for group permissions,
+    # and the third for others (everyone else).
+    # For the 1st character:
+    #  'r' means can read
+    #  '-' means not allowed
+    # For the 2nd character:
+    #  'w' means can write
+    #  '-' means not allowed
+    # For the 3rd character:
+    #  'x' means can execute
+    #  's' means can execute and setuid/setgid is set (owner/group triads only)
+    #  'S' means cannot execute and setuid/setgid is set (owner/group triads only)
+    #  't' means can execute and sticky bit is set ("others" triads only)
+    #  'T' means cannot execute and sticky bit is set ("others" triads only)
+    #  '-' means none of these are allowed
+    #
+    # The meanings of 'rwx' perms is not obvious for directories; see:
+    # https://www.hackinglinuxexposed.com/articles/20030424.html
+    #
+    # For information on this notation such as setuid/setgid/sticky bits, see:
+    # https://en.wikipedia.org/wiki/File_system_permissions#Symbolic_notation
+    symbolic_perms_regex = re.compile('[r-][w-][xsS-]' # Owner perms
+                                      '[r-][w-][xsS-]' # Group perms
+                                      '[r-][w-][xtT-]') # Others perms
+
+    def __init__(self, perms: T.Optional[str] = None, owner: T.Union[str, int, None] = None,
+                 group: T.Union[str, int, None] = None):
+        self.perms_s = perms
+        self.perms = self.perms_s_to_bits(perms)
+        self.owner = owner
+        self.group = group
+
+    def __repr__(self) -> str:
+        ret = '<FileMode: {!r} owner={} group={}'
+        return ret.format(self.perms_s, self.owner, self.group)
+
+    @classmethod
+    def perms_s_to_bits(cls, perms_s: T.Optional[str]) -> int:
+        '''
+        Does the opposite of stat.filemode(), converts strings of the form
+        'rwxr-xr-x' to st_mode enums which can be passed to os.chmod()
+        '''
+        if perms_s is None:
+            # No perms specified, we will not touch the permissions
+            return -1
+        eg = 'rwxr-xr-x'
+        if not isinstance(perms_s, str):
+            raise MesonException(f'Install perms must be a string. For example, {eg!r}')
+        if len(perms_s) != 9 or not cls.symbolic_perms_regex.match(perms_s):
+            raise MesonException(f'File perms {perms_s!r} must be exactly 9 chars. For example, {eg!r}')
+        perms = 0
+        # Owner perms
+        if perms_s[0] == 'r':
+            perms |= stat.S_IRUSR
+        if perms_s[1] == 'w':
+            perms |= stat.S_IWUSR
+        if perms_s[2] == 'x':
+            perms |= stat.S_IXUSR
+        elif perms_s[2] == 'S':
+            perms |= stat.S_ISUID
+        elif perms_s[2] == 's':
+            perms |= stat.S_IXUSR
+            perms |= stat.S_ISUID
+        # Group perms
+        if perms_s[3] == 'r':
+            perms |= stat.S_IRGRP
+        if perms_s[4] == 'w':
+            perms |= stat.S_IWGRP
+        if perms_s[5] == 'x':
+            perms |= stat.S_IXGRP
+        elif perms_s[5] == 'S':
+            perms |= stat.S_ISGID
+        elif perms_s[5] == 's':
+            perms |= stat.S_IXGRP
+            perms |= stat.S_ISGID
+        # Others perms
+        if perms_s[6] == 'r':
+            perms |= stat.S_IROTH
+        if perms_s[7] == 'w':
+            perms |= stat.S_IWOTH
+        if perms_s[8] == 'x':
+            perms |= stat.S_IXOTH
+        elif perms_s[8] == 'T':
+            perms |= stat.S_ISVTX
+        elif perms_s[8] == 't':
+            perms |= stat.S_IXOTH
+            perms |= stat.S_ISVTX
+        return perms
+
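+# Illustrative usage (not part of the upstream module): perms_s_to_bits()
+# converts a symbolic permission string into the numeric mode accepted by
+# os.chmod(), e.g.
+#   >>> oct(FileMode.perms_s_to_bits('rw-r--r--'))
+#   '0o644'
+#   >>> oct(FileMode.perms_s_to_bits('rwxr-sr-x'))
+#   '0o2755'
+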
+dot_C_dot_H_warning = """You are using .C or .H files in your project. This is deprecated.
+         Currently, Meson treats this as C++ code, but they
+            used to be treated as C code.
+         Note that the situation is a bit more complex if you are using the
+         Visual Studio compiler, as it treats .C files as C code, unless you add
+         the /TP compiler flag, but this is unreliable.
+         See https://github.com/mesonbuild/meson/pull/8747 for the discussions."""
+class File(HoldableObject):
+    def __init__(self, is_built: bool, subdir: str, fname: str):
+        if fname.endswith(".C") or fname.endswith(".H"):
+            mlog.warning(dot_C_dot_H_warning, once=True)
+        self.is_built = is_built
+        self.subdir = subdir
+        self.fname = fname
+        self.hash = hash((is_built, subdir, fname))
+
+    def __str__(self) -> str:
+        return self.relative_name()
+
+    def __repr__(self) -> str:
+        ret = '<File: {0}'
+        if not self.is_built:
+            ret += ' (not built)'
+        ret += '>'
+        return ret.format(self.relative_name())
+
+    @staticmethod
+    @lru_cache(maxsize=None)
+    def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
+        if not os.path.isfile(os.path.join(source_root, subdir, fname)):
+            raise MesonException('File %s does not exist.' % fname)
+        return File(False, subdir, fname)
+
+    @staticmethod
+    def from_built_file(subdir: str, fname: str) -> 'File':
+        return File(True, subdir, fname)
+
+    @staticmethod
+    def from_absolute_file(fname: str) -> 'File':
+        return File(False, '', fname)
+
+    @lru_cache(maxsize=None)
+    def rel_to_builddir(self, build_to_src: str) -> str:
+        if self.is_built:
+            return self.relative_name()
+        else:
+            return os.path.join(build_to_src, self.subdir, self.fname)
+
+    @lru_cache(maxsize=None)
+    def absolute_path(self, srcdir: str, builddir: str) -> str:
+        absdir = srcdir
+        if self.is_built:
+            absdir = builddir
+        return os.path.join(absdir, self.relative_name())
+
+    def endswith(self, ending: str) -> bool:
+        return self.fname.endswith(ending)
+
+    def split(self, s: str, maxsplit: int = -1) -> T.List[str]:
+        return self.fname.split(s, maxsplit=maxsplit)
+
+    def rsplit(self, s: str, maxsplit: int = -1) -> T.List[str]:
+        return self.fname.rsplit(s, maxsplit=maxsplit)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, File):
+            return NotImplemented
+        if self.hash != other.hash:
+            return False
+        return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)
+
+    def __hash__(self) -> int:
+        return self.hash
+
+    @lru_cache(maxsize=None)
+    def relative_name(self) -> str:
+        return os.path.join(self.subdir, self.fname)
+
+
+def get_compiler_for_source(compilers: T.Iterable['CompilerType'], src: str) -> 'CompilerType':
+    """Given a set of compilers and a source, find the compiler for that source type."""
+    for comp in compilers:
+        if comp.can_compile(src):
+            return comp
+    raise MesonException(f'No specified compiler can handle file {src!s}')
+
+
+def classify_unity_sources(compilers: T.Iterable['CompilerType'], sources: T.Iterable[str]) -> T.Dict['CompilerType', T.List[str]]:
+    compsrclist = {}  # type: T.Dict[CompilerType, T.List[str]]
+    for src in sources:
+        comp = get_compiler_for_source(compilers, src)
+        if comp not in compsrclist:
+            compsrclist[comp] = [src]
+        else:
+            compsrclist[comp].append(src)
+    return compsrclist
+
+
+class MachineChoice(enum.IntEnum):
+
+    """Enum class representing one of the two abstract machine names used in
+    most places: the build, and host, machines.
+    """
+
+    BUILD = 0
+    HOST = 1
+
+    def get_lower_case_name(self) -> str:
+        return PerMachine('build', 'host')[self]
+
+    def get_prefix(self) -> str:
+        return PerMachine('build.', '')[self]
+
+
+class PerMachine(T.Generic[_T]):
+    def __init__(self, build: _T, host: _T) -> None:
+        self.build = build
+        self.host = host
+
+    def __getitem__(self, machine: MachineChoice) -> _T:
+        return {
+            MachineChoice.BUILD:  self.build,
+            MachineChoice.HOST:   self.host,
+        }[machine]
+
+    def __setitem__(self, machine: MachineChoice, val: _T) -> None:
+        setattr(self, machine.get_lower_case_name(), val)
+
+    def miss_defaulting(self) -> "PerMachineDefaultable[T.Optional[_T]]":
+        """Unset definition duplicated from their previous to None
+
+        This is the inverse of ''default_missing''. By removing defaulted
+        machines, we can elaborate the original and then redefault them and thus
+        avoid repeating the elaboration explicitly.
+        """
+        unfreeze = PerMachineDefaultable() # type: PerMachineDefaultable[T.Optional[_T]]
+        unfreeze.build = self.build
+        unfreeze.host = self.host
+        if unfreeze.host == unfreeze.build:
+            unfreeze.host = None
+        return unfreeze
+
+    def __repr__(self) -> str:
+        return f'PerMachine({self.build!r}, {self.host!r})'
+
+
+class PerThreeMachine(PerMachine[_T]):
+    """Like `PerMachine` but includes `target` too.
+
+    It turns out we only need to track the target machine for one thing. There
+    is no need to compute the `target` field, so we don't bother overriding the
+    `__getitem__`/`__setitem__` methods.
+    """
+    def __init__(self, build: _T, host: _T, target: _T) -> None:
+        super().__init__(build, host)
+        self.target = target
+
+    def miss_defaulting(self) -> "PerThreeMachineDefaultable[T.Optional[_T]]":
+        """Unset definition duplicated from their previous to None
+
+        This is the inverse of ''default_missing''. By removing defaulted
+        machines, we can elaborate the original and then redefault them and thus
+        avoid repeating the elaboration explicitly.
+        """
+        unfreeze = PerThreeMachineDefaultable() # type: PerThreeMachineDefaultable[T.Optional[_T]]
+        unfreeze.build = self.build
+        unfreeze.host = self.host
+        unfreeze.target = self.target
+        if unfreeze.target == unfreeze.host:
+            unfreeze.target = None
+        if unfreeze.host == unfreeze.build:
+            unfreeze.host = None
+        return unfreeze
+
+    def matches_build_machine(self, machine: MachineChoice) -> bool:
+        return self.build == self[machine]
+
+    def __repr__(self) -> str:
+        return f'PerThreeMachine({self.build!r}, {self.host!r}, {self.target!r})'
+
+
+class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
+    """Extends `PerMachine` with the ability to default from `None`s.
+    """
+    def __init__(self, build: T.Optional[_T] = None, host: T.Optional[_T] = None) -> None:
+        super().__init__(build, host)
+
+    def default_missing(self) -> "PerMachine[_T]":
+        """Default host to build
+
+        This allows just specifying nothing in the native case, and just host in the
+        cross non-compiler case.
+        """
+        freeze = PerMachine(self.build, self.host)
+        if freeze.host is None:
+            freeze.host = freeze.build
+        return freeze
+
+    def __repr__(self) -> str:
+        return f'PerMachineDefaultable({self.build!r}, {self.host!r})'
+
+    @classmethod
+    def default(cls, is_cross: bool, build: _T, host: _T) -> PerMachine[_T]:
+        """Easy way to get a defaulted value
+
+        This allows simplifying the case where you can control whether host and
+        build are separate or not with a boolean. If the is_cross value is set
+        to true then the optional host value will be used, otherwise the host
+        will be set to the build value.
+        """
+        m = cls(build)
+        if is_cross:
+            m.host = host
+        return m.default_missing()
+
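+# Illustrative sketch (not part of the upstream module): for a native build,
+# PerMachineDefaultable.default(is_cross=False, build='gcc', host='clang')
+# defaults host from build and yields PerMachine('gcc', 'gcc'); with
+# is_cross=True the host value is kept and it yields PerMachine('gcc', 'clang').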
+
+
+class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
+    """Extends `PerThreeMachine` with the ability to default from `None`s.
+    """
+    def __init__(self) -> None:
+        PerThreeMachine.__init__(self, None, None, None)
+
+    def default_missing(self) -> "PerThreeMachine[T.Optional[_T]]":
+        """Default host to build and target to host.
+
+        This allows just specifying nothing in the native case, just host in the
+        cross non-compiler case, and just target in the native-built
+        cross-compiler case.
+        """
+        freeze = PerThreeMachine(self.build, self.host, self.target)
+        if freeze.host is None:
+            freeze.host = freeze.build
+        if freeze.target is None:
+            freeze.target = freeze.host
+        return freeze
+
+    def __repr__(self) -> str:
+        return f'PerThreeMachineDefaultable({self.build!r}, {self.host!r}, {self.target!r})'
+
+
+def is_sunos() -> bool:
+    return platform.system().lower() == 'sunos'
+
+
+def is_osx() -> bool:
+    return platform.system().lower() == 'darwin'
+
+
+def is_linux() -> bool:
+    return platform.system().lower() == 'linux'
+
+
+def is_android() -> bool:
+    return platform.system().lower() == 'android'
+
+
+def is_haiku() -> bool:
+    return platform.system().lower() == 'haiku'
+
+
+def is_openbsd() -> bool:
+    return platform.system().lower() == 'openbsd'
+
+
+def is_windows() -> bool:
+    platname = platform.system().lower()
+    return platname == 'windows'
+
+def is_wsl() -> bool:
+    return is_linux() and 'microsoft' in platform.release().lower()
+
+def is_cygwin() -> bool:
+    return sys.platform == 'cygwin'
+
+
+def is_debianlike() -> bool:
+    return os.path.isfile('/etc/debian_version')
+
+
+def is_dragonflybsd() -> bool:
+    return platform.system().lower() == 'dragonfly'
+
+
+def is_netbsd() -> bool:
+    return platform.system().lower() == 'netbsd'
+
+
+def is_freebsd() -> bool:
+    return platform.system().lower() == 'freebsd'
+
+def is_irix() -> bool:
+    return platform.system().startswith('irix')
+
+def is_hurd() -> bool:
+    return platform.system().lower() == 'gnu'
+
+def is_qnx() -> bool:
+    return platform.system().lower() == 'qnx'
+
+def is_aix() -> bool:
+    return platform.system().lower() == 'aix'
+
+def exe_exists(arglist: T.List[str]) -> bool:
+    try:
+        if subprocess.run(arglist, timeout=10).returncode == 0:
+            return True
+    except (FileNotFoundError, subprocess.TimeoutExpired):
+        pass
+    return False
+
+
+@lru_cache(maxsize=None)
+def darwin_get_object_archs(objpath: str) -> 'ImmutableListProtocol[str]':
+    '''
+    For a specific object (executable, static library, dylib, etc), run `lipo`
+    to fetch the list of archs supported by it. Supports both thin objects and
+    'fat' objects.
+    '''
+    _, stdo, stderr = Popen_safe(['lipo', '-info', objpath])
+    if not stdo:
+        mlog.debug(f'lipo {objpath}: {stderr}')
+        return None
+    stdo = stdo.rsplit(': ', 1)[1]
+    # Convert from lipo-style archs to meson-style CPUs
+    stdo = stdo.replace('i386', 'x86')
+    stdo = stdo.replace('arm64', 'aarch64')
+    # Add generic name for armv7 and armv7s
+    if 'armv7' in stdo:
+        stdo += ' arm'
+    return stdo.split()
+
+
+def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
+    vcs_systems = [
+        dict(name = 'git',        cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
+        dict(name = 'mercurial',  cmd = 'hg',  repo_dir = '.hg',  get_rev = 'hg id -i',               rev_regex = '(.*)', dep = '.hg/dirstate'),
+        dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info',               rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
+        dict(name = 'bazaar',     cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno',              rev_regex = '(.*)', dep = '.bzr'),
+    ]
+    if isinstance(source_dir, str):
+        source_dir = Path(source_dir)
+
+    parent_paths_and_self = collections.deque(source_dir.parents)
+    # Prepend the source directory to the front so we can check it;
+    # source_dir.parents doesn't include source_dir
+    parent_paths_and_self.appendleft(source_dir)
+    for curdir in parent_paths_and_self:
+        for vcs in vcs_systems:
+            if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
+                vcs['wc_dir'] = str(curdir)
+                return vcs
+    return None
+
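+# Illustrative note (not part of the upstream module): detect_vcs() walks from
+# the given directory up through its parents and returns the matching entry of
+# vcs_systems (with an added 'wc_dir' key naming the working copy root), or
+# None when no known VCS directory is found.
+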
+def current_vs_supports_modules() -> bool:
+    vsver = os.environ.get('VSCMD_VER', '')
+    nums = vsver.split('.', 2)
+    major = int(nums[0])
+    if major >= 17:
+        return True
+    if major == 16 and int(nums[1]) >= 10:
+        return True
+    return vsver.startswith('16.9.0') and '-pre.' in vsver
+
+# a helper class which implements the same version ordering as RPM
+class Version:
+    def __init__(self, s: str) -> None:
+        self._s = s
+
+        # split into numeric, alphabetic and non-alphanumeric sequences
+        sequences1 = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
+
+        # non-alphanumeric separators are discarded
+        sequences2 = [m for m in sequences1 if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
+
+        # numeric sequences are converted from strings to ints
+        sequences3 = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences2]
+
+        self._v = sequences3
+
+    def __str__(self) -> str:
+        return '{} (V={})'.format(self._s, str(self._v))
+
+    def __repr__(self) -> str:
+        return f'<Version: {self._s}>'
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.lt)
+        return NotImplemented
+
+    def __gt__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.gt)
+        return NotImplemented
+
+    def __le__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.le)
+        return NotImplemented
+
+    def __ge__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.ge)
+        return NotImplemented
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self._v == other._v
+        return NotImplemented
+
+    def __ne__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self._v != other._v
+        return NotImplemented
+
+    def __cmp(self, other: 'Version', comparator: T.Callable[[T.Any, T.Any], bool]) -> bool:
+        # compare each sequence in order
+        for ours, theirs in zip(self._v, other._v):
+            # sort a non-digit sequence before a digit sequence
+            ours_is_int = isinstance(ours, int)
+            theirs_is_int = isinstance(theirs, int)
+            if ours_is_int != theirs_is_int:
+                return comparator(ours_is_int, theirs_is_int)
+
+            if ours != theirs:
+                return comparator(ours, theirs)
+
+        # if equal length, all components have matched, so equal
+        # otherwise, the version with a suffix remaining is greater
+        return comparator(len(self._v), len(other._v))
+
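+# Illustrative usage (not part of the upstream module): numeric runs compare
+# numerically, so 1.10 sorts after 1.9:
+#   >>> Version('1.10') > Version('1.9')
+#   True
+#   >>> Version('2.0') > Version('1.99')
+#   True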
+
+def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], bool], str]:
+    if vstr2.startswith('>='):
+        cmpop = operator.ge
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('<='):
+        cmpop = operator.le
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('!='):
+        cmpop = operator.ne
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('=='):
+        cmpop = operator.eq
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('='):
+        cmpop = operator.eq
+        vstr2 = vstr2[1:]
+    elif vstr2.startswith('>'):
+        cmpop = operator.gt
+        vstr2 = vstr2[1:]
+    elif vstr2.startswith('<'):
+        cmpop = operator.lt
+        vstr2 = vstr2[1:]
+    else:
+        cmpop = operator.eq
+
+    return (cmpop, vstr2)
+
+
+def version_compare(vstr1: str, vstr2: str) -> bool:
+    (cmpop, vstr2) = _version_extract_cmpop(vstr2)
+    return cmpop(Version(vstr1), Version(vstr2))
+
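+# Illustrative usage (not part of the upstream module):
+#   >>> version_compare('0.58.0', '>=0.56')
+#   True
+#   >>> version_compare('0.58.0', '<0.58')
+#   False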
+
+def version_compare_many(vstr1: str, conditions: T.Union[str, T.Iterable[str]]) -> T.Tuple[bool, T.List[str], T.List[str]]:
+    if isinstance(conditions, str):
+        conditions = [conditions]
+    found = []
+    not_found = []
+    for req in conditions:
+        if not version_compare(vstr1, req):
+            not_found.append(req)
+        else:
+            found.append(req)
+    return not_found == [], not_found, found
+
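+# Illustrative usage (not part of the upstream module):
+#   >>> version_compare_many('1.5', ['>=1.0', '<1.4'])
+#   (False, ['<1.4'], ['>=1.0'])
+# i.e. overall result, unsatisfied conditions, satisfied conditions.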
+
+# determine if the minimum version satisfying the condition |condition| exceeds
+# the minimum version for a feature |minimum|
+def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
+    if condition.startswith('>='):
+        cmpop = operator.le
+        condition = condition[2:]
+    elif condition.startswith('<='):
+        return False
+    elif condition.startswith('!='):
+        return False
+    elif condition.startswith('=='):
+        cmpop = operator.le
+        condition = condition[2:]
+    elif condition.startswith('='):
+        cmpop = operator.le
+        condition = condition[1:]
+    elif condition.startswith('>'):
+        cmpop = operator.lt
+        condition = condition[1:]
+    elif condition.startswith('<'):
+        return False
+    else:
+        cmpop = operator.le
+
+    # Declaring a project(meson_version: '>=0.46') and then using features in
+    # 0.46.0 is valid, because (knowing the meson versioning scheme) '0.46.0' is
+    # the lowest version which satisfies the constraint '>=0.46'.
+    #
+    # But this will fail here, because the minimum version required by the
+    # version constraint ('0.46') is strictly less (in our version comparison)
+    # than the minimum version needed for the feature ('0.46.0').
+    #
+    # Map versions in the constraint of the form '0.46' to '0.46.0', to embed
+    # this knowledge of the meson versioning scheme.
+    condition = condition.strip()
+    if re.match(r'^\d+.\d+$', condition):
+        condition += '.0'
+
+    return T.cast(bool, cmpop(Version(minimum), Version(condition)))
+
+def search_version(text: str) -> str:
+    # Usually of the type 4.1.4 but compiler output may contain
+    # stuff like this:
+    # (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
+    # Limiting major version number to two digits seems to work
+    # thus far. When we get to GCC 100, this will break, but
+    # if we are still relevant when that happens, it can be
+    # considered an achievement in itself.
+    #
+    # This regex is reaching magic levels. If it ever needs
+    # to be updated, do not complexify but convert to something
+    # saner instead.
+    # We'll demystify it a bit with a verbose definition.
+    version_regex = re.compile(r"""
+    (?<!                # Zero-width negative lookbehind assertion
+        (
+            \d          # One digit
+            | \.        # Or one period
+        )               # One occurrence
+    )
+    # Following pattern must not follow a digit or period
+    (
+        \d{1,2}         # One or two digits
+        (
+            \.\d+       # Period and one or more digits
+        )+              # One or more occurrences
+        (
+            -[a-zA-Z0-9]+   # Hyphen and one or more alphanumerics
+        )?              # Zero or one occurrence
+    )                   # One occurrence
+    """, re.VERBOSE)
+    match = version_regex.search(text)
+    if match:
+        return match.group(0)
+
+    # try a simpler regex that has like "blah 2020.01.100 foo" or "blah 2020.01 foo"
+    version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
+    match = version_regex.search(text)
+    if match:
+        return match.group(0)
+
+    return 'unknown version'
+
+
+def default_libdir() -> str:
+    if is_debianlike():
+        try:
+            pc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.DEVNULL)
+            (stdo, _) = pc.communicate()
+            if pc.returncode == 0:
+                archpath = stdo.decode().strip()
+                return 'lib/' + archpath
+        except Exception:
+            pass
+    if is_freebsd() or is_irix():
+        return 'lib'
+    if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
+        return 'lib64'
+    return 'lib'
+
+
+def default_libexecdir() -> str:
+    # There is no way to auto-detect this, so it must be set at build time
+    return 'libexec'
+
+
+def default_prefix() -> str:
+    return 'c:/' if is_windows() else '/usr/local'
+
+
+def get_library_dirs() -> T.List[str]:
+    if is_windows():
+        return ['C:/mingw/lib'] # TODO: get programmatically
+    if is_osx():
+        return ['/usr/lib'] # TODO: get programmatically
+    # The following is probably Debian/Ubuntu specific.
+    # /usr/local/lib is first because it contains stuff
+    # installed by the sysadmin and is probably more up-to-date
+    # than /usr/lib. If you feel that this search order is
+    # problematic, please raise the issue on the mailing list.
+    unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
+
+    if is_freebsd():
+        return unixdirs
+    # FIXME: this needs to be further genericized for aarch64 etc.
+    machine = platform.machine()
+    if machine in ('i386', 'i486', 'i586', 'i686'):
+        plat = 'i386'
+    elif machine.startswith('arm'):
+        plat = 'arm'
+    else:
+        plat = ''
+
+    # Solaris puts 32-bit libraries in the main /lib & /usr/lib directories
+    # and 64-bit libraries in platform specific subdirectories.
+    if is_sunos():
+        if machine == 'i86pc':
+            plat = 'amd64'
+        elif machine.startswith('sun4'):
+            plat = 'sparcv9'
+
+    usr_platdir = Path('/usr/lib/') / plat
+    if usr_platdir.is_dir():
+        unixdirs += [str(x) for x in (usr_platdir).iterdir() if x.is_dir()]
+    if os.path.exists('/usr/lib64'):
+        unixdirs.append('/usr/lib64')
+
+    lib_platdir = Path('/lib/') / plat
+    if lib_platdir.is_dir():
+        unixdirs += [str(x) for x in (lib_platdir).iterdir() if x.is_dir()]
+    if os.path.exists('/lib64'):
+        unixdirs.append('/lib64')
+
+    return unixdirs
+
+
+def has_path_sep(name: str, sep: str = '/\\') -> bool:
+    'Checks if any of the specified @sep path separators are in @name'
+    for each in sep:
+        if each in name:
+            return True
+    return False
+
+
+if is_windows():
+    # shlex.split is not suitable for splitting a command line on Windows (https://bugs.python.org/issue1724822);
+    # shlex.quote is similarly problematic. Below are "proper" implementations of these functions according to
+    # https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments and
+    # https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
+
+    _whitespace = ' \t\n\r'
+    _find_unsafe_char = re.compile(fr'[{_whitespace}"]').search
+
+    def quote_arg(arg: str) -> str:
+        if arg and not _find_unsafe_char(arg):
+            return arg
+
+        result = '"'
+        num_backslashes = 0
+        for c in arg:
+            if c == '\\':
+                num_backslashes += 1
+            else:
+                if c == '"':
+                    # Escape all backslashes and the following double quotation mark
+                    num_backslashes = num_backslashes * 2 + 1
+
+                result += num_backslashes * '\\' + c
+                num_backslashes = 0
+
+        # Escape all backslashes, but let the terminating double quotation
+        # mark we add below be interpreted as a metacharacter
+        result += (num_backslashes * 2) * '\\' + '"'
+        return result
+
+    def split_args(cmd: str) -> T.List[str]:
+        result = []
+        arg = ''
+        num_backslashes = 0
+        num_quotes = 0
+        in_quotes = False
+        for c in cmd:
+            if c == '\\':
+                num_backslashes += 1
+            else:
+                if c == '"' and not (num_backslashes % 2):
+                    # unescaped quote, eat it
+                    arg += (num_backslashes // 2) * '\\'
+                    num_quotes += 1
+                    in_quotes = not in_quotes
+                elif c in _whitespace and not in_quotes:
+                    if arg or num_quotes:
+                        # reached the end of the argument
+                        result.append(arg)
+                        arg = ''
+                        num_quotes = 0
+                else:
+                    if c == '"':
+                        # escaped quote
+                        num_backslashes = (num_backslashes - 1) // 2
+
+                    arg += num_backslashes * '\\' + c
+
+                num_backslashes = 0
+
+        if arg or num_quotes:
+            result.append(arg)
+
+        return result
+else:
+    def quote_arg(arg: str) -> str:
+        return shlex.quote(arg)
+
+    def split_args(cmd: str) -> T.List[str]:
+        return shlex.split(cmd)
+
+
+def join_args(args: T.Iterable[str]) -> str:
+    return ' '.join([quote_arg(x) for x in args])
+
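+# Illustrative usage (not part of the upstream module): both implementations
+# strip the quotes while keeping quoted whitespace inside a single argument:
+#   >>> split_args('gcc -DNAME="foo bar" main.c')
+#   ['gcc', '-DNAME=foo bar', 'main.c']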
+
+def do_replacement(regex: T.Pattern[str], line: str, variable_format: str,
+                   confdata: 'ConfigurationData') -> T.Tuple[str, T.Set[str]]:
+    missing_variables = set()  # type: T.Set[str]
+    if variable_format == 'cmake':
+        start_tag = '${'
+        backslash_tag = '\\${'
+    else:
+        assert variable_format in ['meson', 'cmake@']
+        start_tag = '@'
+        backslash_tag = '\\@'
+
+    def variable_replace(match: T.Match[str]) -> str:
+        # Pairs of escape characters before '@' or '\@'
+        if match.group(0).endswith('\\'):
+            num_escapes = match.end(0) - match.start(0)
+            return '\\' * (num_escapes // 2)
+        # Single escape character and '@'
+        elif match.group(0) == backslash_tag:
+            return start_tag
+        # Template variable to be replaced
+        else:
+            varname = match.group(1)
+            var_str = ''
+            if varname in confdata:
+                (var, desc) = confdata.get(varname)
+                if isinstance(var, str):
+                    var_str = var
+                elif isinstance(var, int):
+                    var_str = str(var)
+                else:
+                    msg = f'Tried to replace variable {varname!r} value with ' \
+                          f'something other than a string or int: {var!r}'
+                    raise MesonException(msg)
+            else:
+                missing_variables.add(varname)
+            return var_str
+    return re.sub(regex, variable_replace, line), missing_variables
+
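+# Illustrative sketch (not part of the upstream module; assumes `confdata` maps
+# 'version' to '1.0'):
+#   do_replacement(get_variable_regex('meson'), 'v=@version@ u=@undef@\n',
+#                  'meson', confdata)
+# returns ('v=1.0 u=\n', {'undef'}): unknown names are replaced with the empty
+# string and reported in the missing-variables set.
+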
+def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', variable_format: str) -> str:
+    def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
+        arr = line.split()
+        define_value=[]
+        for token in arr[2:]:
+            try:
+                (v, desc) = confdata.get(token)
+                define_value += [str(v)]
+            except KeyError:
+                define_value += [token]
+        return ' '.join(define_value)
+
+    arr = line.split()
+    if variable_format == 'meson' and len(arr) != 2:
+        raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
+
+    varname = arr[1]
+    try:
+        (v, desc) = confdata.get(varname)
+    except KeyError:
+        return '/* #undef %s */\n' % varname
+    if isinstance(v, bool):
+        if v:
+            return '#define %s\n' % varname
+        else:
+            return '#undef %s\n' % varname
+    elif isinstance(v, int):
+        return '#define %s %d\n' % (varname, v)
+    elif isinstance(v, str):
+        if variable_format == 'meson':
+            result = v
+        else:
+            result = get_cmake_define(line, confdata)
+        result = f'#define {varname} {result}\n'
+        (result, missing_variable) = do_replacement(regex, result, variable_format, confdata)
+        return result
+    else:
+        raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
+
+def get_variable_regex(variable_format: str = 'meson') -> T.Pattern[str]:
+    # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
+    # Also allow escaping '@' with '\@'
+    if variable_format in ['meson', 'cmake@']:
+        regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+    elif variable_format == 'cmake':
+        regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
+    else:
+        raise MesonException(f'Format "{variable_format}" not handled')
+    return regex
+
+def do_conf_str(src: str, data: list, confdata: 'ConfigurationData', variable_format: str,
+                encoding: str = 'utf-8') -> T.Tuple[T.List[str], T.Set[str], bool]:
+    def line_is_valid(line: str, variable_format: str) -> bool:
+        if variable_format == 'meson':
+            if '#cmakedefine' in line:
+                return False
+        else: #cmake format
+            if '#mesondefine' in line:
+                return False
+        return True
+
+    regex = get_variable_regex(variable_format)
+
+    search_token = '#mesondefine'
+    if variable_format != 'meson':
+        search_token = '#cmakedefine'
+
+    result = []
+    missing_variables = set()
+    # Detect when the configuration data is empty and no tokens were found
+    # during substitution so we can warn the user to use the `copy:` kwarg.
+    confdata_useless = not confdata.keys()
+    for line in data:
+        if line.startswith(search_token):
+            confdata_useless = False
+            line = do_define(regex, line, confdata, variable_format)
+        else:
+            if not line_is_valid(line,variable_format):
+                raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"')
+            line, missing = do_replacement(regex, line, variable_format, confdata)
+            missing_variables.update(missing)
+            if missing:
+                confdata_useless = False
+        result.append(line)
+
+    return result, missing_variables, confdata_useless
+
+def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_format: str,
+                 encoding: str = 'utf-8') -> T.Tuple[T.Set[str], bool]:
+    try:
+        with open(src, encoding=encoding, newline='') as f:
+            data = f.readlines()
+    except Exception as e:
+        raise MesonException(f'Could not read input file {src}: {e!s}')
+
+    (result, missing_variables, confdata_useless) = do_conf_str(src, data, confdata, variable_format, encoding)
+    dst_tmp = dst + '~'
+    try:
+        with open(dst_tmp, 'w', encoding=encoding, newline='') as f:
+            f.writelines(result)
+    except Exception as e:
+        raise MesonException(f'Could not write output file {dst}: {e!s}')
+    shutil.copymode(src, dst_tmp)
+    replace_if_different(dst, dst_tmp)
+    return missing_variables, confdata_useless
+
+CONF_C_PRELUDE = '''/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+'''
+
+CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
+; Do not edit, your changes will be lost.
+
+'''
+
+def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: str) -> None:
+    if output_format == 'c':
+        prelude = CONF_C_PRELUDE
+        prefix = '#'
+    elif output_format == 'nasm':
+        prelude = CONF_NASM_PRELUDE
+        prefix = '%'
+
+    ofilename_tmp = ofilename + '~'
+    with open(ofilename_tmp, 'w', encoding='utf-8') as ofile:
+        ofile.write(prelude)
+        for k in sorted(cdata.keys()):
+            (v, desc) = cdata.get(k)
+            if desc:
+                if output_format == 'c':
+                    ofile.write('/* %s */\n' % desc)
+                elif output_format == 'nasm':
+                    for line in desc.split('\n'):
+                        ofile.write('; %s\n' % line)
+            if isinstance(v, bool):
+                if v:
+                    ofile.write(f'{prefix}define {k}\n\n')
+                else:
+                    ofile.write(f'{prefix}undef {k}\n\n')
+            elif isinstance(v, (int, str)):
+                ofile.write(f'{prefix}define {k} {v}\n\n')
+            else:
+                raise MesonException('Unknown data type in configuration file entry: ' + k)
+    replace_if_different(ofilename, ofilename_tmp)
+
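+# Illustrative sketch (not part of the upstream module): assuming `cdata` holds
+# HAVE_FOO=True and VERSION='"1.0"', dump_conf_header(..., 'c') emits
+#   #define HAVE_FOO
+#   #define VERSION "1.0"
+# while the 'nasm' format uses '%define' and ';' comments instead.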
+
+def replace_if_different(dst: str, dst_tmp: str) -> None:
+    # If contents are identical, don't touch the file to prevent
+    # unnecessary rebuilds.
+    different = True
+    try:
+        with open(dst, 'rb') as f1, open(dst_tmp, 'rb') as f2:
+            if f1.read() == f2.read():
+                different = False
+    except FileNotFoundError:
+        pass
+    if different:
+        os.replace(dst_tmp, dst)
+    else:
+        os.unlink(dst_tmp)
+
+
+
+def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
+    '''
+    Returns a list with all args embedded in a list if they are not a list.
+    This function preserves order.
+    @flatten: Convert lists of lists to a flat list
+    '''
+    if not isinstance(item, list):
+        return [item]
+    result = []  # type: T.List[T.Any]
+    for i in item:
+        if flatten and isinstance(i, list):
+            result += listify(i, flatten=True)
+        else:
+            result.append(i)
+    return result
+
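+# Illustrative usage (not part of the upstream module):
+#   >>> listify('foo')
+#   ['foo']
+#   >>> listify([1, [2, [3]]])
+#   [1, 2, 3]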
+
+def extract_as_list(dict_object: T.Dict[_T, _U], key: _T, pop: bool = False) -> T.List[_U]:
+    '''
+    Extracts all values from given dict_object and listifies them.
+    '''
+    fetch = dict_object.get
+    if pop:
+        fetch = dict_object.pop
+    # If there's only one key, we don't return a list with one element
+    return listify(fetch(key, []), flatten=True)
+
+
+def typeslistify(item: 'T.Union[_T, T.Sequence[_T]]',
+                 types: 'T.Union[T.Type[_T], T.Tuple[T.Type[_T]]]') -> T.List[_T]:
+    '''
+    Ensure that type(@item) is one of @types or a
+    list of items all of which are of type @types
+    '''
+    if isinstance(item, types):
+        item = T.cast(T.List[_T], [item])
+    if not isinstance(item, list):
+        raise MesonException('Item must be a list or one of {!r}, not {!r}'.format(types, type(item)))
+    for i in item:
+        if i is not None and not isinstance(i, types):
+            raise MesonException('List item must be one of {!r}, not {!r}'.format(types, type(i)))
+    return item
+
+
+def stringlistify(item: T.Union[T.Any, T.Sequence[T.Any]]) -> T.List[str]:
+    return typeslistify(item, str)
+
+
+def expand_arguments(args: T.Iterable[str]) -> T.Optional[T.List[str]]:
+    expended_args = []  # type: T.List[str]
+    for arg in args:
+        if not arg.startswith('@'):
+            expended_args.append(arg)
+            continue
+
+        args_file = arg[1:]
+        try:
+            with open(args_file, encoding='utf-8') as f:
+                extended_args = f.read().split()
+            expended_args += extended_args
+        except Exception as e:
+            mlog.error('Expanding command line arguments:',  args_file, 'not found')
+            mlog.exception(e)
+            return None
+    return expended_args
+
+
+def partition(pred: T.Callable[[_T], object], iterable: T.Iterable[_T]) -> T.Tuple[T.Iterator[_T], T.Iterator[_T]]:
+    """Use a predicate to partition entries into false entries and true
+    entries.
+
+    >>> x, y = partition(is_odd, range(10))
+    >>> (list(x), list(y))
+    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+    """
+    t1, t2 = tee(iterable)
+    return filterfalse(pred, t1), filter(pred, t2)
+
+
+def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
+               stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+               stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+               **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
+    import locale
+    encoding = locale.getpreferredencoding()
+    # Redirect stdin to DEVNULL otherwise the command run by us here might mess
+    # up the console and ANSI colors will stop working on Windows.
+    if 'stdin' not in kwargs:
+        kwargs['stdin'] = subprocess.DEVNULL
+    if not sys.stdout.encoding or encoding.upper() != 'UTF-8':
+        p, o, e = Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs)
+    else:
+        p = subprocess.Popen(args, universal_newlines=True, close_fds=False,
+                             stdout=stdout, stderr=stderr, **kwargs)
+        o, e = p.communicate(write)
+    # Sometimes the command that we run will call another command which will be
+    # without the above stdin workaround, so set the console mode again just in
+    # case.
+    mlog.setup_console()
+    return p, o, e
+
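+# Illustrative usage (not part of the upstream module):
+#   p, out, err = Popen_safe(['git', '--version'])
+# where `out` and `err` are decoded, newline-normalized strings and `p` exposes
+# the usual returncode attribute.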
+
+def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
+                      stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+                      stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+                      **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
+    p = subprocess.Popen(args, universal_newlines=False, close_fds=False,
+                         stdout=stdout, stderr=stderr, **kwargs)
+    input_ = None  # type: T.Optional[bytes]
+    if write is not None:
+        input_ = write.encode('utf-8')
+    o, e = p.communicate(input_)
+    if o is not None:
+        if sys.stdout.encoding:
+            o = o.decode(encoding=sys.stdout.encoding, errors='replace').replace('\r\n', '\n')
+        else:
+            o = o.decode(errors='replace').replace('\r\n', '\n')
+    if e is not None:
+        if sys.stderr.encoding:
+            e = e.decode(encoding=sys.stderr.encoding, errors='replace').replace('\r\n', '\n')
+        else:
+            e = e.decode(errors='replace').replace('\r\n', '\n')
+    return p, o, e
+
+
+def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
+    '''
+    Takes each regular expression in @regexiter and tries to search for it in
+    every item in @initer. If there is a match, returns that match.
+    Otherwise, returns None.
+    '''
+    for regex in regexiter:
+        for ii in initer:
+            if not isinstance(ii, str):
+                continue
+            match = re.search(regex, ii)
+            if match:
+                return match.group()
+    return None
+
+
+def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, str]) -> None:
+    # Error checking
+    inregex = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@']  # type: T.List[str]
+    outregex = ['@OUTPUT([0-9]+)?@', '@OUTDIR@']                 # type: T.List[str]
+    if '@INPUT@' not in values:
+        # Error out if any input-derived templates are present in the command
+        match = iter_regexin_iter(inregex, command)
+        if match:
+            raise MesonException(f'Command cannot have {match!r}, since no input files were specified')
+    else:
+        if len(values['@INPUT@']) > 1:
+            # Error out if @PLAINNAME@ or @BASENAME@ is present in the command
+            match = iter_regexin_iter(inregex[1:], command)
+            if match:
+                raise MesonException(f'Command cannot have {match!r} when there is '
+                                     'more than one input file')
+        # Error out if an invalid @INPUTnn@ template was specified
+        for each in command:
+            if not isinstance(each, str):
+                continue
+            match2 = re.search(inregex[0], each)
+            if match2 and match2.group() not in values:
+                m = 'Command cannot have {!r} since there are only {!r} inputs'
+                raise MesonException(m.format(match2.group(), len(values['@INPUT@'])))
+    if '@OUTPUT@' not in values:
+        # Error out if any output-derived templates are present in the command
+        match = iter_regexin_iter(outregex, command)
+        if match:
+            m = 'Command cannot have {!r} since there are no outputs'
+            raise MesonException(m.format(match))
+    else:
+        # Error out if an invalid @OUTPUTnn@ template was specified
+        for each in command:
+            if not isinstance(each, str):
+                continue
+            match2 = re.search(outregex[0], each)
+            if match2 and match2.group() not in values:
+                m = 'Command cannot have {!r} since there are only {!r} outputs'
+                raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
+
+
+def substitute_values(command: T.List[str], values: T.Dict[str, str]) -> T.List[str]:
+    '''
+    Substitute the template strings in the @values dict into the list of
+    strings @command and return a new list. For a full list of the templates,
+    see get_filenames_templates_dict()
+
+    If multiple inputs/outputs are given in the @values dictionary, we
+    substitute @INPUT@ and @OUTPUT@ only if they are the entire string, not
+    just a part of it, and in that case we substitute *all* of them.
+    '''
+    # Error checking
+    _substitute_values_check_errors(command, values)
+    # Substitution
+    outcmd = []  # type: T.List[str]
+    rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
+    value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
+    for vv in command:
+        if not isinstance(vv, str):
+            outcmd.append(vv)
+        elif '@INPUT@' in vv:
+            inputs = values['@INPUT@']
+            if vv == '@INPUT@':
+                outcmd += inputs
+            elif len(inputs) == 1:
+                outcmd.append(vv.replace('@INPUT@', inputs[0]))
+            else:
+                raise MesonException("Command has '@INPUT@' as part of a "
+                                     "string and more than one input file")
+        elif '@OUTPUT@' in vv:
+            outputs = values['@OUTPUT@']
+            if vv == '@OUTPUT@':
+                outcmd += outputs
+            elif len(outputs) == 1:
+                outcmd.append(vv.replace('@OUTPUT@', outputs[0]))
+            else:
+                raise MesonException("Command has '@OUTPUT@' as part of a "
+                                     "string and more than one output file")
+        # Append values that are exactly a template string.
+        # This is faster than a string replace.
+        elif vv in values:
+            outcmd.append(values[vv])
+        # Substitute everything else with replacement
+        elif value_rx:
+            outcmd.append(value_rx.sub(lambda m: values[m.group(0)], vv))
+        else:
+            outcmd.append(vv)
+    return outcmd
+
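+# Illustrative usage (not part of the upstream module):
+#   substitute_values(['cc', '-c', '@INPUT@', '-o', '@OUTPUT@'],
+#                     {'@INPUT@': ['a.c'], '@OUTPUT@': ['a.o'], '@OUTDIR@': '.'})
+# returns ['cc', '-c', 'a.c', '-o', 'a.o'].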
+
+def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T.Dict[str, T.Union[str, T.List[str]]]:
+    '''
+    Create a dictionary with template strings as keys and values as values for
+    the following templates:
+
+    @INPUT@  - the full path to one or more input files, from @inputs
+    @OUTPUT@ - the full path to one or more output files, from @outputs
+    @OUTDIR@ - the full path to the directory containing the output files
+
+    If there is only one input file, the following keys are also created:
+
+    @PLAINNAME@ - the filename of the input file
+    @BASENAME@ - the filename of the input file with the extension removed
+
+    If there is more than one input file, the following keys are also created:
+
+    @INPUT0@, @INPUT1@, ... one for each input file
+
+    If there is more than one output file, the following keys are also created:
+
+    @OUTPUT0@, @OUTPUT1@, ... one for each output file
+    '''
+    values = {}  # type: T.Dict[str, T.Union[str, T.List[str]]]
+    # Gather values derived from the input
+    if inputs:
+        # We want to substitute all the inputs.
+        values['@INPUT@'] = inputs
+        for (ii, vv) in enumerate(inputs):
+            # Write out @INPUT0@, @INPUT1@, ...
+            values[f'@INPUT{ii}@'] = vv
+        if len(inputs) == 1:
+            # Just one value, substitute @PLAINNAME@ and @BASENAME@
+            values['@PLAINNAME@'] = plain = os.path.basename(inputs[0])
+            values['@BASENAME@'] = os.path.splitext(plain)[0]
+    if outputs:
+        # Gather values derived from the outputs, similar to above.
+        values['@OUTPUT@'] = outputs
+        for (ii, vv) in enumerate(outputs):
+            values[f'@OUTPUT{ii}@'] = vv
+        # Outdir should be the same for all outputs
+        values['@OUTDIR@'] = os.path.dirname(outputs[0])
+        # Many external programs fail on empty arguments.
+        if values['@OUTDIR@'] == '':
+            values['@OUTDIR@'] = '.'
+    return values
+
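+# For a single input/output pair, the dictionary built above would look
+# roughly like this (illustrative comment only):
+#
+#   get_filenames_templates_dict(['src/foo.c'], ['foo.o'])
+#   # -> {'@INPUT@': ['src/foo.c'], '@INPUT0@': 'src/foo.c',
+#   #     '@PLAINNAME@': 'foo.c', '@BASENAME@': 'foo',
+#   #     '@OUTPUT@': ['foo.o'], '@OUTPUT0@': 'foo.o', '@OUTDIR@': '.'}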
+
+def _make_tree_writable(topdir: str) -> None:
+    # Ensure all files and directories under topdir are writable
+    # (and readable) by owner.
+    for d, _, files in os.walk(topdir):
+        os.chmod(d, os.stat(d).st_mode | stat.S_IWRITE | stat.S_IREAD)
+        for fname in files:
+            fpath = os.path.join(d, fname)
+            if os.path.isfile(fpath):
+                os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
+
+
+def windows_proof_rmtree(f: str) -> None:
+    # On Windows, if anyone is holding a file open you can't
+    # delete it. For example, an anti-virus scanner might
+    # be scanning files you are trying to delete. The only
+    # way around this is to try again and again.
+    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
+    writable = False
+    for d in delays:
+        try:
+            # Start by making the tree writable.
+            if not writable:
+                _make_tree_writable(f)
+                writable = True
+        except PermissionError:
+            time.sleep(d)
+            continue
+        try:
+            shutil.rmtree(f)
+            return
+        except FileNotFoundError:
+            return
+        except OSError:
+            time.sleep(d)
+    # Try one last time and throw if it fails.
+    shutil.rmtree(f)
+
+
+def windows_proof_rm(fpath: str) -> None:
+    """Like windows_proof_rmtree, but for a single file."""
+    if os.path.isfile(fpath):
+        os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
+    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
+    for d in delays:
+        try:
+            os.unlink(fpath)
+            return
+        except FileNotFoundError:
+            return
+        except OSError:
+            time.sleep(d)
+    os.unlink(fpath)
+
+
+class TemporaryDirectoryWinProof(TemporaryDirectory):
+    """
+    Like TemporaryDirectory, but cleans things up using
+    windows_proof_rmtree()
+    """
+
+    def __exit__(self, exc: T.Any, value: T.Any, tb: T.Any) -> None:
+        try:
+            super().__exit__(exc, value, tb)
+        except OSError:
+            windows_proof_rmtree(self.name)
+
+    def cleanup(self) -> None:
+        try:
+            super().cleanup()
+        except OSError:
+            windows_proof_rmtree(self.name)
+
+
+def detect_subprojects(spdir_name: str, current_dir: str = '',
+                       result: T.Optional[T.Dict[str, T.List[str]]] = None) -> T.Optional[T.Dict[str, T.List[str]]]:
+    if result is None:
+        result = {}
+    spdir = os.path.join(current_dir, spdir_name)
+    if not os.path.exists(spdir):
+        return result
+    for trial in glob(os.path.join(spdir, '*')):
+        basename = os.path.basename(trial)
+        if basename == 'packagecache':
+            continue
+        append_this = True
+        if os.path.isdir(trial):
+            detect_subprojects(spdir_name, trial, result)
+        elif trial.endswith('.wrap') and os.path.isfile(trial):
+            basename = os.path.splitext(basename)[0]
+        else:
+            append_this = False
+        if append_this:
+            if basename in result:
+                result[basename].append(trial)
+            else:
+                result[basename] = [trial]
+    return result
+
+
+def substring_is_in_list(substr: str, strlist: T.List[str]) -> bool:
+    for s in strlist:
+        if substr in s:
+            return True
+    return False
+
+
+class OrderedSet(T.MutableSet[_T]):
+    """A set that preserves the order in which items are added, by first
+    insertion.
+    """
+    def __init__(self, iterable: T.Optional[T.Iterable[_T]] = None):
+        # typing.OrderedDict is new in 3.7.2, so we can't use that, but we can
+        # use MutableMapping, which is fine in this case.
+        self.__container = collections.OrderedDict()  # type: T.MutableMapping[_T, None]
+        if iterable:
+            self.update(iterable)
+
+    def __contains__(self, value: object) -> bool:
+        return value in self.__container
+
+    def __iter__(self) -> T.Iterator[_T]:
+        return iter(self.__container.keys())
+
+    def __len__(self) -> int:
+        return len(self.__container)
+
+    def __repr__(self) -> str:
+        # Don't print 'OrderedSet("")' for an empty set.
+        if self.__container:
+            return 'OrderedSet("{}")'.format(
+                '", "'.join(repr(e) for e in self.__container.keys()))
+        return 'OrderedSet()'
+
+    def __reversed__(self) -> T.Iterator[_T]:
+        # Mypy complains that sets can't be reversed, which is true for
+        # unordered sets, but this is an ordered set, so reversed() makes sense.
+        return reversed(self.__container.keys())  # type: ignore
+
+    def add(self, value: _T) -> None:
+        self.__container[value] = None
+
+    def discard(self, value: _T) -> None:
+        if value in self.__container:
+            del self.__container[value]
+
+    def move_to_end(self, value: _T, last: bool = True) -> None:
+        # Mypy does not know about move_to_end, because it is not part of MutableMapping
+        self.__container.move_to_end(value, last) # type: ignore
+
+    def pop(self, last: bool = True) -> _T:
+        # Mypy does not know about the last argument, because it is not part of MutableMapping
+        item, _ = self.__container.popitem(last)  # type: ignore
+        return item
+
+    def update(self, iterable: T.Iterable[_T]) -> None:
+        for item in iterable:
+            self.__container[item] = None
+
+    def difference(self, set_: T.Union[T.Set[_T], 'OrderedSet[_T]']) -> 'OrderedSet[_T]':
+        return type(self)(e for e in self if e not in set_)
+
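+# OrderedSet keeps first-insertion order and ignores later duplicates
+# (illustrative comment only):
+#
+#   list(OrderedSet([3, 1, 3, 2]))  # -> [3, 1, 2]
+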
+def relpath(path: str, start: str) -> str:
+    # On Windows a relative path can't be evaluated for paths on two different
+    # drives (i.e. c:\foo and f:\bar).  The only thing left to do is to use the
+    # original absolute path.
+    try:
+        return os.path.relpath(path, start)
+    except (TypeError, ValueError):
+        return path
+
+def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
+    # Check whether a path is within the root directory root
+    try:
+        if resolve:
+            path.resolve().relative_to(root.resolve())
+        else:
+            path.relative_to(root)
+    except ValueError:
+        return False
+    return True
+
+def relative_to_if_possible(path: Path, root: Path, resolve: bool = False) -> Path:
+    try:
+        if resolve:
+            return path.resolve().relative_to(root.resolve())
+        else:
+            return path.relative_to(root)
+    except ValueError:
+        return path
+
+class LibType(enum.IntEnum):
+
+    """Enumeration for library types."""
+
+    SHARED = 0
+    STATIC = 1
+    PREFER_SHARED = 2
+    PREFER_STATIC = 3
+
+
+class ProgressBarFallback:  # lgtm [py/iter-returns-non-self]
+    '''
+    Fallback progress bar implementation when tqdm is not found
+
+    Since this class is not an actual iterator, but only provides a minimal
+    fallback, it is safe to ignore the 'Iterator does not return self from
+    __iter__ method' warning.
+    '''
+    def __init__(self, iterable: T.Optional[T.Iterable[str]] = None, total: T.Optional[int] = None,
+                 bar_type: T.Optional[str] = None, desc: T.Optional[str] = None):
+        if iterable is not None:
+            self.iterable = iter(iterable)
+            return
+        self.total = total
+        self.done = 0
+        self.printed_dots = 0
+        if self.total and bar_type == 'download':
+            print('Download size:', self.total)
+        if desc:
+            print(f'{desc}: ', end='')
+
+    # Pretend to be an iterator when called as one and don't print any
+    # progress
+    def __iter__(self) -> T.Iterator[str]:
+        return self.iterable
+
+    def __next__(self) -> str:
+        return next(self.iterable)
+
+    def print_dot(self) -> None:
+        print('.', end='')
+        sys.stdout.flush()
+        self.printed_dots += 1
+
+    def update(self, progress: int) -> None:
+        self.done += progress
+        if not self.total:
+            # Just print one dot per call if we don't have a total length
+            self.print_dot()
+            return
+        ratio = int(self.done / self.total * 10)
+        while self.printed_dots < ratio:
+            self.print_dot()
+
+    def close(self) -> None:
+        print('')
+
+try:
+    from tqdm import tqdm
+except ImportError:
+    # ideally we would use a typing.Protocol here, but it's part of typing_extensions until 3.8
+    ProgressBar = ProgressBarFallback  # type: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]]
+else:
+    class ProgressBarTqdm(tqdm):
+        def __init__(self, *args: T.Any, bar_type: T.Optional[str] = None, **kwargs: T.Any) -> None:
+            if bar_type == 'download':
+                kwargs.update({'unit': 'bytes', 'leave': True})
+            else:
+                kwargs.update({'leave': False})
+            kwargs['ncols'] = 100
+            super().__init__(*args, **kwargs)
+
+    ProgressBar = ProgressBarTqdm
+
+
+class RealPathAction(argparse.Action):
+    def __init__(self, option_strings: T.List[str], dest: str, default: str = '.', **kwargs: T.Any):
+        default = os.path.abspath(os.path.realpath(default))
+        super().__init__(option_strings, dest, nargs=None, default=default, **kwargs)
+
+    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+                 values: T.Union[str, T.Sequence[T.Any], None], option_string: str = None) -> None:
+        assert isinstance(values, str)
+        setattr(namespace, self.dest, os.path.abspath(os.path.realpath(values)))
+
+
+def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.Sequence[str]) -> str:
+    """Get A short version of @wine_paths to avoid reaching WINEPATH number
+    of char limit.
+    """
+
+    wine_paths = list(OrderedSet(wine_paths))
+
+    getShortPathScript = '%s.bat' % str(uuid.uuid4()).lower()[:5]
+    with open(getShortPathScript, mode='w', encoding='utf-8') as f:
+        f.write("@ECHO OFF\nfor %%x in (%*) do (\n echo|set /p=;%~sx\n)\n")
+        f.flush()
+    try:
+        with open(os.devnull, 'w', encoding='utf-8') as stderr:
+            wine_path = subprocess.check_output(
+                winecmd +
+                ['cmd', '/C', getShortPathScript] + wine_paths,
+                stderr=stderr).decode('utf-8')
+    except subprocess.CalledProcessError as e:
+        print("Could not get short paths: %s" % e)
+        wine_path = ';'.join(wine_paths)
+    finally:
+        os.remove(getShortPathScript)
+    if len(wine_path) > 2048:
+        raise MesonException(
+            'WINEPATH size {} > 2048'
+            ' this will cause random failure.'.format(
+                len(wine_path)))
+
+    return wine_path.strip(';')
+
+
+def run_once(func: T.Callable[..., _T]) -> T.Callable[..., _T]:
+    ret = []  # type: T.List[_T]
+
+    @wraps(func)
+    def wrapper(*args: T.Any, **kwargs: T.Any) -> _T:
+        if ret:
+            return ret[0]
+
+        val = func(*args, **kwargs)
+        ret.append(val)
+        return val
+
+    return wrapper
+
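+# Sketch of how the decorator above is used (comment only): the wrapped
+# function runs once and every later call returns the cached result,
+# regardless of the arguments passed.
+#
+#   @run_once
+#   def detect_something() -> str:
+#       ...  # expensive probe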
+
+class OptionProxy(T.Generic[_T]):
+    def __init__(self, value: _T, choices: T.Optional[T.List[str]] = None):
+        self.value = value
+        self.choices = choices
+
+    def set_value(self, v: _T) -> None:
+        # XXX: should this be an error
+        self.value = v
+
+
+class OptionOverrideProxy(collections.abc.MutableMapping):
+
+    '''Mimic an option list but transparently override selected option
+    values.
+    '''
+
+    # TODO: the typing here could be made more explicit using a TypedDict from
+    # python 3.8 or typing_extensions
+
+    def __init__(self, overrides: T.Dict['OptionKey', T.Any], *options: 'KeyedOptionDictType'):
+        self.overrides = overrides.copy()
+        self.options: T.Dict['OptionKey', UserOption] = {}
+        for o in options:
+            self.options.update(o)
+
+    def __getitem__(self, key: 'OptionKey') -> T.Union['UserOption', OptionProxy]:
+        if key in self.options:
+            opt = self.options[key]
+            if key in self.overrides:
+                return OptionProxy(opt.validate_value(self.overrides[key]), getattr(opt, 'choices', None))
+            return opt
+        raise KeyError('Option not found', key)
+
+    def __setitem__(self, key: 'OptionKey', value: T.Union['UserOption', OptionProxy]) -> None:
+        self.overrides[key] = value.value
+
+    def __delitem__(self, key: 'OptionKey') -> None:
+        del self.overrides[key]
+
+    def __iter__(self) -> T.Iterator['OptionKey']:
+        return iter(self.options)
+
+    def __len__(self) -> int:
+        return len(self.options)
+
+    def copy(self) -> 'OptionOverrideProxy':
+        return OptionOverrideProxy(self.overrides.copy(), self.options.copy())
+
+
+class OptionType(enum.Enum):
+
+    """Enum used to specify what kind of argument a thing is."""
+
+    BUILTIN = 0
+    BASE = 1
+    COMPILER = 2
+    PROJECT = 3
+    BACKEND = 4
+
+# This is copied from coredata. There is no way to share this, because this
+# is used in the OptionKey constructor, and the coredata lists are
+# OptionKeys...
+_BUILTIN_NAMES = {
+    'prefix',
+    'bindir',
+    'datadir',
+    'includedir',
+    'infodir',
+    'libdir',
+    'libexecdir',
+    'localedir',
+    'localstatedir',
+    'mandir',
+    'sbindir',
+    'sharedstatedir',
+    'sysconfdir',
+    'auto_features',
+    'backend',
+    'buildtype',
+    'debug',
+    'default_library',
+    'errorlogs',
+    'install_umask',
+    'layout',
+    'optimization',
+    'stdsplit',
+    'strip',
+    'unity',
+    'unity_size',
+    'warning_level',
+    'werror',
+    'wrap_mode',
+    'force_fallback_for',
+    'pkg_config_path',
+    'cmake_prefix_path',
+}
+
+
+def _classify_argument(key: 'OptionKey') -> OptionType:
+    """Classify arguments into groups so we know which dict to assign them to."""
+
+    if key.name.startswith('b_'):
+        return OptionType.BASE
+    elif key.lang is not None:
+        return OptionType.COMPILER
+    elif key.name in _BUILTIN_NAMES:
+        return OptionType.BUILTIN
+    elif key.name.startswith('backend_'):
+        assert key.machine is MachineChoice.HOST, str(key)
+        return OptionType.BACKEND
+    else:
+        assert key.machine is MachineChoice.HOST, str(key)
+        return OptionType.PROJECT
+
+
+@total_ordering
+class OptionKey:
+
+    """Represents an option key in the various option dictionaries.
+
+    This provides a flexible, powerful way to map option names from their
+    external form (things like subproject:build.option) to something that
+    is internally easier to reason about and produce.
+    """
+
+    __slots__ = ['name', 'subproject', 'machine', 'lang', '_hash', 'type']
+
+    name: str
+    subproject: str
+    machine: MachineChoice
+    lang: T.Optional[str]
+    _hash: int
+    type: OptionType
+
+    def __init__(self, name: str, subproject: str = '',
+                 machine: MachineChoice = MachineChoice.HOST,
+                 lang: T.Optional[str] = None, _type: T.Optional[OptionType] = None):
+        # the _type option to the constructor is kinda private. We want to be
+        # able to save the state and avoid the lookup function when
+        # pickling/unpickling, but we need to be able to calculate it when
+        # constructing a new OptionKey
+        object.__setattr__(self, 'name', name)
+        object.__setattr__(self, 'subproject', subproject)
+        object.__setattr__(self, 'machine', machine)
+        object.__setattr__(self, 'lang', lang)
+        object.__setattr__(self, '_hash', hash((name, subproject, machine, lang)))
+        if _type is None:
+            _type = _classify_argument(self)
+        object.__setattr__(self, 'type', _type)
+
+    def __setattr__(self, key: str, value: T.Any) -> None:
+        raise AttributeError('OptionKey instances do not support mutation.')
+
+    def __getstate__(self) -> T.Dict[str, T.Any]:
+        return {
+            'name': self.name,
+            'subproject': self.subproject,
+            'machine': self.machine,
+            'lang': self.lang,
+            '_type': self.type,
+        }
+
+    def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
+        """De-serialize the state of a pickle.
+
+        This is very clever. __init__ is not a constructor, it's an
+        initializer, therefore it's safe to call more than once. We create a
+        state in the custom __getstate__ method, which is valid to pass
+        splatted to the initializer.
+        """
+        # Mypy doesn't like this, because it's so clever.
+        self.__init__(**state)  # type: ignore
+
+    def __hash__(self) -> int:
+        return self._hash
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, OptionKey):
+            return (
+                self.name == other.name and
+                self.subproject == other.subproject and
+                self.machine is other.machine and
+                self.lang == other.lang)
+        return NotImplemented
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, OptionKey):
+            return (
+                self.name < other.name and
+                self.subproject < other.subproject and
+                self.machine < other.machine and
+                self.lang < other.lang)
+        return NotImplemented
+
+    def __str__(self) -> str:
+        out = self.name
+        if self.lang:
+            out = f'{self.lang}_{out}'
+        if self.machine is MachineChoice.BUILD:
+            out = f'build.{out}'
+        if self.subproject:
+            out = f'{self.subproject}:{out}'
+        return out
+
+    def __repr__(self) -> str:
+        return f'OptionKey({repr(self.name)}, {repr(self.subproject)}, {repr(self.machine)}, {repr(self.lang)})'
+
+    @classmethod
+    def from_string(cls, raw: str) -> 'OptionKey':
+        """Parse the raw command line format into a three part tuple.
+
+        This takes strings like `mysubproject:build.myoption` and Creates an
+        OptionKey out of them.
+        """
+        try:
+            subproject, raw2 = raw.split(':')
+        except ValueError:
+            subproject, raw2 = '', raw
+
+        if raw2.startswith('build.'):
+            raw3 = raw2.split('.', 1)[1]
+            for_machine = MachineChoice.BUILD
+        else:
+            raw3 = raw2
+            for_machine = MachineChoice.HOST
+
+        from ..compilers import all_languages
+        if any(raw3.startswith(f'{l}_') for l in all_languages):
+            lang, opt = raw3.split('_', 1)
+        else:
+            lang, opt = None, raw3
+        assert ':' not in opt
+        assert 'build.' not in opt
+
+        return cls(opt, subproject, for_machine, lang)
+
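+    # Illustrative parse (comment only), assuming 'cpp' is among the
+    # registered languages: 'sub:build.cpp_std' becomes
+    # OptionKey('std', subproject='sub', machine=MachineChoice.BUILD, lang='cpp')
+    # and str() turns it back into 'sub:build.cpp_std'.
+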
+    def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
+               machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '') -> 'OptionKey':
+        """Create a new copy of this key, but with alterted members.
+
+        For example:
+        >>> a = OptionKey('foo', '', MachineChoice.HOST)
+        >>> b = OptionKey('foo', 'bar', MachineChoice.HOST)
+        >>> b == a.evolve(subproject='bar')
+        True
+        """
+        # We have to be a little clever with lang here, because lang is valid
+        # as None, for non-compiler options
+        return OptionKey(
+            name if name is not None else self.name,
+            subproject if subproject is not None else self.subproject,
+            machine if machine is not None else self.machine,
+            lang if lang != '' else self.lang,
+        )
+
+    def as_root(self) -> 'OptionKey':
+        """Convenience method for key.evolve(subproject='')."""
+        return self.evolve(subproject='')
+
+    def as_build(self) -> 'OptionKey':
+        """Convenience method for key.evolve(machine=MachinceChoice.BUILD)."""
+        return self.evolve(machine=MachineChoice.BUILD)
+
+    def as_host(self) -> 'OptionKey':
+        """Convenience method for key.evolve(machine=MachinceChoice.HOST)."""
+        return self.evolve(machine=MachineChoice.HOST)
+
+    def is_backend(self) -> bool:
+        """Convenience method to check if this is a backend option."""
+        return self.type is OptionType.BACKEND
+
+    def is_builtin(self) -> bool:
+        """Convenience method to check if this is a builtin option."""
+        return self.type is OptionType.BUILTIN
+
+    def is_compiler(self) -> bool:
+        """Convenience method to check if this is a builtin option."""
+        return self.type is OptionType.COMPILER
+
+    def is_project(self) -> bool:
+        """Convenience method to check if this is a project option."""
+        return self.type is OptionType.PROJECT
+
+    def is_base(self) -> bool:
+        """Convenience method to check if this is a base option."""
+        return self.type is OptionType.BASE
diff --git a/meson/mesonbuild/mesonlib/win32.py b/meson/mesonbuild/mesonlib/win32.py
new file mode 100644
index 000000000..bc0caec94
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/win32.py
@@ -0,0 +1,39 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Windows specific implementations of mesonlib functionality."""
+
+import msvcrt
+import typing as T
+
+from .universal import MesonException
+from .platform import BuildDirLock as BuildDirLockBase
+
+__all__ = ['BuildDirLock']
+
+class BuildDirLock(BuildDirLockBase):
+
+    def __enter__(self) -> None:
+        self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+        try:
+            msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
+        except (BlockingIOError, PermissionError):
+            self.lockfile.close()
+            raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+
+    def __exit__(self, *args: T.Any) -> None:
+        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
+        self.lockfile.close()
diff --git a/meson/mesonbuild/mesonmain.py b/meson/mesonbuild/mesonmain.py
new file mode 100644
index 000000000..8b7c9c176
--- /dev/null
+++ b/meson/mesonbuild/mesonmain.py
@@ -0,0 +1,329 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Work around some pathlib bugs...
+from . import _pathlib
+import sys
+sys.modules['pathlib'] = _pathlib
+
+import os.path
+import importlib
+import traceback
+import argparse
+import codecs
+import shutil
+
+from . import mesonlib
+from . import mlog
+from . import mconf, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata, mcompile, mdevenv
+from .mesonlib import MesonException
+from .environment import detect_msys2_arch
+from .wrap import wraptool
+
+need_setup_vsenv = False
+
+bat_template = '''@ECHO OFF
+
+call "{}"
+
+ECHO {}
+SET
+'''
+
+# If on Windows and VS is installed but not set up in the environment,
+# set it to be runnable. In this way Meson can be directly invoked
+# from any shell, VS Code etc.
+def setup_vsenv() -> None:
+    import subprocess, json, pathlib
+    if not mesonlib.is_windows():
+        return
+    bat_placeholder = 'nananananananananananananananana'
+    # If an existing build tool chain exists in PATH -> do nothing.
+    if shutil.which('cc'):
+        return
+    if shutil.which('gcc'):
+        return
+    if shutil.which('clang'):
+        return
+    if shutil.which('clang-cl'):
+        return
+    if os.environ.get('OSTYPE', bat_placeholder) == 'cygwin':
+        return
+    if 'Visual Studio' in os.environ['PATH']:
+        return
+    # VSINSTALLDIR is set when running the vcvars scripts from a Visual Studio installation
+    # Tested with Visual Studio 2012 and 2017
+    if 'VSINSTALLDIR' in os.environ:
+        return
+    # Check explicitly for cl when on Windows
+    if shutil.which('cl.exe'):
+        return
+
+    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
+    bat_locator_bin = pathlib.Path(root, 'Microsoft Visual Studio/Installer/vswhere.exe')
+    if not bat_locator_bin.exists():
+        return
+    bat_json = subprocess.check_output(
+        [
+            str(bat_locator_bin),
+            '-latest',
+            '-prerelease',
+            '-requiresAny',
+            '-requires', 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64',
+            '-products', '*',
+            '-utf8',
+            '-format',
+            'json'
+        ]
+    )
+    bat_info = json.loads(bat_json)
+    if not bat_info:
+        # VS installer installed but not VS itself, maybe?
+        return
+    print('Activating VS', bat_info[0]['catalog']['productDisplayVersion'])
+    bat_root = pathlib.Path(bat_info[0]['installationPath'])
+    bat_path = bat_root / 'VC/Auxiliary/Build/vcvars64.bat'
+    if not bat_path.exists():
+        return
+
+    bat_file = pathlib.Path.home() / 'vsdetect.bat'
+
+    bat_separator = '---SPLIT---'
+    bat_contents = bat_template.format(bat_path, bat_separator)
+    bat_file.write_text(bat_contents, encoding='utf-8')
+    try:
+        bat_output = subprocess.check_output(str(bat_file), universal_newlines=True)
+    finally:
+        bat_file.unlink()
+    bat_lines = bat_output.split('\n')
+    bat_separator_seen = False
+    for bat_line in bat_lines:
+        if bat_line == bat_separator:
+            bat_separator_seen = True
+            continue
+        if not bat_separator_seen:
+            continue
+        if not bat_line:
+            continue
+        k, v = bat_line.split('=', 1)
+        os.environ[k] = v
+    global need_setup_vsenv
+    need_setup_vsenv = True
+
+
+# Note: when adding arguments, please also add them to the completion
+# scripts in $MESONSRC/data/shell-completions/
+class CommandLineParser:
+    def __init__(self):
+        self.term_width = shutil.get_terminal_size().columns
+        self.formatter = lambda prog: argparse.HelpFormatter(prog, max_help_position=int(self.term_width / 2), width=self.term_width)
+
+        self.commands = {}
+        self.hidden_commands = []
+        self.parser = argparse.ArgumentParser(prog='meson', formatter_class=self.formatter)
+        self.subparsers = self.parser.add_subparsers(title='Commands', dest='command',
+                                                     description='If no command is specified it defaults to setup command.')
+        self.add_command('setup', msetup.add_arguments, msetup.run,
+                         help_msg='Configure the project')
+        self.add_command('configure', mconf.add_arguments, mconf.run,
+                         help_msg='Change project options',)
+        self.add_command('dist', mdist.add_arguments, mdist.run,
+                         help_msg='Generate release archive',)
+        self.add_command('install', minstall.add_arguments, minstall.run,
+                         help_msg='Install the project')
+        self.add_command('introspect', mintro.add_arguments, mintro.run,
+                         help_msg='Introspect project')
+        self.add_command('init', minit.add_arguments, minit.run,
+                         help_msg='Create a new project')
+        self.add_command('test', mtest.add_arguments, mtest.run,
+                         help_msg='Run tests')
+        self.add_command('wrap', wraptool.add_arguments, wraptool.run,
+                         help_msg='Wrap tools')
+        self.add_command('subprojects', msubprojects.add_arguments, msubprojects.run,
+                         help_msg='Manage subprojects')
+        self.add_command('help', self.add_help_arguments, self.run_help_command,
+                         help_msg='Print help of a subcommand')
+        self.add_command('rewrite', lambda parser: rewriter.add_arguments(parser, self.formatter), rewriter.run,
+                         help_msg='Modify the project definition')
+        self.add_command('compile', mcompile.add_arguments, mcompile.run,
+                         help_msg='Build the project')
+        self.add_command('devenv', mdevenv.add_arguments, mdevenv.run,
+                         help_msg='Run commands in developer environment')
+
+        # Hidden commands
+        self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command,
+                         help_msg=argparse.SUPPRESS)
+        self.add_command('unstable-coredata', munstable_coredata.add_arguments, munstable_coredata.run,
+                         help_msg=argparse.SUPPRESS)
+
+    def add_command(self, name, add_arguments_func, run_func, help_msg, aliases=None):
+        aliases = aliases or []
+        # FIXME: Cannot have hidden subparser:
+        # https://bugs.python.org/issue22848
+        if help_msg == argparse.SUPPRESS:
+            p = argparse.ArgumentParser(prog='meson ' + name, formatter_class=self.formatter)
+            self.hidden_commands.append(name)
+        else:
+            p = self.subparsers.add_parser(name, help=help_msg, aliases=aliases, formatter_class=self.formatter)
+        add_arguments_func(p)
+        p.set_defaults(run_func=run_func)
+        for i in [name] + aliases:
+            self.commands[i] = p
+
+    def add_runpython_arguments(self, parser):
+        parser.add_argument('-c', action='store_true', dest='eval_arg', default=False)
+        parser.add_argument('script_file')
+        parser.add_argument('script_args', nargs=argparse.REMAINDER)
+
+    def run_runpython_command(self, options):
+        import runpy
+        if options.eval_arg:
+            exec(options.script_file)
+        else:
+            sys.argv[1:] = options.script_args
+            sys.path.insert(0, os.path.dirname(options.script_file))
+            runpy.run_path(options.script_file, run_name='__main__')
+        return 0
+
+    def add_help_arguments(self, parser):
+        parser.add_argument('command', nargs='?')
+
+    def run_help_command(self, options):
+        if options.command:
+            self.commands[options.command].print_help()
+        else:
+            self.parser.print_help()
+        return 0
+
+    def run(self, args):
+        # If first arg is not a known command, assume user wants to run the setup
+        # command.
+        known_commands = list(self.commands.keys()) + ['-h', '--help']
+        if not args or args[0] not in known_commands:
+            args = ['setup'] + args
+
+        # Hidden commands have their own parser instead of using the global one
+        if args[0] in self.hidden_commands:
+            command = args[0]
+            parser = self.commands[command]
+            args = args[1:]
+        else:
+            parser = self.parser
+
+        args = mesonlib.expand_arguments(args)
+        options = parser.parse_args(args)
+
+        try:
+            return options.run_func(options)
+        except MesonException as e:
+            mlog.exception(e)
+            logfile = mlog.shutdown()
+            if logfile is not None:
+                mlog.log("\nA full log can be found at", mlog.bold(logfile))
+            if os.environ.get('MESON_FORCE_BACKTRACE'):
+                raise
+            return 1
+        except Exception:
+            if os.environ.get('MESON_FORCE_BACKTRACE'):
+                raise
+            traceback.print_exc()
+            return 2
+        finally:
+            mlog.shutdown()
+
+def run_script_command(script_name, script_args):
+    # Map script name to module name for those that don't match
+    script_map = {'exe': 'meson_exe',
+                  'install': 'meson_install',
+                  'delsuffix': 'delwithsuffix',
+                  'gtkdoc': 'gtkdochelper',
+                  'hotdoc': 'hotdochelper',
+                  'regencheck': 'regen_checker'}
+    module_name = script_map.get(script_name, script_name)
+
+    try:
+        module = importlib.import_module('mesonbuild.scripts.' + module_name)
+    except ModuleNotFoundError as e:
+        mlog.exception(e)
+        return 1
+
+    try:
+        return module.run(script_args)
+    except MesonException as e:
+        mlog.error(f'Error in {script_name} helper script:')
+        mlog.exception(e)
+        return 1
+
+def ensure_stdout_accepts_unicode():
+    if sys.stdout.encoding and not sys.stdout.encoding.upper().startswith('UTF-'):
+        if sys.version_info >= (3, 7):
+            sys.stdout.reconfigure(errors='surrogateescape')
+        else:
+            sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach(),
+                                                   errors='surrogateescape')
+            sys.stdout.encoding = 'UTF-8'
+            if not hasattr(sys.stdout, 'buffer'):
+                sys.stdout.buffer = sys.stdout.raw if hasattr(sys.stdout, 'raw') else sys.stdout
+
+def run(original_args, mainfile):
+    if sys.version_info < (3, 6):
+        print('Meson works correctly only with python 3.6+.')
+        print(f'You have python {sys.version}.')
+        print('Please update your environment')
+        return 1
+
+    # Meson gets confused if stdout can't output Unicode, if the
+    # locale isn't Unicode, just force stdout to accept it. This tries
+    # to emulate enough of PEP 540 to work elsewhere.
+    ensure_stdout_accepts_unicode()
+
+    # https://github.com/mesonbuild/meson/issues/3653
+    if sys.platform.lower() == 'msys':
+        mlog.error('This python3 seems to be msys/python on MSYS2 Windows, which is known to have path semantics incompatible with Meson')
+        msys2_arch = detect_msys2_arch()
+        if msys2_arch:
+            mlog.error('Please install and use mingw-w64-i686-python3 and/or mingw-w64-x86_64-python3 with Pacman')
+        else:
+            mlog.error('Please download and use Python as detailed at: https://mesonbuild.com/Getting-meson.html')
+        return 2
+
+    # Set the meson command that will be used to run scripts and so on
+    mesonlib.set_meson_command(mainfile)
+
+    args = original_args[:]
+
+    # Special handling of internal commands called from backends, they don't
+    # need to go through argparse.
+    if len(args) >= 2 and args[0] == '--internal':
+        if args[1] == 'regenerate':
+            # Rewrite "meson --internal regenerate" command line to
+            # "meson --reconfigure"
+            args = ['--reconfigure'] + args[2:]
+        else:
+            return run_script_command(args[1], args[2:])
+
+    return CommandLineParser().run(args)
+
+def main():
+    setup_vsenv()
+    # Always resolve the command path so Ninja can find it for regen, tests, etc.
+    if 'meson.exe' in sys.executable:
+        assert os.path.isabs(sys.executable)
+        launcher = sys.executable
+    else:
+        launcher = os.path.realpath(sys.argv[0])
+    return run(sys.argv[1:], launcher)
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/meson/mesonbuild/minit.py b/meson/mesonbuild/minit.py
new file mode 100644
index 000000000..124e6c671
--- /dev/null
+++ b/meson/mesonbuild/minit.py
@@ -0,0 +1,186 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Code that creates simple startup projects."""
+
+from pathlib import Path
+from enum import Enum
+import subprocess
+import shutil
+import sys
+import os
+import re
+from glob import glob
+from mesonbuild import mesonlib
+from mesonbuild.environment import detect_ninja
+from mesonbuild.templates.samplefactory import sameple_generator
+import typing as T
+
+if T.TYPE_CHECKING:
+    import argparse
+
+'''
+We currently have only one Meson template at this time.
+'''
+from mesonbuild.templates.mesontemplates import create_meson_build
+
+FORTRAN_SUFFIXES = {'.f', '.for', '.F', '.f90', '.F90'}
+LANG_SUFFIXES = {'.c', '.cc', '.cpp', '.cs', '.cu', '.d', '.m', '.mm', '.rs', '.java'} | FORTRAN_SUFFIXES
+LANG_SUPPORTED = {'c', 'cpp', 'cs', 'cuda', 'd', 'fortran', 'java', 'rust', 'objc', 'objcpp'}
+
+DEFAULT_PROJECT = 'executable'
+DEFAULT_VERSION = '0.1'
+class DEFAULT_TYPES(Enum):
+    EXE = 'executable'
+    LIB = 'library'
+
+INFO_MESSAGE = '''Sample project created. To build it run the
+following commands:
+
+meson setup builddir
+meson compile -C builddir
+'''
+
+
+def create_sample(options: 'argparse.Namespace') -> None:
+    '''
+    Based on the arguments passed, we check for a matching language,
+    then check the project type and create a new Meson sample project.
+    '''
+    sample_gen = sameple_generator(options)
+    if options.type == DEFAULT_TYPES['EXE'].value:
+        sample_gen.create_executable()
+    elif options.type == DEFAULT_TYPES['LIB'].value:
+        sample_gen.create_library()
+    else:
+        raise RuntimeError('Unreachable code')
+    print(INFO_MESSAGE)
+
+def autodetect_options(options: 'argparse.Namespace', sample: bool = False) -> None:
+    '''
+    Here we autodetect options for args that were not passed in, so the
+    user doesn't have to think about it.
+    '''
+    if not options.name:
+        options.name = Path().resolve().stem
+        if not re.match('[a-zA-Z_][a-zA-Z0-9]*', options.name) and sample:
+            raise SystemExit('Name of current directory "{}" is not usable as a sample project name.\n'
+                             'Specify a project name with --name.'.format(options.name))
+        print('Using "{}" (name of current directory) as project name.'
+              .format(options.name))
+    if not options.executable:
+        options.executable = options.name
+        print('Using "{}" (project name) as name of executable to build.'
+              .format(options.executable))
+    if sample:
+        # The rest of the autodetection is not applicable to generating sample projects.
+        return
+    if not options.srcfiles:
+        srcfiles = []
+        for f in (f for f in Path().iterdir() if f.is_file()):
+            if f.suffix in LANG_SUFFIXES:
+                srcfiles.append(f)
+        if not srcfiles:
+            raise SystemExit('No recognizable source files found.\n'
+                             'Run meson init in an empty directory to create a sample project.')
+        options.srcfiles = srcfiles
+        print("Detected source files: " + ' '.join(map(str, srcfiles)))
+    options.srcfiles = [Path(f) for f in options.srcfiles]
+    if not options.language:
+        for f in options.srcfiles:
+            if f.suffix == '.c':
+                options.language = 'c'
+                break
+            if f.suffix in ('.cc', '.cpp'):
+                options.language = 'cpp'
+                break
+            if f.suffix == '.cs':
+                options.language = 'cs'
+                break
+            if f.suffix == '.cu':
+                options.language = 'cuda'
+                break
+            if f.suffix == '.d':
+                options.language = 'd'
+                break
+            if f.suffix in FORTRAN_SUFFIXES:
+                options.language = 'fortran'
+                break
+            if f.suffix == '.rs':
+                options.language = 'rust'
+                break
+            if f.suffix == '.m':
+                options.language = 'objc'
+                break
+            if f.suffix == '.mm':
+                options.language = 'objcpp'
+                break
+            if f.suffix == '.java':
+                options.language = 'java'
+                break
+        if not options.language:
+            raise SystemExit("Can't autodetect language, please specify it with -l.")
+        print("Detected language: " + options.language)
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    '''
+    Here we add the args that the user can pass when making a new
+    Meson project.
+    '''
+    parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", help="source files. default: all recognized files in current directory")
+    parser.add_argument('-C', dest='wd', action=mesonlib.RealPathAction,
+                        help='directory to cd into before running')
+    parser.add_argument("-n", "--name", help="project name. default: name of current directory")
+    parser.add_argument("-e", "--executable", help="executable name. default: project name")
+    parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
+    parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files")
+    parser.add_argument("-b", "--build", action='store_true', help="build after generation")
+    parser.add_argument("--builddir", default='build', help="directory for build")
+    parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.")
+    parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help=f"project type. default: {DEFAULT_PROJECT} based project")
+    parser.add_argument('--version', default=DEFAULT_VERSION, help=f"project version. default: {DEFAULT_VERSION}")
+
+def run(options: 'argparse.Namespace') -> int:
+    '''
+    Here we generate the new Meson sample project.
+    '''
+    if not Path(options.wd).exists():
+        sys.exit('Project source root directory not found. Run this command in source directory root.')
+    os.chdir(options.wd)
+
+    if not glob('*'):
+        autodetect_options(options, sample=True)
+        if not options.language:
+            print('Defaulting to generating a C language project.')
+            options.language = 'c'
+        create_sample(options)
+    else:
+        autodetect_options(options)
+        if Path('meson.build').is_file() and not options.force:
+            raise SystemExit('meson.build already exists. Use --force to overwrite.')
+        create_meson_build(options)
+    if options.build:
+        if Path(options.builddir).is_dir() and options.force:
+            print('Build directory already exists, deleting it.')
+            shutil.rmtree(options.builddir)
+        print('Building...')
+        cmd = mesonlib.get_meson_command() + [options.builddir]
+        ret = subprocess.run(cmd)
+        if ret.returncode:
+            raise SystemExit
+        cmd = detect_ninja() + ['-C', options.builddir]
+        ret = subprocess.run(cmd)
+        if ret.returncode:
+            raise SystemExit
+    return 0
diff --git a/meson/mesonbuild/minstall.py b/meson/mesonbuild/minstall.py
new file mode 100644
index 000000000..e753d94db
--- /dev/null
+++ b/meson/mesonbuild/minstall.py
@@ -0,0 +1,721 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from glob import glob
+from pathlib import Path
+import argparse
+import errno
+import os
+import pickle
+import shlex
+import shutil
+import subprocess
+import sys
+import typing as T
+
+from . import environment
+from .backend.backends import InstallData
+from .coredata import major_versions_differ, MesonVersionMismatchException
+from .coredata import version as coredata_version
+from .mesonlib import Popen_safe, RealPathAction, is_windows
+from .scripts import depfixer, destdir_join
+from .scripts.meson_exe import run_exe
+try:
+    from __main__ import __file__ as main_file
+except ImportError:
+    # Happens when running as meson.exe which is native Windows.
+    # This is only used for pkexec which is not, so this is fine.
+    main_file = None
+
+if T.TYPE_CHECKING:
+    from .mesonlib import FileMode
+
+    try:
+        from typing import Protocol
+    except AttributeError:
+        from typing_extensions import Protocol  # type: ignore
+
+    class ArgumentType(Protocol):
+        """Typing information for the object returned by argparse."""
+        no_rebuild: bool
+        only_changed: bool
+        profile: bool
+        quiet: bool
+        wd: str
+        destdir: str
+        dry_run: bool
+        skip_subprojects: str
+
+
+symlink_warning = '''Warning: trying to copy a symlink that points to a file. This will copy the file,
+but this will be changed in a future version of Meson to copy the symlink as is. Please update your
+build definitions so that it will not break when the change happens.'''
+
+selinux_updates: T.List[str] = []
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        help='directory to cd into before running')
+    parser.add_argument('--profile-self', action='store_true', dest='profile',
+                        help=argparse.SUPPRESS)
+    parser.add_argument('--no-rebuild', default=False, action='store_true',
+                        help='Do not rebuild before installing.')
+    parser.add_argument('--only-changed', default=False, action='store_true',
+                        help='Only overwrite files that are older than the copied file.')
+    parser.add_argument('--quiet', default=False, action='store_true',
+                        help='Do not print every file that was installed.')
+    parser.add_argument('--destdir', default=None,
+                        help='Sets or overrides DESTDIR environment. (Since 0.57.0)')
+    parser.add_argument('--dry-run', '-n', action='store_true',
+                        help='Doesn\'t actually install, but print logs. (Since 0.57.0)')
+    parser.add_argument('--skip-subprojects', nargs='?', const='*', default='',
+                        help='Do not install files from given subprojects. (Since 0.58.0)')
+
+class DirMaker:
+    def __init__(self, lf: T.TextIO, makedirs: T.Callable[..., None]):
+        self.lf = lf
+        self.dirs: T.List[str] = []
+        self.makedirs_impl = makedirs
+
+    def makedirs(self, path: str, exist_ok: bool = False) -> None:
+        dirname = os.path.normpath(path)
+        dirs = []
+        while dirname != os.path.dirname(dirname):
+            if dirname in self.dirs:
+                # In dry-run mode the directory does not exist but we would have
+                # created it with all its parents otherwise.
+                break
+            if not os.path.exists(dirname):
+                dirs.append(dirname)
+            dirname = os.path.dirname(dirname)
+        self.makedirs_impl(path, exist_ok=exist_ok)
+
+        # store the directories in creation order, with the parent directory
+        # before the child directories. Future calls of makedir() will not
+        # create the parent directories, so the last element in the list is
+        # the last one to be created. That is the first one to be removed on
+        # __exit__
+        dirs.reverse()
+        self.dirs += dirs
+
+    def __enter__(self) -> 'DirMaker':
+        return self
+
+    def __exit__(self, exception_type: T.Type[Exception], value: T.Any, traceback: T.Any) -> None:
+        self.dirs.reverse()
+        for d in self.dirs:
+            append_to_log(self.lf, d)
+
+
+def is_executable(path: str, follow_symlinks: bool = False) -> bool:
+    '''Checks whether any of the "x" bits are set in the source file mode.'''
+    return bool(os.stat(path, follow_symlinks=follow_symlinks).st_mode & 0o111)
+
+
+def append_to_log(lf: T.TextIO, line: str) -> None:
+    lf.write(line)
+    if not line.endswith('\n'):
+        lf.write('\n')
+    lf.flush()
+
+
+def set_chown(path: str, user: T.Union[str, int, None] = None,
+              group: T.Union[str, int, None] = None,
+              dir_fd: T.Optional[int] = None, follow_symlinks: bool = True) -> None:
+    # shutil.chown will call os.chown without passing all the parameters
+    # and particularly follow_symlinks, thus we replace it temporarily
+    # with a wrapper that passes all the parameters so that follow_symlinks
+    # is actually passed properly.
+    # Not nice, but better than actually rewriting shutil.chown until
+    # this python bug is fixed: https://bugs.python.org/issue18108
+    real_os_chown = os.chown
+
+    def chown(path: T.Union[int, str, 'os.PathLike[str]', bytes, 'os.PathLike[bytes]'],
+              uid: int, gid: int, *, dir_fd: T.Optional[int] = dir_fd,
+              follow_symlinks: bool = follow_symlinks) -> None:
+        """Override the default behavior of os.chown
+
+        Use a real function rather than a lambda to help mypy out. Also real
+        functions are faster.
+        """
+        real_os_chown(path, uid, gid, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
+
+    try:
+        os.chown = chown
+        shutil.chown(path, user, group)
+    finally:
+        os.chown = real_os_chown
+
+
+def set_chmod(path: str, mode: int, dir_fd: T.Optional[int] = None,
+              follow_symlinks: bool = True) -> None:
+    try:
+        os.chmod(path, mode, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
+    except (NotImplementedError, OSError, SystemError):
+        if not os.path.islink(path):
+            os.chmod(path, mode, dir_fd=dir_fd)
+
+
+def sanitize_permissions(path: str, umask: T.Union[str, int]) -> None:
+    # TODO: with python 3.8 or typing_extensions we could replace this with
+    # `umask: T.Union[T.Literal['preserve'], int]`, which would be more correct
+    if umask == 'preserve':
+        return
+    assert isinstance(umask, int), 'umask should only be "preserve" or an integer'
+    new_perms = 0o777 if is_executable(path, follow_symlinks=False) else 0o666
+    new_perms &= ~umask
+    try:
+        set_chmod(path, new_perms, follow_symlinks=False)
+    except PermissionError as e:
+        msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...'
+        print(msg.format(path, new_perms, e.strerror))
+
+
+def set_mode(path: str, mode: T.Optional['FileMode'], default_umask: T.Union[str, int]) -> None:
+    if mode is None or all(m is None for m in [mode.perms_s, mode.owner, mode.group]):
+        # Just sanitize permissions with the default umask
+        sanitize_permissions(path, default_umask)
+        return
+    # No chown() on Windows, and must set one of owner/group
+    if not is_windows() and (mode.owner is not None or mode.group is not None):
+        try:
+            set_chown(path, mode.owner, mode.group, follow_symlinks=False)
+        except PermissionError as e:
+            msg = '{!r}: Unable to set owner {!r} and group {!r}: {}, ignoring...'
+            print(msg.format(path, mode.owner, mode.group, e.strerror))
+        except LookupError:
+            msg = '{!r}: Non-existent owner {!r} or group {!r}: ignoring...'
+            print(msg.format(path, mode.owner, mode.group))
+        except OSError as e:
+            if e.errno == errno.EINVAL:
+                msg = '{!r}: Non-existent numeric owner {!r} or group {!r}: ignoring...'
+                print(msg.format(path, mode.owner, mode.group))
+            else:
+                raise
+    # Must set permissions *after* setting owner/group otherwise the
+    # setuid/setgid bits will get wiped by chmod
+    # NOTE: On Windows you can set read/write perms; the rest are ignored
+    if mode.perms_s is not None:
+        try:
+            set_chmod(path, mode.perms, follow_symlinks=False)
+        except PermissionError as e:
+            msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...'
+            print(msg.format(path, mode.perms_s, e.strerror))
+    else:
+        sanitize_permissions(path, default_umask)
+
+
+def restore_selinux_contexts() -> None:
+    '''
+    Restores the SELinux context for files in @selinux_updates
+
+    If $DESTDIR is set, do not warn if the call fails.
+    '''
+    try:
+        subprocess.check_call(['selinuxenabled'])
+    except (FileNotFoundError, NotADirectoryError, PermissionError, subprocess.CalledProcessError):
+        # If we don't have selinux or selinuxenabled returned 1, failure
+        # is ignored quietly.
+        return
+
+    if not shutil.which('restorecon'):
+        # If we don't have restorecon, failure is ignored quietly.
+        return
+
+    if not selinux_updates:
+        # If the list of files is empty, do not try to call restorecon.
+        return
+
+    with subprocess.Popen(['restorecon', '-F', '-f-', '-0'],
+                          stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
+        out, err = proc.communicate(input=b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0')
+        if proc.returncode != 0 and not os.environ.get('DESTDIR'):
+            print('Failed to restore SELinux context of installed files...',
+                  'Standard output:', out.decode(),
+                  'Standard error:', err.decode(), sep='\n')
+
+
+def get_destdir_path(destdir: str, fullprefix: str, path: str) -> str:
+    if os.path.isabs(path):
+        output = destdir_join(destdir, path)
+    else:
+        output = os.path.join(fullprefix, path)
+    return output
+
+
+def check_for_stampfile(fname: str) -> str:
+    '''Some languages e.g. Rust have output files
+    whose names are not known at configure time.
+    Check if this is the case and return the real
+    file instead.'''
+    if fname.endswith('.so') or fname.endswith('.dll'):
+        if os.stat(fname).st_size == 0:
+            (base, suffix) = os.path.splitext(fname)
+            files = glob(base + '-*' + suffix)
+            if len(files) > 1:
+                print("Stale dynamic library files in build dir. Can't install.")
+                sys.exit(1)
+            if len(files) == 1:
+                return files[0]
+    elif fname.endswith('.a') or fname.endswith('.lib'):
+        if os.stat(fname).st_size == 0:
+            (base, suffix) = os.path.splitext(fname)
+            files = glob(base + '-*' + '.rlib')
+            if len(files) > 1:
+                print("Stale static library files in build dir. Can't install.")
+                sys.exit(1)
+            if len(files) == 1:
+                return files[0]
+    return fname
+
+
+class Installer:
+
+    def __init__(self, options: 'ArgumentType', lf: T.TextIO):
+        self.did_install_something = False
+        self.options = options
+        self.lf = lf
+        self.preserved_file_count = 0
+        self.dry_run = options.dry_run
+        # [''] means skip none,
+        # ['*'] means skip all,
+        # ['sub1', ...] means skip only those.
+        self.skip_subprojects = [i.strip() for i in options.skip_subprojects.split(',')]
+
+    def remove(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            os.remove(*args, **kwargs)
+
+    def symlink(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            os.symlink(*args, **kwargs)
+
+    def makedirs(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            os.makedirs(*args, **kwargs)
+
+    def copy(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copy(*args, **kwargs)
+
+    def copy2(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copy2(*args, **kwargs)
+
+    def copyfile(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copyfile(*args, **kwargs)
+
+    def copystat(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copystat(*args, **kwargs)
+
+    def fix_rpath(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            depfixer.fix_rpath(*args, **kwargs)
+
+    def set_chown(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            set_chown(*args, **kwargs)
+
+    def set_chmod(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            set_chmod(*args, **kwargs)
+
+    def sanitize_permissions(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            sanitize_permissions(*args, **kwargs)
+
+    def set_mode(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            set_mode(*args, **kwargs)
+
+    def restore_selinux_contexts(self) -> None:
+        if not self.dry_run:
+            restore_selinux_contexts()
+
+    def Popen_safe(self, *args: T.Any, **kwargs: T.Any) -> T.Tuple[int, str, str]:
+        if not self.dry_run:
+            p, o, e = Popen_safe(*args, **kwargs)
+            return p.returncode, o, e
+        return 0, '', ''
+
+    def run_exe(self, *args: T.Any, **kwargs: T.Any) -> int:
+        if not self.dry_run:
+            return run_exe(*args, **kwargs)
+        return 0
+
+    def install_subproject(self, subproject: str) -> bool:
+        if subproject and (subproject in self.skip_subprojects or '*' in self.skip_subprojects):
+            return False
+        return True
+
+    def log(self, msg: str) -> None:
+        if not self.options.quiet:
+            print(msg)
+
+    def should_preserve_existing_file(self, from_file: str, to_file: str) -> bool:
+        if not self.options.only_changed:
+            return False
+        # Always replace dangling symlinks
+        if os.path.islink(from_file) and not os.path.isfile(from_file):
+            return False
+        from_time = os.stat(from_file).st_mtime
+        to_time = os.stat(to_file).st_mtime
+        return from_time <= to_time
+
+    def do_copyfile(self, from_file: str, to_file: str,
+                    makedirs: T.Optional[T.Tuple[T.Any, str]] = None) -> bool:
+        outdir = os.path.split(to_file)[0]
+        if not os.path.isfile(from_file) and not os.path.islink(from_file):
+            raise RuntimeError('Tried to install something that isn\'t a file: '
+                               '{!r}'.format(from_file))
+        # copyfile fails if the target file already exists, so remove it to
+        # allow overwriting a previous install. If the target is not a file, we
+        # want to give a readable error.
+        if os.path.exists(to_file):
+            if not os.path.isfile(to_file):
+                raise RuntimeError('Destination {!r} already exists and is not '
+                                   'a file'.format(to_file))
+            if self.should_preserve_existing_file(from_file, to_file):
+                append_to_log(self.lf, f'# Preserving old file {to_file}\n')
+                self.preserved_file_count += 1
+                return False
+            self.remove(to_file)
+        elif makedirs:
+            # Unpack tuple
+            dirmaker, outdir = makedirs
+            # Create dirs if needed
+            dirmaker.makedirs(outdir, exist_ok=True)
+        self.log(f'Installing {from_file} to {outdir}')
+        if os.path.islink(from_file):
+            if not os.path.exists(from_file):
+                # Dangling symlink. Replicate as is.
+                self.copy(from_file, outdir, follow_symlinks=False)
+            else:
+                # Remove this entire branch when changing the behaviour to duplicate
+                # symlinks rather than copying what they point to.
+                print(symlink_warning)
+                self.copy2(from_file, to_file)
+        else:
+            self.copy2(from_file, to_file)
+        selinux_updates.append(to_file)
+        append_to_log(self.lf, to_file)
+        return True
+
+    def do_copydir(self, data: InstallData, src_dir: str, dst_dir: str,
+                   exclude: T.Optional[T.Tuple[T.Set[str], T.Set[str]]],
+                   install_mode: 'FileMode', dm: DirMaker) -> None:
+        '''
+        Copies the contents of directory @src_dir into @dst_dir.
+
+        For directory
+            /foo/
+              bar/
+                excluded
+                foobar
+              file
+        do_copydir(..., '/foo', '/dst/dir', {'bar/excluded'}) creates
+            /dst/
+              dir/
+                bar/
+                  foobar
+                file
+
+        Args:
+            src_dir: str, absolute path to the source directory
+            dst_dir: str, absolute path to the destination directory
+            exclude: (set(str), set(str)), tuple of (exclude_files, exclude_dirs),
+                     each element of the set is a path relative to src_dir.
+        '''
+        if not os.path.isabs(src_dir):
+            raise ValueError(f'src_dir must be absolute, got {src_dir}')
+        if not os.path.isabs(dst_dir):
+            raise ValueError(f'dst_dir must be absolute, got {dst_dir}')
+        if exclude is not None:
+            exclude_files, exclude_dirs = exclude
+        else:
+            exclude_files = exclude_dirs = set()
+        for root, dirs, files in os.walk(src_dir):
+            assert os.path.isabs(root)
+            for d in dirs[:]:
+                abs_src = os.path.join(root, d)
+                filepart = os.path.relpath(abs_src, start=src_dir)
+                abs_dst = os.path.join(dst_dir, filepart)
+                # Remove these so they aren't visited by os.walk at all.
+                if filepart in exclude_dirs:
+                    dirs.remove(d)
+                    continue
+                if os.path.isdir(abs_dst):
+                    continue
+                if os.path.exists(abs_dst):
+                    print(f'Tried to copy directory {abs_dst} but a file of that name already exists.')
+                    sys.exit(1)
+                dm.makedirs(abs_dst)
+                self.copystat(abs_src, abs_dst)
+                self.sanitize_permissions(abs_dst, data.install_umask)
+            for f in files:
+                abs_src = os.path.join(root, f)
+                filepart = os.path.relpath(abs_src, start=src_dir)
+                if filepart in exclude_files:
+                    continue
+                abs_dst = os.path.join(dst_dir, filepart)
+                if os.path.isdir(abs_dst):
+                    print(f'Tried to copy file {abs_dst} but a directory of that name already exists.')
+                    sys.exit(1)
+                parent_dir = os.path.dirname(abs_dst)
+                if not os.path.isdir(parent_dir):
+                    dm.makedirs(parent_dir)
+                    self.copystat(os.path.dirname(abs_src), parent_dir)
+                # FIXME: what about symlinks?
+                self.do_copyfile(abs_src, abs_dst)
+                self.set_mode(abs_dst, install_mode, data.install_umask)
+
+    @staticmethod
+    def check_installdata(obj: InstallData) -> InstallData:
+        if not isinstance(obj, InstallData) or not hasattr(obj, 'version'):
+            raise MesonVersionMismatchException('', coredata_version)
+        if major_versions_differ(obj.version, coredata_version):
+            raise MesonVersionMismatchException(obj.version, coredata_version)
+        return obj
+
+    def do_install(self, datafilename: str) -> None:
+        with open(datafilename, 'rb') as ifile:
+            d = self.check_installdata(pickle.load(ifile))
+
+        # Override in the env because some scripts could be relying on it.
+        if self.options.destdir is not None:
+            os.environ['DESTDIR'] = self.options.destdir
+
+        destdir = os.environ.get('DESTDIR', '')
+        fullprefix = destdir_join(destdir, d.prefix)
+
+        if d.install_umask != 'preserve':
+            assert isinstance(d.install_umask, int)
+            os.umask(d.install_umask)
+
+        self.did_install_something = False
+        try:
+            with DirMaker(self.lf, self.makedirs) as dm:
+                self.install_subdirs(d, dm, destdir, fullprefix) # Must be first, because it needs to delete the old subtree.
+                self.install_targets(d, dm, destdir, fullprefix)
+                self.install_headers(d, dm, destdir, fullprefix)
+                self.install_man(d, dm, destdir, fullprefix)
+                self.install_data(d, dm, destdir, fullprefix)
+                self.restore_selinux_contexts()
+                self.run_install_script(d, destdir, fullprefix)
+                if not self.did_install_something:
+                    self.log('Nothing to install.')
+                if not self.options.quiet and self.preserved_file_count > 0:
+                    self.log('Preserved {} unchanged files, see {} for the full list'
+                             .format(self.preserved_file_count, os.path.normpath(self.lf.name)))
+        except PermissionError:
+            if shutil.which('pkexec') is not None and 'PKEXEC_UID' not in os.environ and destdir == '':
+                print('Installation failed due to insufficient permissions.')
+                print('Attempting to use polkit to gain elevated privileges...')
+                os.execlp('pkexec', 'pkexec', sys.executable, main_file, *sys.argv[1:],
+                          '-C', os.getcwd())
+            else:
+                raise
+
+    def install_subdirs(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for i in d.install_subdirs:
+            if not self.install_subproject(i.subproject):
+                continue
+            self.did_install_something = True
+            full_dst_dir = get_destdir_path(destdir, fullprefix, i.install_path)
+            self.log(f'Installing subdir {i.path} to {full_dst_dir}')
+            dm.makedirs(full_dst_dir, exist_ok=True)
+            self.do_copydir(d, i.path, full_dst_dir, i.exclude, i.install_mode, dm)
+
+    def install_data(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for i in d.data:
+            if not self.install_subproject(i.subproject):
+                continue
+            fullfilename = i.path
+            outfilename = get_destdir_path(destdir, fullprefix, i.install_path)
+            outdir = os.path.dirname(outfilename)
+            if self.do_copyfile(fullfilename, outfilename, makedirs=(dm, outdir)):
+                self.did_install_something = True
+            self.set_mode(outfilename, i.install_mode, d.install_umask)
+
+    def install_man(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for m in d.man:
+            if not self.install_subproject(m.subproject):
+                continue
+            full_source_filename = m.path
+            outfilename = get_destdir_path(destdir, fullprefix, m.install_path)
+            outdir = os.path.dirname(outfilename)
+            if self.do_copyfile(full_source_filename, outfilename, makedirs=(dm, outdir)):
+                self.did_install_something = True
+            self.set_mode(outfilename, m.install_mode, d.install_umask)
+
+    def install_headers(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for t in d.headers:
+            if not self.install_subproject(t.subproject):
+                continue
+            fullfilename = t.path
+            fname = os.path.basename(fullfilename)
+            outdir = get_destdir_path(destdir, fullprefix, t.install_path)
+            outfilename = os.path.join(outdir, fname)
+            if self.do_copyfile(fullfilename, outfilename, makedirs=(dm, outdir)):
+                self.did_install_something = True
+            self.set_mode(outfilename, t.install_mode, d.install_umask)
+
+    def run_install_script(self, d: InstallData, destdir: str, fullprefix: str) -> None:
+        env = {'MESON_SOURCE_ROOT': d.source_dir,
+               'MESON_BUILD_ROOT': d.build_dir,
+               'MESON_INSTALL_PREFIX': d.prefix,
+               'MESON_INSTALL_DESTDIR_PREFIX': fullprefix,
+               'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]),
+               }
+        if self.options.quiet:
+            env['MESON_INSTALL_QUIET'] = '1'
+
+        for i in d.install_scripts:
+            if not self.install_subproject(i.subproject):
+                continue
+            name = ' '.join(i.cmd_args)
+            if i.skip_if_destdir and destdir:
+                self.log(f'Skipping custom install script because DESTDIR is set: {name!r}')
+                continue
+            self.did_install_something = True  # Custom script must report itself if it does nothing.
+            self.log(f'Running custom install script {name!r}')
+            try:
+                rc = self.run_exe(i, env)
+            except OSError:
+                print(f'FAILED: install script \'{name}\' could not be run, stopped')
+                # POSIX shells return 127 when a command could not be found
+                sys.exit(127)
+            if rc != 0:
+                print(f'FAILED: install script \'{name}\' exit code {rc}, stopped')
+                sys.exit(rc)
+
+    def install_targets(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for t in d.targets:
+            if not self.install_subproject(t.subproject):
+                continue
+            if not os.path.exists(t.fname):
+                # For example, import libraries of shared modules are optional
+                if t.optional:
+                    self.log(f'File {t.fname!r} not found, skipping')
+                    continue
+                else:
+                    raise RuntimeError(f'File {t.fname!r} could not be found')
+            file_copied = False # not set when a directory is copied
+            fname = check_for_stampfile(t.fname)
+            outdir = get_destdir_path(destdir, fullprefix, t.outdir)
+            outname = os.path.join(outdir, os.path.basename(fname))
+            final_path = os.path.join(d.prefix, t.outdir, os.path.basename(fname))
+            aliases = t.aliases
+            should_strip = t.strip
+            install_rpath = t.install_rpath
+            install_name_mappings = t.install_name_mappings
+            install_mode = t.install_mode
+            if not os.path.exists(fname):
+                raise RuntimeError(f'File {fname!r} could not be found')
+            elif os.path.isfile(fname):
+                file_copied = self.do_copyfile(fname, outname, makedirs=(dm, outdir))
+                self.set_mode(outname, install_mode, d.install_umask)
+                if should_strip and d.strip_bin is not None:
+                    if fname.endswith('.jar'):
+                        self.log('Not stripping jar target: {}'.format(os.path.basename(fname)))
+                        continue
+                    self.log('Stripping target {!r} using {}.'.format(fname, d.strip_bin[0]))
+                    returncode, stdo, stde = self.Popen_safe(d.strip_bin + [outname])
+                    if returncode != 0:
+                        print('Could not strip file.\n')
+                        print(f'Stdout:\n{stdo}\n')
+                        print(f'Stderr:\n{stde}\n')
+                        sys.exit(1)
+                if fname.endswith('.js'):
+                    # Emscripten outputs js files and optionally a wasm file.
+                    # If one was generated, install it as well.
+                    wasm_source = os.path.splitext(fname)[0] + '.wasm'
+                    if os.path.exists(wasm_source):
+                        wasm_output = os.path.splitext(outname)[0] + '.wasm'
+                        file_copied = self.do_copyfile(wasm_source, wasm_output)
+            elif os.path.isdir(fname):
+                fname = os.path.join(d.build_dir, fname.rstrip('/'))
+                outname = os.path.join(outdir, os.path.basename(fname))
+                dm.makedirs(outdir, exist_ok=True)
+                self.do_copydir(d, fname, outname, None, install_mode, dm)
+            else:
+                raise RuntimeError(f'Unknown file type for {fname!r}')
+            printed_symlink_error = False
+            for alias, to in aliases.items():
+                try:
+                    symlinkfilename = os.path.join(outdir, alias)
+                    try:
+                        self.remove(symlinkfilename)
+                    except FileNotFoundError:
+                        pass
+                    self.symlink(to, symlinkfilename)
+                    append_to_log(self.lf, symlinkfilename)
+                except (NotImplementedError, OSError):
+                    if not printed_symlink_error:
+                        print("Symlink creation does not work on this platform. "
+                              "Skipping all symlinking.")
+                        printed_symlink_error = True
+            if file_copied:
+                self.did_install_something = True
+                try:
+                    self.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
+                                   install_name_mappings, verbose=False)
+                except SystemExit as e:
+                    if isinstance(e.code, int) and e.code == 0:
+                        pass
+                    else:
+                        raise
+
+
+def rebuild_all(wd: str) -> bool:
+    if not (Path(wd) / 'build.ninja').is_file():
+        print('Only the ninja backend is supported for rebuilding the project before installation.')
+        return True
+
+    ninja = environment.detect_ninja()
+    if not ninja:
+        print("Can't find ninja, can't rebuild test.")
+        return False
+
+    ret = subprocess.run(ninja + ['-C', wd]).returncode
+    if ret != 0:
+        print(f'Could not rebuild {wd}')
+        return False
+
+    return True
+
+
+def run(opts: 'ArgumentType') -> int:
+    datafilename = 'meson-private/install.dat'
+    private_dir = os.path.dirname(datafilename)
+    log_dir = os.path.join(private_dir, '../meson-logs')
+    if not os.path.exists(os.path.join(opts.wd, datafilename)):
+        sys.exit('Install data not found. Run this command in build directory root.')
+    if not opts.no_rebuild:
+        if not rebuild_all(opts.wd):
+            sys.exit(-1)
+    os.chdir(opts.wd)
+    with open(os.path.join(log_dir, 'install-log.txt'), 'w', encoding='utf-8') as lf:
+        installer = Installer(opts, lf)
+        append_to_log(lf, '# List of files installed by Meson')
+        append_to_log(lf, '# Does not contain files installed by custom scripts.')
+        if opts.profile:
+            import cProfile as profile
+            fname = os.path.join(private_dir, 'profile-installer.log')
+            profile.runctx('installer.do_install(datafilename)', globals(), locals(), filename=fname)
+        else:
+            installer.do_install(datafilename)
+    return 0
diff --git a/meson/mesonbuild/mintro.py b/meson/mesonbuild/mintro.py
new file mode 100644
index 000000000..a79219b3d
--- /dev/null
+++ b/meson/mesonbuild/mintro.py
@@ -0,0 +1,543 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is a helper script for IDE developers. It allows you to
+extract information such as list of targets, files, compiler flags,
+tests and so on. All output is in JSON for simple parsing.
+
+Currently only works for the Ninja backend. Others use generated
+project files and don't need this info."""
+
+import collections
+import json
+from . import build, coredata as cdata
+from . import mesonlib
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
+from . import mlog
+from .backend import backends
+from .mparser import BaseNode, FunctionNode, ArrayNode, ArgumentNode, StringNode
+from .interpreter import Interpreter
+from pathlib import Path, PurePath
+import typing as T
+import os
+import argparse
+
+from .mesonlib import OptionKey
+
+def get_meson_info_file(info_dir: str) -> str:
+    return os.path.join(info_dir, 'meson-info.json')
+
+def get_meson_introspection_version() -> str:
+    return '1.0.0'
+
+def get_meson_introspection_required_version() -> T.List[str]:
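+    # Both constraints must hold, i.e. any 1.x introspection format is accepted.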
+    return ['>=1.0', '<2.0']
+
+class IntroCommand:
+    def __init__(self,
+                 desc: str,
+                 func: T.Optional[T.Callable[[], T.Union[dict, list]]] = None,
+                 no_bd: T.Optional[T.Callable[[IntrospectionInterpreter], T.Union[dict, list]]] = None) -> None:
+        self.desc = desc + '.'
+        self.func = func
+        self.no_bd = no_bd
+
+def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
+                                  builddata: T.Optional[build.Build] = None,
+                                  backend: T.Optional[backends.Backend] = None,
+                                  sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
+    if backend and builddata:
+        benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
+        testdata = backend.create_test_serialisation(builddata.get_tests())
+        installdata = backend.create_install_data()
+        interpreter = backend.interpreter
+    else:
+        benchmarkdata = testdata = installdata = None
+
+    # Enforce key order for argparse
+    return collections.OrderedDict([
+        ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+        ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+        ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+        ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+        ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)),
+        ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+        ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+        ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+        ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+        ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+    ])
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    intro_types = get_meson_introspection_types()
+    for key, val in intro_types.items():
+        flag = '--' + key.replace('_', '-')
+        parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
+
+    parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
+                        help='The backend to use for the --buildoptions introspection.')
+    parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
+                        help='Print all available information.')
+    parser.add_argument('-i', '--indent', action='store_true', dest='indent', default=False,
+                        help='Enable pretty printed JSON.')
+    parser.add_argument('-f', '--force-object-output', action='store_true', dest='force_dict', default=False,
+                        help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
+    parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
+    printer = AstJSONPrinter()
+    intr.ast.accept(printer)
+    return printer.result
+
+def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
+    res = {}
+    if installdata is not None:
+        for t in installdata.targets:
+            res[os.path.join(installdata.build_dir, t.fname)] = \
+                os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))
+            for alias in t.aliases.keys():
+                res[os.path.join(installdata.build_dir, alias)] = \
+                    os.path.join(installdata.prefix, t.outdir, os.path.basename(alias))
+        for i in installdata.data:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+        for i in installdata.headers:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path, os.path.basename(i.path))
+        for i in installdata.man:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+        for i in installdata.install_subdirs:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+    return res
+
+def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
+    if coredata.get_option(OptionKey('layout')) == 'flat':
+        return 'meson-out'
+    else:
+        return subdir
+
+def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+    tlist = []  # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
+    root_dir = Path(intr.source_root)
+
+    def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
+        res = []  # type: T.List[Path]
+        for n in node_list:
+            args = []  # type: T.List[BaseNode]
+            if isinstance(n, FunctionNode):
+                args = list(n.args.arguments)
+                if n.func_name in build_target_functions:
+                    args.pop(0)
+            elif isinstance(n, ArrayNode):
+                args = n.args.arguments
+            elif isinstance(n, ArgumentNode):
+                args = n.arguments
+            for j in args:
+                if isinstance(j, StringNode):
+                    assert isinstance(j.value, str)
+                    res += [Path(j.value)]
+                elif isinstance(j, str):
+                    res += [Path(j)]
+        res = [root_dir / i['subdir'] / x for x in res]
+        res = [x.resolve() for x in res]
+        return res
+
+    for i in intr.targets:
+        sources = nodes_to_paths(i['sources'])
+        extra_f = nodes_to_paths(i['extra_files'])
+        outdir = get_target_dir(intr.coredata, i['subdir'])
+
+        tlist += [{
+            'name': i['name'],
+            'id': i['id'],
+            'type': i['type'],
+            'defined_in': i['defined_in'],
+            'filename': [os.path.join(outdir, x) for x in i['outputs']],
+            'build_by_default': i['build_by_default'],
+            'target_sources': [{
+                'language': 'unknown',
+                'compiler': [],
+                'parameters': [],
+                'sources': [str(x) for x in sources],
+                'generated_sources': []
+            }],
+            'extra_files': [str(x) for x in extra_f],
+            'subproject': None, # Subprojects are not supported
+            'installed': i['installed']
+        }]
+
+    return tlist
+
+def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Any]:
+    tlist = []  # type: T.List[T.Any]
+    build_dir = builddata.environment.get_build_dir()
+    src_dir = builddata.environment.get_source_dir()
+
+    # Fast lookup table for installation files
+    install_lookuptable = {}
+    for i in installdata.targets:
+        out = [os.path.join(installdata.prefix, i.outdir, os.path.basename(i.fname))]
+        out += [os.path.join(installdata.prefix, i.outdir, os.path.basename(x)) for x in i.aliases]
+        install_lookuptable[os.path.basename(i.fname)] = [str(PurePath(x)) for x in out]
+
+    for (idname, target) in builddata.get_targets().items():
+        if not isinstance(target, build.Target):
+            raise RuntimeError('The target object in `builddata.get_targets()` is not of type `build.Target`. Please file a bug with this error message.')
+
+        outdir = get_target_dir(builddata.environment.coredata, target.subdir)
+        t = {
+            'name': target.get_basename(),
+            'id': idname,
+            'type': target.get_typename(),
+            'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, 'meson.build')),
+            'filename': [os.path.join(build_dir, outdir, x) for x in target.get_outputs()],
+            'build_by_default': target.build_by_default,
+            'target_sources': backend.get_introspection_data(idname, target),
+            'extra_files': [os.path.normpath(os.path.join(src_dir, x.subdir, x.fname)) for x in target.extra_files],
+            'subproject': target.subproject or None
+        }
+
+        if installdata and target.should_install():
+            t['installed'] = True
+            ifn = [install_lookuptable.get(x, [None]) for x in target.get_outputs()]
+            t['install_filename'] = [x for sublist in ifn for x in sublist]  # flatten the list
+        else:
+            t['installed'] = False
+        tlist.append(t)
+    return tlist
+
+def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+    subprojects = [i['name'] for i in intr.project_data['subprojects']]
+    return list_buildoptions(intr.coredata, subprojects)
+
+def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+    optlist = []  # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
+    subprojects = subprojects or []
+
+    dir_option_names = set(cdata.BUILTIN_DIR_OPTIONS)
+    test_option_names = {OptionKey('errorlogs'),
+                         OptionKey('stdsplit')}
+
+    dir_options: 'cdata.KeyedOptionDictType' = {}
+    test_options: 'cdata.KeyedOptionDictType' = {}
+    core_options: 'cdata.KeyedOptionDictType' = {}
+    for k, v in coredata.options.items():
+        if k in dir_option_names:
+            dir_options[k] = v
+        elif k in test_option_names:
+            test_options[k] = v
+        elif k.is_builtin():
+            core_options[k] = v
+            if not v.yielding:
+                for s in subprojects:
+                    core_options[k.evolve(subproject=s)] = v
+
+    def add_keys(options: 'cdata.KeyedOptionDictType', section: str) -> None:
+        for key, opt in sorted(options.items()):
+            optdict = {'name': str(key), 'value': opt.value, 'section': section,
+                       'machine': key.machine.get_lower_case_name() if coredata.is_per_machine_option(key) else 'any'}
+            if isinstance(opt, cdata.UserStringOption):
+                typestr = 'string'
+            elif isinstance(opt, cdata.UserBooleanOption):
+                typestr = 'boolean'
+            elif isinstance(opt, cdata.UserComboOption):
+                optdict['choices'] = opt.choices
+                typestr = 'combo'
+            elif isinstance(opt, cdata.UserIntegerOption):
+                typestr = 'integer'
+            elif isinstance(opt, cdata.UserArrayOption):
+                typestr = 'array'
+                if opt.choices:
+                    optdict['choices'] = opt.choices
+            else:
+                raise RuntimeError("Unknown option type")
+            optdict['type'] = typestr
+            optdict['description'] = opt.description
+            optlist.append(optdict)
+
+    add_keys(core_options, 'core')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_backend()}, 'backend')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_base()}, 'base')
+    add_keys(
+        {k: v for k, v in sorted(coredata.options.items(), key=lambda i: i[0].machine) if k.is_compiler()},
+        'compiler',
+    )
+    add_keys(dir_options, 'directory')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_project()}, 'user')
+    add_keys(test_options, 'test')
+    return optlist
+
+def find_buildsystem_files_list(src_dir: str) -> T.List[str]:
+    # I feel dirty about this. But only slightly.
+    filelist = []  # type: T.List[str]
+    for root, _, files in os.walk(src_dir):
+        for f in files:
+            if f == 'meson.build' or f == 'meson_options.txt':
+                filelist.append(os.path.relpath(os.path.join(root, f), src_dir))
+    return filelist
+
+def list_buildsystem_files(builddata: build.Build, interpreter: Interpreter) -> T.List[str]:
+    src_dir = builddata.environment.get_source_dir()
+    filelist = interpreter.get_build_def_files()  # type: T.List[str]
+    filelist = [PurePath(src_dir, x).as_posix() for x in filelist]
+    return filelist
+
+def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
+    result = []  # type: T.List[T.Dict[str, T.Union[str, bool]]]
+    for i in intr.dependencies:
+        keys = [
+            'name',
+            'required',
+            'version',
+            'has_fallback',
+            'conditional',
+        ]
+        result += [{k: v for k, v in i.items() if k in keys}]
+    return result
+
+def list_deps(coredata: cdata.CoreData) -> T.List[T.Dict[str, T.Union[str, T.List[str]]]]:
+    result = []  # type: T.List[T.Dict[str, T.Union[str, T.List[str]]]]
+    for d in coredata.deps.host.values():
+        if d.found():
+            result += [{'name': d.name,
+                        'version': d.get_version(),
+                        'compile_args': d.get_compile_args(),
+                        'link_args': d.get_link_args()}]
+    return result
+
+def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    result = []  # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
+    for t in testdata:
+        to = {}  # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
+        if isinstance(t.fname, str):
+            fname = [t.fname]
+        else:
+            fname = t.fname
+        to['cmd'] = fname + t.cmd_args
+        if isinstance(t.env, build.EnvironmentVariables):
+            to['env'] = t.env.get_env({})
+        else:
+            to['env'] = t.env
+        to['name'] = t.name
+        to['workdir'] = t.workdir
+        to['timeout'] = t.timeout
+        to['suite'] = t.suite
+        to['is_parallel'] = t.is_parallel
+        to['priority'] = t.priority
+        to['protocol'] = str(t.protocol)
+        to['depends'] = t.depends
+        result.append(to)
+    return result
+
+def list_tests(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    return get_test_list(testdata)
+
+def list_benchmarks(benchdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    return get_test_list(benchdata)
+
+def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+    result = {'version': builddata.project_version,
+              'descriptive_name': builddata.project_name,
+              'subproject_dir': builddata.subproject_dir}    # type: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]
+    subprojects = []
+    for k, v in builddata.subprojects.items():
+        c = {'name': k,
+             'version': v,
+             'descriptive_name': builddata.projects.get(k)}  # type: T.Dict[str, str]
+        subprojects.append(c)
+    result['subprojects'] = subprojects
+    return result
+
+def list_projinfo_from_source(intr: IntrospectionInterpreter) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+    sourcedir = intr.source_root
+    files = find_buildsystem_files_list(sourcedir)
+    files = [os.path.normpath(x) for x in files]
+
+    for i in intr.project_data['subprojects']:
+        basedir = os.path.join(intr.subproject_dir, i['name'])
+        i['buildsystem_files'] = [x for x in files if x.startswith(basedir)]
+        files = [x for x in files if not x.startswith(basedir)]
+
+    intr.project_data['buildsystem_files'] = files
+    intr.project_data['subproject_dir'] = intr.subproject_dir
+    return intr.project_data
+
+def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
+    if not results and not options.force_dict:
+        print('No command specified')
+        return 1
+    elif len(results) == 1 and not options.force_dict:
+        # Make sure to keep the existing output format for a single option
+        print(json.dumps(results[0][1], indent=indent))
+    else:
+        out = {}
+        for i in results:
+            out[i[0]] = i[1]
+        print(json.dumps(out, indent=indent))
+    return 0
+
+def get_infodir(builddir: T.Optional[str] = None) -> str:
+    infodir = 'meson-info'
+    if builddir is not None:
+        infodir = os.path.join(builddir, infodir)
+    return infodir
+
+def get_info_file(infodir: str, kind: T.Optional[str] = None) -> str:
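+    # e.g. get_info_file('meson-info') -> 'meson-info/meson-info.json'
+    #      get_info_file('meson-info', 'targets') -> 'meson-info/intro-targets.json'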
+    return os.path.join(infodir,
+                        'meson-info.json' if not kind else f'intro-{kind}.json')
+
+def load_info_file(infodir: str, kind: T.Optional[str] = None) -> T.Any:
+    with open(get_info_file(infodir, kind), encoding='utf-8') as fp:
+        return json.load(fp)
+
+def run(options: argparse.Namespace) -> int:
+    datadir = 'meson-private'
+    infodir = get_infodir(options.builddir)
+    if options.builddir is not None:
+        datadir = os.path.join(options.builddir, datadir)
+    indent = 4 if options.indent else None
+    results = []  # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+    sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
+    intro_types = get_meson_introspection_types(sourcedir=sourcedir)
+
+    if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
+        # Make sure that log entries in other parts of meson don't interfere with the JSON output
+        mlog.disable()
+        backend = backends.get_backend_from_name(options.backend)
+        assert backend is not None
+        intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+        intr.analyze()
+        # Re-enable logging just in case
+        mlog.enable()
+        for key, val in intro_types.items():
+            if (not options.all and not getattr(options, key, False)) or not val.no_bd:
+                continue
+            results += [(key, val.no_bd(intr))]
+        return print_results(options, results, indent)
+
+    try:
+        raw = load_info_file(infodir)
+        intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0')
+    except FileNotFoundError:
+        if not os.path.isdir(datadir) or not os.path.isdir(infodir):
+            print('Current directory is not a meson build directory.\n'
+                  'Please specify a valid build dir or change the working directory to it.')
+        else:
+            print('Introspection file {} does not exist.\n'
+                  'It is also possible that the build directory was generated with an old\n'
+                  'meson version. Please regenerate it in this case.'.format(get_info_file(infodir)))
+        return 1
+
+    vers_to_check = get_meson_introspection_required_version()
+    for i in vers_to_check:
+        if not mesonlib.version_compare(intro_vers, i):
+            print('Introspection version {} is not supported. '
+                  'The required version is: {}'
+                  .format(intro_vers, ' and '.join(vers_to_check)))
+            return 1
+
+    # Extract introspection information from JSON
+    for i in intro_types.keys():
+        if not intro_types[i].func:
+            continue
+        if not options.all and not getattr(options, i, False):
+            continue
+        try:
+            results += [(i, load_info_file(infodir, i))]
+        except FileNotFoundError:
+            print('Introspection file {} does not exist.'.format(get_info_file(infodir, i)))
+            return 1
+
+    return print_results(options, results, indent)
+
+updated_introspection_files = []  # type: T.List[str]
+
+def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], info_dir: str) -> None:
+    global updated_introspection_files
+    for i in intro_info:
+        out_file = os.path.join(info_dir, 'intro-{}.json'.format(i[0]))
+        tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+        with open(tmp_file, 'w', encoding='utf-8') as fp:
+            json.dump(i[1], fp)
+            fp.flush() # Not sure if this is needed
+        os.replace(tmp_file, out_file)
+        updated_introspection_files += [i[0]]
+
+def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None:
+    coredata = builddata.environment.get_coredata()
+    intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
+    intro_info = []  # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+
+    for key, val in intro_types.items():
+        if not val.func:
+            continue
+        intro_info += [(key, val.func())]
+
+    write_intro_info(intro_info, builddata.environment.info_dir)
+
+def update_build_options(coredata: cdata.CoreData, info_dir: str) -> None:
+    intro_info = [
+        ('buildoptions', list_buildoptions(coredata))
+    ]
+
+    write_intro_info(intro_info, info_dir)
+
+def split_version_string(version: str) -> T.Dict[str, T.Union[str, int]]:
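+    # e.g. split_version_string('0.57.1') ->
+    #      {'full': '0.57.1', 'major': 0, 'minor': 57, 'patch': 1}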
+    vers_list = version.split('.')
+    return {
+        'full': version,
+        'major': int(vers_list[0] if len(vers_list) > 0 else 0),
+        'minor': int(vers_list[1] if len(vers_list) > 1 else 0),
+        'patch': int(vers_list[2] if len(vers_list) > 2 else 0)
+    }
+
+def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False) -> None:
+    global updated_introspection_files
+    info_dir = builddata.environment.info_dir
+    info_file = get_meson_info_file(info_dir)
+    intro_types = get_meson_introspection_types()
+    intro_info = {}
+
+    for i in intro_types.keys():
+        if not intro_types[i].func:
+            continue
+        intro_info[i] = {
+            'file': f'intro-{i}.json',
+            'updated': i in updated_introspection_files
+        }
+
+    info_data = {
+        'meson_version': split_version_string(cdata.version),
+        'directories': {
+            'source': builddata.environment.get_source_dir(),
+            'build': builddata.environment.get_build_dir(),
+            'info': info_dir,
+        },
+        'introspection': {
+            'version': split_version_string(get_meson_introspection_version()),
+            'information': intro_info,
+        },
+        'build_files_updated': build_files_updated,
+    }
+
+    if errors:
+        info_data['error'] = True
+        info_data['error_list'] = [x if isinstance(x, str) else str(x) for x in errors]
+    else:
+        info_data['error'] = False
+
+    # Write the data to disc
+    tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+    with open(tmp_file, 'w', encoding='utf-8') as fp:
+        json.dump(info_data, fp)
+        fp.flush()
+    os.replace(tmp_file, info_file)
diff --git a/meson/mesonbuild/mlog.py b/meson/mesonbuild/mlog.py
new file mode 100644
index 000000000..06d8a0b49
--- /dev/null
+++ b/meson/mesonbuild/mlog.py
@@ -0,0 +1,395 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import io
+import sys
+import time
+import platform
+import typing as T
+from contextlib import contextmanager
+from pathlib import Path
+
+if T.TYPE_CHECKING:
+    from ._typing import StringProtocol, SizedStringProtocol
+
+"""This is (mostly) a standalone module used to write logging
+information about Meson runs. Some output goes to screen,
+some to logging dir and some goes to both."""
+
+def _windows_ansi() -> bool:
+    # windll only exists on windows, so mypy will get mad
+    from ctypes import windll, byref  # type: ignore
+    from ctypes.wintypes import DWORD
+
+    kernel = windll.kernel32
+    stdout = kernel.GetStdHandle(-11)
+    mode = DWORD()
+    if not kernel.GetConsoleMode(stdout, byref(mode)):
+        return False
+    # ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0x4
+    # If the call to enable VT processing fails (returns 0), we fall back to
+    # the original behavior.
+    return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
+
+def colorize_console() -> bool:
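+    # The decision is cached as an attribute on sys.stdout; setup_console()
+    # clears that cache (on Windows) after a subprocess may have changed the
+    # console mode.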
+    _colorize_console = getattr(sys.stdout, 'colorize_console', None)  # type: bool
+    if _colorize_console is not None:
+        return _colorize_console
+
+    try:
+        if platform.system().lower() == 'windows':
+            _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
+        else:
+            _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
+    except Exception:
+        _colorize_console = False
+
+    sys.stdout.colorize_console = _colorize_console  # type: ignore[attr-defined]
+    return _colorize_console
+
+def setup_console() -> None:
+    # on Windows, a subprocess might call SetConsoleMode() on the console
+    # connected to stdout and turn off ANSI escape processing. Call this after
+    # running a subprocess to ensure we turn it on again.
+    if platform.system().lower() == 'windows':
+        try:
+            delattr(sys.stdout, 'colorize_console')
+        except AttributeError:
+            pass
+
+log_dir = None               # type: T.Optional[str]
+log_file = None              # type: T.Optional[T.TextIO]
+log_fname = 'meson-log.txt'  # type: str
+log_depth = []               # type: T.List[str]
+log_timestamp_start = None   # type: T.Optional[float]
+log_fatal_warnings = False   # type: bool
+log_disable_stdout = False   # type: bool
+log_errors_only = False      # type: bool
+_in_ci = 'CI' in os.environ  # type: bool
+_logged_once = set()         # type: T.Set[T.Tuple[str, ...]]
+log_warnings_counter = 0     # type: int
+
+def disable() -> None:
+    global log_disable_stdout
+    log_disable_stdout = True
+
+def enable() -> None:
+    global log_disable_stdout
+    log_disable_stdout = False
+
+def set_quiet() -> None:
+    global log_errors_only
+    log_errors_only = True
+
+def set_verbose() -> None:
+    global log_errors_only
+    log_errors_only = False
+
+def initialize(logdir: str, fatal_warnings: bool = False) -> None:
+    global log_dir, log_file, log_fatal_warnings
+    log_dir = logdir
+    log_file = open(os.path.join(logdir, log_fname), 'w', encoding='utf-8')
+    log_fatal_warnings = fatal_warnings
+
+def set_timestamp_start(start: float) -> None:
+    global log_timestamp_start
+    log_timestamp_start = start
+
+def shutdown() -> T.Optional[str]:
+    global log_file
+    if log_file is not None:
+        path = log_file.name
+        exception_around_goer = log_file
+        log_file = None
+        exception_around_goer.close()
+        return path
+    return None
+
+class AnsiDecorator:
+    plain_code = "\033[0m"
+
+    def __init__(self, text: str, code: str, quoted: bool = False):
+        self.text = text
+        self.code = code
+        self.quoted = quoted
+
+    def get_text(self, with_codes: bool) -> str:
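+        # e.g. AnsiDecorator('hi', "\033[1m").get_text(True) returns
+        # '\033[1mhi\033[0m', while get_text(False) returns plain 'hi'.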
+        text = self.text
+        if with_codes and self.code:
+            text = self.code + self.text + AnsiDecorator.plain_code
+        if self.quoted:
+            text = f'"{text}"'
+        return text
+
+    def __len__(self) -> int:
+        return len(self.text)
+
+    def __str__(self) -> str:
+        return self.get_text(colorize_console())
+
+TV_Loggable = T.Union[str, AnsiDecorator, 'StringProtocol']
+TV_LoggableList = T.List[TV_Loggable]
+
+class AnsiText:
+    def __init__(self, *args: 'SizedStringProtocol'):
+        self.args = args
+
+    def __len__(self) -> int:
+        return sum(len(x) for x in self.args)
+
+    def __str__(self) -> str:
+        return ''.join(str(x) for x in self.args)
+
+
+def bold(text: str, quoted: bool = False) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1m", quoted=quoted)
+
+def plain(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "")
+
+def red(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;31m")
+
+def green(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;32m")
+
+def yellow(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;33m")
+
+def blue(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;34m")
+
+def cyan(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;36m")
+
+def normal_red(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[31m")
+
+def normal_green(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[32m")
+
+def normal_yellow(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[33m")
+
+def normal_blue(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[34m")
+
+def normal_cyan(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[36m")
+
+# This really should be AnsiDecorator or anything that implements
+# __str__(), but that requires protocols from typing_extensions
+def process_markup(args: T.Sequence[TV_Loggable], keep: bool) -> T.List[str]:
+    arr = []  # type: T.List[str]
+    if log_timestamp_start is not None:
+        arr = ['[{:.3f}]'.format(time.monotonic() - log_timestamp_start)]
+    for arg in args:
+        if arg is None:
+            continue
+        if isinstance(arg, str):
+            arr.append(arg)
+        elif isinstance(arg, AnsiDecorator):
+            arr.append(arg.get_text(keep))
+        else:
+            arr.append(str(arg))
+    return arr
+
+def force_print(*args: str, nested: str, **kwargs: T.Any) -> None:
+    if log_disable_stdout:
+        return
+    iostr = io.StringIO()
+    kwargs['file'] = iostr
+    print(*args, **kwargs)
+
+    raw = iostr.getvalue()
+    if log_depth:
+        prepend = log_depth[-1] + '| ' if nested else ''
+        lines = []
+        for l in raw.split('\n'):
+            l = l.strip()
+            lines.append(prepend + l if l else '')
+        raw = '\n'.join(lines)
+
+    # _Something_ is going to get printed.
+    try:
+        print(raw, end='')
+    except UnicodeEncodeError:
+        cleaned = raw.encode('ascii', 'replace').decode('ascii')
+        print(cleaned, end='')
+
+# We really want a heterogeneous dict for this, but that's in typing_extensions
+def debug(*args: TV_Loggable, **kwargs: T.Any) -> None:
+    arr = process_markup(args, False)
+    if log_file is not None:
+        print(*arr, file=log_file, **kwargs)
+        log_file.flush()
+
+def _debug_log_cmd(cmd: str, args: T.List[str]) -> None:
+    if not _in_ci:
+        return
+    args = [f'"{x}"' for x in args]  # Quote all args, just in case
+    debug('!meson_ci!/{} {}'.format(cmd, ' '.join(args)))
+
+def cmd_ci_include(file: str) -> None:
+    _debug_log_cmd('ci_include', [file])
+
+
+def log(*args: TV_Loggable, is_error: bool = False,
+        once: bool = False, **kwargs: T.Any) -> None:
+    if once:
+        return log_once(*args, is_error=is_error, **kwargs)
+    return _log(*args, is_error=is_error, **kwargs)
+
+
+def _log(*args: TV_Loggable, is_error: bool = False,
+         **kwargs: T.Any) -> None:
+    nested = kwargs.pop('nested', True)
+    arr = process_markup(args, False)
+    if log_file is not None:
+        print(*arr, file=log_file, **kwargs)
+        log_file.flush()
+    if colorize_console():
+        arr = process_markup(args, True)
+    if not log_errors_only or is_error:
+        force_print(*arr, nested=nested, **kwargs)
+
+def log_once(*args: TV_Loggable, is_error: bool = False,
+             **kwargs: T.Any) -> None:
+    """Log variant that only prints a given message one time per meson invocation.
+
+    Ansi decorated values are compared by the text they wrap, without
+    regard for the AnsiDecorator itself.
+    """
+    def to_str(x: TV_Loggable) -> str:
+        if isinstance(x, str):
+            return x
+        if isinstance(x, AnsiDecorator):
+            return x.text
+        return str(x)
+    t = tuple(to_str(a) for a in args)
+    if t in _logged_once:
+        return
+    _logged_once.add(t)
+    _log(*args, is_error=is_error, **kwargs)
+
+# This isn't strictly correct. What we really want here is something like:
+# class StringProtocol(typing_extensions.Protocol):
+#
+#      def __str__(self) -> str: ...
+#
+# This would more accurately embody what this function can handle, but we
+# don't have that yet, so instead we'll do some casting to work around it
+def get_error_location_string(fname: str, lineno: str) -> str:
+    return f'{fname}:{lineno}:'
+
+def _log_error(severity: str, *rargs: TV_Loggable,
+               once: bool = False, fatal: bool = True, **kwargs: T.Any) -> None:
+    from .mesonlib import MesonException, relpath
+
+    # The typing requirements here are non-obvious. Lists are invariant,
+    # therefore T.List[A] and T.List[T.Union[A, B]] are not able to be joined
+    if severity == 'notice':
+        label = [bold('NOTICE:')]  # type: TV_LoggableList
+    elif severity == 'warning':
+        label = [yellow('WARNING:')]
+    elif severity == 'error':
+        label = [red('ERROR:')]
+    elif severity == 'deprecation':
+        label = [red('DEPRECATION:')]
+    else:
+        raise MesonException('Invalid severity ' + severity)
+    # rargs is a tuple, not a list
+    args = label + list(rargs)
+
+    location = kwargs.pop('location', None)
+    if location is not None:
+        location_file = relpath(location.filename, os.getcwd())
+        location_str = get_error_location_string(location_file, location.lineno)
+        # Unions are frankly awful, and we have to T.cast here to get mypy
+        # to understand that the list concatenation is safe
+        location_list = T.cast(TV_LoggableList, [location_str])
+        args = location_list + args
+
+    log(*args, once=once, **kwargs)
+
+    global log_warnings_counter
+    log_warnings_counter += 1
+
+    if log_fatal_warnings and fatal:
+        raise MesonException("Fatal warnings enabled, aborting")
+
+def error(*args: TV_Loggable, **kwargs: T.Any) -> None:
+    return _log_error('error', *args, **kwargs, is_error=True)
+
+def warning(*args: TV_Loggable, **kwargs: T.Any) -> None:
+    return _log_error('warning', *args, **kwargs, is_error=True)
+
+def deprecation(*args: TV_Loggable, **kwargs: T.Any) -> None:
+    return _log_error('deprecation', *args, **kwargs, is_error=True)
+
+def notice(*args: TV_Loggable, **kwargs: T.Any) -> None:
+    return _log_error('notice', *args, **kwargs, is_error=False)
+
+def get_relative_path(target: Path, current: Path) -> Path:
+    """Get the path to target from current"""
+    # Go up "current" until we find a common ancestor to target
+    acc = ['.']
+    for part in [current, *current.parents]:
+        try:
+            path = target.relative_to(part)
+            return Path(*acc, path)
+        except ValueError:
+            pass
+        acc += ['..']
+
+    # we failed, should not get here
+    return target
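+
+# A worked example, assuming POSIX-style relative paths: starting from the
+# directory 'a/d', the target 'a/b/c.txt' is reached by going up once and back
+# down, so get_relative_path(Path('a/b/c.txt'), Path('a/d')) == Path('../b/c.txt').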
+
+def exception(e: Exception, prefix: T.Optional[AnsiDecorator] = None) -> None:
+    if prefix is None:
+        prefix = red('ERROR:')
+    log()
+    args = []  # type: T.List[T.Union[AnsiDecorator, str]]
+    if all(getattr(e, a, None) is not None for a in ['file', 'lineno', 'colno']):
+        # Mypy doesn't follow hasattr, and it's pretty easy to visually inspect
+        # that this is correct, so we'll just ignore it.
+        path = get_relative_path(Path(e.file), Path(os.getcwd()))  # type: ignore
+        args.append(f'{path}:{e.lineno}:{e.colno}:')  # type: ignore
+    if prefix:
+        args.append(prefix)
+    args.append(str(e))
+    log(*args)
+
+# Format a list for logging purposes as a string. It separates
+# all but the last item with commas, and the last with 'and'.
+def format_list(input_list: T.List[str]) -> str:
+    l = len(input_list)
+    if l > 2:
+        return ' and '.join([', '.join(input_list[:-1]), input_list[-1]])
+    elif l == 2:
+        return ' and '.join(input_list)
+    elif l == 1:
+        return input_list[0]
+    else:
+        return ''
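+
+# A few example values, purely to illustrate the joining rule:
+#
+#     format_list([])               == ''
+#     format_list(['a'])            == 'a'
+#     format_list(['a', 'b'])       == 'a and b'
+#     format_list(['a', 'b', 'c'])  == 'a, b and c'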
+
+@contextmanager
+def nested(name: str = '') -> T.Generator[None, None, None]:
+    global log_depth
+    log_depth.append(name)
+    try:
+        yield
+    finally:
+        log_depth.pop()
diff --git a/meson/mesonbuild/modules/__init__.py b/meson/mesonbuild/modules/__init__.py
new file mode 100644
index 000000000..737a01c2f
--- /dev/null
+++ b/meson/mesonbuild/modules/__init__.py
@@ -0,0 +1,212 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the base classes and shared state objects used by all
+# Meson modules.
+
+import os
+import typing as T
+
+from .. import build, mesonlib
+from ..mesonlib import relpath, HoldableObject
+from ..interpreterbase.decorators import noKwargs, noPosargs
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+    from ..interpreterbase import TYPE_var, TYPE_kwargs
+    from ..programs import ExternalProgram
+
+class ModuleState:
+    """Object passed to all module methods.
+
+    This is a work-in-progress API provided to modules; it should be extended to
+    have everything modules need so that they do not touch any other part of
+    Meson's internal APIs.
+    """
+
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        # Keep it private, it should be accessed only through methods.
+        self._interpreter = interpreter
+
+        self.source_root = interpreter.environment.get_source_dir()
+        self.build_to_src = relpath(interpreter.environment.get_source_dir(),
+                                    interpreter.environment.get_build_dir())
+        self.subproject = interpreter.subproject
+        self.subdir = interpreter.subdir
+        self.current_lineno = interpreter.current_lineno
+        self.environment = interpreter.environment
+        self.project_name = interpreter.build.project_name
+        self.project_version = interpreter.build.dep_manifest[interpreter.active_projectname]
+        # The backend object is under-used right now, but we will need it:
+        # https://github.com/mesonbuild/meson/issues/1419
+        self.backend = interpreter.backend
+        self.targets = interpreter.build.targets
+        self.data = interpreter.build.data
+        self.headers = interpreter.build.get_headers()
+        self.man = interpreter.build.get_man()
+        self.global_args = interpreter.build.global_args.host
+        self.project_args = interpreter.build.projects_args.host.get(interpreter.subproject, {})
+        self.build_machine = interpreter.builtin['build_machine'].held_object
+        self.host_machine = interpreter.builtin['host_machine'].held_object
+        self.target_machine = interpreter.builtin['target_machine'].held_object
+        self.current_node = interpreter.current_node
+
+    def get_include_args(self, include_dirs: T.Iterable[T.Union[str, build.IncludeDirs]], prefix: str = '-I') -> T.List[str]:
+        if not include_dirs:
+            return []
+
+        srcdir = self.environment.get_source_dir()
+        builddir = self.environment.get_build_dir()
+
+        dirs_str: T.List[str] = []
+        for dirs in include_dirs:
+            if isinstance(dirs, str):
+                dirs_str += [f'{prefix}{dirs}']
+                continue
+
+            # Should be build.IncludeDirs object.
+            basedir = dirs.get_curdir()
+            for d in dirs.get_incdirs():
+                expdir = os.path.join(basedir, d)
+                srctreedir = os.path.join(srcdir, expdir)
+                buildtreedir = os.path.join(builddir, expdir)
+                dirs_str += [f'{prefix}{buildtreedir}',
+                             f'{prefix}{srctreedir}']
+            for d in dirs.get_extra_build_dirs():
+                dirs_str += [f'{prefix}{d}']
+
+        return dirs_str
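+
+    # Rough illustration with hypothetical paths (the real values come from the
+    # configured source and build directories): a plain string 'include' yields
+    # ['-Iinclude'], while an IncludeDirs object for 'include' under subdir
+    # 'sub' expands to roughly
+    #     ['-I<builddir>/sub/include', '-I<srcdir>/sub/include']
+    # i.e. the build-tree directory is listed before the source-tree one.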
+
+    def find_program(self, prog: T.Union[str, T.List[str]], required: bool = True,
+                     version_func: T.Optional[T.Callable[['ExternalProgram'], str]] = None,
+                     wanted: T.Optional[str] = None) -> 'ExternalProgram':
+        return self._interpreter.find_program_impl(prog, required=required, version_func=version_func, wanted=wanted)
+
+    def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'ExternalProgram', mesonlib.File]],
+             workdir: T.Optional[str] = None,
+             env: T.Union[T.List[str], T.Dict[str, str], str] = None,
+             depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]] = None) -> None:
+        kwargs = {'workdir': workdir,
+                  'env': env,
+                  'depends': depends,
+                  }
+        # TODO: Use interpreter internal API, but we need to go through @typed_kwargs
+        self._interpreter.func_test(self.current_node, args, kwargs)
+
+
+class ModuleObject(HoldableObject):
+    """Base class for all objects returned by modules
+    """
+    def __init__(self) -> None:
+        self.methods: T.Dict[
+            str,
+            T.Callable[[ModuleState, T.List['TYPE_var'], 'TYPE_kwargs'], T.Union[ModuleReturnValue, 'TYPE_var']]
+        ] = {}
+
+
+class MutableModuleObject(ModuleObject):
+    pass
+
+
+# FIXME: Port all modules to stop using self.interpreter and use API on
+# ModuleState instead. Modules should stop using this class and instead use
+# ModuleObject base class.
+class ExtensionModule(ModuleObject):
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__()
+        self.interpreter = interpreter
+        self.methods.update({
+            'found': self.found_method,
+        })
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self.found()
+
+    @staticmethod
+    def found() -> bool:
+        return True
+
+
+class NewExtensionModule(ModuleObject):
+
+    """Class for modern modules
+
+    provides the found method.
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.methods.update({
+            'found': self.found_method,
+        })
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self.found()
+
+    @staticmethod
+    def found() -> bool:
+        return True
+
+
+class NotFoundExtensionModule(NewExtensionModule):
+
+    """Class for modern modules
+
+    provides the found method.
+    """
+
+    @staticmethod
+    def found() -> bool:
+        return False
+
+
+def is_module_library(fname):
+    '''
+    Check if the file is a library-like file generated by a module-specific
+    target, such as GirTarget or TypelibTarget
+    '''
+    if hasattr(fname, 'fname'):
+        fname = fname.fname
+    suffix = fname.split('.')[-1]
+    return suffix in ('gir', 'typelib')
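+
+# For example, only the suffix matters:
+#
+#     is_module_library('Foo-1.0.gir')      -> True
+#     is_module_library('Foo-1.0.typelib')  -> True
+#     is_module_library('libfoo.so')        -> False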
+
+
+class ModuleReturnValue:
+    def __init__(self, return_value: T.Optional['TYPE_var'], new_objects: T.List['TYPE_var']) -> None:
+        self.return_value = return_value
+        assert(isinstance(new_objects, list))
+        self.new_objects = new_objects
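+
+    # Typical usage inside a module method (a minimal sketch; 'res' stands for
+    # whatever build object the method just created, e.g. a build.Data):
+    #
+    #     res = ...  # newly created target or data object
+    #     return ModuleReturnValue(res, [res])
+    #
+    # The first argument is handed back to the meson.build caller, the second
+    # lists the new objects to add to the build definition.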
+
+class GResourceTarget(build.CustomTarget):
+    def __init__(self, name, subdir, subproject, kwargs):
+        super().__init__(name, subdir, subproject, kwargs)
+
+class GResourceHeaderTarget(build.CustomTarget):
+    def __init__(self, name, subdir, subproject, kwargs):
+        super().__init__(name, subdir, subproject, kwargs)
+
+class GirTarget(build.CustomTarget):
+    def __init__(self, name, subdir, subproject, kwargs):
+        super().__init__(name, subdir, subproject, kwargs)
+
+class TypelibTarget(build.CustomTarget):
+    def __init__(self, name, subdir, subproject, kwargs):
+        super().__init__(name, subdir, subproject, kwargs)
+
+class VapiTarget(build.CustomTarget):
+    def __init__(self, name, subdir, subproject, kwargs):
+        super().__init__(name, subdir, subproject, kwargs)
diff --git a/meson/mesonbuild/modules/cmake.py b/meson/mesonbuild/modules/cmake.py
new file mode 100644
index 000000000..cc259dcdc
--- /dev/null
+++ b/meson/mesonbuild/modules/cmake.py
@@ -0,0 +1,406 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import re
+import os, os.path, pathlib
+import shutil
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleObject
+
+from .. import build, mesonlib, mlog, dependencies
+from ..cmake import SingleTargetOptions, TargetOptions, cmake_defines_to_args
+from ..interpreter import ConfigurationDataObject, SubprojectHolder
+from ..interpreterbase import (
+    FeatureNew,
+    FeatureNewKwargs,
+    FeatureDeprecatedKwargs,
+
+    stringArgs,
+    permittedKwargs,
+    noPosargs,
+    noKwargs,
+
+    InvalidArguments,
+    InterpreterException,
+)
+from ..programs import ExternalProgram
+
+
+COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
+
+# Taken from https://github.com/Kitware/CMake/blob/master/Modules/CMakePackageConfigHelpers.cmake
+PACKAGE_INIT_BASE = '''
+####### Expanded from \\@PACKAGE_INIT\\@ by configure_package_config_file() #######
+####### Any changes to this file will be overwritten by the next CMake run ####
+####### The input file was @inputFileName@ ########
+
+get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/@PACKAGE_RELATIVE_PATH@" ABSOLUTE)
+'''
+PACKAGE_INIT_EXT = '''
+# Use original install prefix when loaded through a "/usr move"
+# cross-prefix symbolic link such as /lib -> /usr/lib.
+get_filename_component(_realCurr "${CMAKE_CURRENT_LIST_DIR}" REALPATH)
+get_filename_component(_realOrig "@absInstallDir@" REALPATH)
+if(_realCurr STREQUAL _realOrig)
+  set(PACKAGE_PREFIX_DIR "@installPrefix@")
+endif()
+unset(_realOrig)
+unset(_realCurr)
+'''
+PACKAGE_INIT_SET_AND_CHECK = '''
+macro(set_and_check _var _file)
+  set(${_var} "${_file}")
+  if(NOT EXISTS "${_file}")
+    message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
+  endif()
+endmacro()
+
+####################################################################################
+'''
+
+class CMakeSubproject(ModuleObject):
+    def __init__(self, subp, pv):
+        assert(isinstance(subp, SubprojectHolder))
+        assert(hasattr(subp, 'cm_interpreter'))
+        super().__init__()
+        self.subp = subp
+        self.methods.update({'get_variable': self.get_variable,
+                             'dependency': self.dependency,
+                             'include_directories': self.include_directories,
+                             'target': self.target,
+                             'target_type': self.target_type,
+                             'target_list': self.target_list,
+                             'found': self.found_method,
+                             })
+
+    def _args_to_info(self, args):
+        if len(args) != 1:
+            raise InterpreterException('Exactly one argument is required.')
+
+        tgt = args[0]
+        res = self.subp.cm_interpreter.target_info(tgt)
+        if res is None:
+            raise InterpreterException(f'The CMake target {tgt} does not exist\n' +
+                                       '  Use the following command in your meson.build to list all available targets:\n\n' +
+                                       '    message(\'CMake targets:\\n - \' + \'\\n - \'.join(<cmake_subproject>.target_list()))')
+
+        # Make sure that all keys are present (if not this is a bug)
+        assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']]))
+        return res
+
+    @noKwargs
+    @stringArgs
+    def get_variable(self, state, args, kwargs):
+        return self.subp.get_variable_method(args, kwargs)
+
+    @FeatureNewKwargs('dependency', '0.56.0', ['include_type'])
+    @permittedKwargs({'include_type'})
+    @stringArgs
+    def dependency(self, state, args, kwargs):
+        info = self._args_to_info(args)
+        if info['func'] == 'executable':
+            raise InvalidArguments(f'{args[0]} is an executable and does not support the dependency() method. Use target() instead.')
+        orig = self.get_variable(state, [info['dep']], {})
+        assert isinstance(orig, dependencies.Dependency)
+        actual = orig.include_type
+        if 'include_type' in kwargs and kwargs['include_type'] != actual:
+            mlog.debug('Current include type is {}. Converting to requested {}'.format(actual, kwargs['include_type']))
+            return orig.generate_system_dependency(kwargs['include_type'])
+        return orig
+
+    @noKwargs
+    @stringArgs
+    def include_directories(self, state, args, kwargs):
+        info = self._args_to_info(args)
+        return self.get_variable(state, [info['inc']], kwargs)
+
+    @noKwargs
+    @stringArgs
+    def target(self, state, args, kwargs):
+        info = self._args_to_info(args)
+        return self.get_variable(state, [info['tgt']], kwargs)
+
+    @noKwargs
+    @stringArgs
+    def target_type(self, state, args, kwargs):
+        info = self._args_to_info(args)
+        return info['func']
+
+    @noPosargs
+    @noKwargs
+    def target_list(self, state, args, kwargs):
+        return self.subp.cm_interpreter.target_list()
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('CMakeSubproject.found()', '0.53.2')
+    def found_method(self, state, args, kwargs):
+        return self.subp is not None
+
+
+class CMakeSubprojectOptions(ModuleObject):
+    def __init__(self) -> None:
+        super().__init__()
+        self.cmake_options = []  # type: T.List[str]
+        self.target_options = TargetOptions()
+
+        self.methods.update(
+            {
+                'add_cmake_defines': self.add_cmake_defines,
+                'set_override_option': self.set_override_option,
+                'set_install': self.set_install,
+                'append_compile_args': self.append_compile_args,
+                'append_link_args': self.append_link_args,
+                'clear': self.clear,
+            }
+        )
+
+    def _get_opts(self, kwargs: dict) -> SingleTargetOptions:
+        if 'target' in kwargs:
+            return self.target_options[kwargs['target']]
+        return self.target_options.global_options
+
+    @noKwargs
+    def add_cmake_defines(self, state, args, kwargs) -> None:
+        self.cmake_options += cmake_defines_to_args(args)
+
+    @stringArgs
+    @permittedKwargs({'target'})
+    def set_override_option(self, state, args, kwargs) -> None:
+        if len(args) != 2:
+            raise InvalidArguments('set_override_option takes exactly 2 positional arguments')
+        self._get_opts(kwargs).set_opt(args[0], args[1])
+
+    @permittedKwargs({'target'})
+    def set_install(self, state, args, kwargs) -> None:
+        if len(args) != 1 or not isinstance(args[0], bool):
+            raise InvalidArguments('set_install takes exactly 1 boolean argument')
+        self._get_opts(kwargs).set_install(args[0])
+
+    @stringArgs
+    @permittedKwargs({'target'})
+    def append_compile_args(self, state, args, kwargs) -> None:
+        if len(args) < 2:
+            raise InvalidArguments('append_compile_args takes at least 2 positional arguments')
+        self._get_opts(kwargs).append_args(args[0], args[1:])
+
+    @stringArgs
+    @permittedKwargs({'target'})
+    def append_link_args(self, state, args, kwargs) -> None:
+        if not args:
+            raise InvalidArguments('append_link_args takes at least 1 positional argument')
+        self._get_opts(kwargs).append_link_args(args)
+
+    @noPosargs
+    @noKwargs
+    def clear(self, state, args, kwargs) -> None:
+        self.cmake_options.clear()
+        self.target_options = TargetOptions()
+
+
+class CmakeModule(ExtensionModule):
+    cmake_detected = False
+    cmake_root = None
+
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'write_basic_package_version_file': self.write_basic_package_version_file,
+            'configure_package_config_file': self.configure_package_config_file,
+            'subproject': self.subproject,
+            'subproject_options': self.subproject_options,
+        })
+
+    def detect_voidp_size(self, env):
+        compilers = env.coredata.compilers.host
+        compiler = compilers.get('c', None)
+        if not compiler:
+            compiler = compilers.get('cpp', None)
+
+        if not compiler:
+            raise mesonlib.MesonException('Requires a C or C++ compiler to compute sizeof(void *).')
+
+        return compiler.sizeof('void *', '', env)
+
+    def detect_cmake(self):
+        if self.cmake_detected:
+            return True
+
+        cmakebin = ExternalProgram('cmake', silent=False)
+        p, stdout, stderr = mesonlib.Popen_safe(cmakebin.get_command() + ['--system-information', '-G', 'Ninja'])[0:3]
+        if p.returncode != 0:
+            mlog.log(f'error retrieving cmake information: returnCode={p.returncode} stdout={stdout} stderr={stderr}')
+            return False
+
+        match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', stdout.strip())
+        if not match:
+            mlog.log('unable to determine cmake root')
+            return False
+
+        cmakePath = pathlib.PurePath(match.group(1))
+        self.cmake_root = os.path.join(*cmakePath.parts)
+        self.cmake_detected = True
+        return True
+
+    @permittedKwargs({'version', 'name', 'compatibility', 'install_dir'})
+    def write_basic_package_version_file(self, state, _args, kwargs):
+        version = kwargs.get('version', None)
+        if not isinstance(version, str):
+            raise mesonlib.MesonException('Version must be specified.')
+
+        name = kwargs.get('name', None)
+        if not isinstance(name, str):
+            raise mesonlib.MesonException('Name not specified.')
+
+        compatibility = kwargs.get('compatibility', 'AnyNewerVersion')
+        if not isinstance(compatibility, str):
+            raise mesonlib.MesonException('compatibility must be a string.')
+        if compatibility not in COMPATIBILITIES:
+            raise mesonlib.MesonException(f'compatibility must be one of: {", ".join(COMPATIBILITIES)}.')
+
+        if not self.detect_cmake():
+            raise mesonlib.MesonException('Unable to find cmake')
+
+        pkgroot = kwargs.get('install_dir', None)
+        if pkgroot is None:
+            pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)
+        if not isinstance(pkgroot, str):
+            raise mesonlib.MesonException('Install_dir must be a string.')
+
+        template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in')
+        if not os.path.exists(template_file):
+            raise mesonlib.MesonException(f'your cmake installation doesn\'t support the {compatibility} compatibility')
+
+        version_file = os.path.join(state.environment.scratch_dir, f'{name}ConfigVersion.cmake')
+
+        conf = {
+            'CVF_VERSION': (version, ''),
+            'CMAKE_SIZEOF_VOID_P': (str(self.detect_voidp_size(state.environment)), '')
+        }
+        mesonlib.do_conf_file(template_file, version_file, conf, 'meson')
+
+        res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), version_file)], pkgroot, None, state.subproject)
+        return ModuleReturnValue(res, [res])
+
+    def create_package_file(self, infile, outfile, PACKAGE_RELATIVE_PATH, extra, confdata):
+        package_init = PACKAGE_INIT_BASE.replace('@PACKAGE_RELATIVE_PATH@', PACKAGE_RELATIVE_PATH)
+        package_init = package_init.replace('@inputFileName@', infile)
+        package_init += extra
+        package_init += PACKAGE_INIT_SET_AND_CHECK
+
+        try:
+            with open(infile, encoding='utf-8') as fin:
+                data = fin.readlines()
+        except Exception as e:
+            raise mesonlib.MesonException('Could not read input file {}: {}'.format(infile, str(e)))
+
+        result = []
+        regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+        for line in data:
+            line = line.replace('@PACKAGE_INIT@', package_init)
+            line, _missing = mesonlib.do_replacement(regex, line, 'meson', confdata)
+
+            result.append(line)
+
+        outfile_tmp = outfile + "~"
+        with open(outfile_tmp, "w", encoding='utf-8') as fout:
+            fout.writelines(result)
+
+        shutil.copymode(infile, outfile_tmp)
+        mesonlib.replace_if_different(outfile, outfile_tmp)
+
+    @permittedKwargs({'input', 'name', 'install_dir', 'configuration'})
+    def configure_package_config_file(self, state, args, kwargs):
+        if args:
+            raise mesonlib.MesonException('configure_package_config_file takes only keyword arguments.')
+
+        if 'input' not in kwargs:
+            raise mesonlib.MesonException('configure_package_config_file requires "input" keyword.')
+        inputfile = kwargs['input']
+        if isinstance(inputfile, list):
+            if len(inputfile) != 1:
+                m = "Keyword argument 'input' requires exactly one file"
+                raise mesonlib.MesonException(m)
+            inputfile = inputfile[0]
+        if not isinstance(inputfile, (str, mesonlib.File)):
+            raise mesonlib.MesonException("input must be a string or a file")
+        if isinstance(inputfile, str):
+            inputfile = mesonlib.File.from_source_file(state.environment.source_dir, state.subdir, inputfile)
+
+        ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir)
+
+        if 'name' not in kwargs:
+            raise mesonlib.MesonException('"name" not specified.')
+        name = kwargs['name']
+
+        (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, f'{name}Config.cmake'))
+        ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
+
+        install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name))
+        if not isinstance(install_dir, str):
+            raise mesonlib.MesonException('"install_dir" must be a string.')
+
+        if 'configuration' not in kwargs:
+            raise mesonlib.MesonException('"configuration" not specified.')
+        conf = kwargs['configuration']
+        if not isinstance(conf, ConfigurationDataObject):
+            raise mesonlib.MesonException('Argument "configuration" is not of type configuration_data')
+
+        prefix = state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+        abs_install_dir = install_dir
+        if not os.path.isabs(abs_install_dir):
+            abs_install_dir = os.path.join(prefix, install_dir)
+
+        PACKAGE_RELATIVE_PATH = os.path.relpath(prefix, abs_install_dir)
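+        # For example (hypothetical values): with prefix '/usr' and install_dir
+        # 'lib/cmake/Foo', abs_install_dir is '/usr/lib/cmake/Foo' and
+        # PACKAGE_RELATIVE_PATH becomes '../../..', i.e. the way back from the
+        # installed config file's directory to the install prefix.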
+        extra = ''
+        if re.match('^(/usr)?/lib(64)?/.+', abs_install_dir):
+            extra = PACKAGE_INIT_EXT.replace('@absInstallDir@', abs_install_dir)
+            extra = extra.replace('@installPrefix@', prefix)
+
+        self.create_package_file(ifile_abs, ofile_abs, PACKAGE_RELATIVE_PATH, extra, conf.conf_data)
+        conf.mark_used()
+
+        conffile = os.path.normpath(inputfile.relative_name())
+        if conffile not in self.interpreter.build_def_files:
+            self.interpreter.build_def_files.append(conffile)
+
+        res = build.Data([mesonlib.File(True, ofile_path, ofile_fname)], install_dir, None, state.subproject)
+        self.interpreter.build.data.append(res)
+
+        return res
+
+    @FeatureNew('subproject', '0.51.0')
+    @FeatureNewKwargs('subproject', '0.55.0', ['options'])
+    @FeatureDeprecatedKwargs('subproject', '0.55.0', ['cmake_options'])
+    @permittedKwargs({'cmake_options', 'required', 'options'})
+    @stringArgs
+    def subproject(self, state, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Subproject takes exactly one argument')
+        if 'cmake_options' in kwargs and 'options' in kwargs:
+            raise InterpreterException('"options" cannot be used together with "cmake_options"')
+        dirname = args[0]
+        subp = self.interpreter.do_subproject(dirname, 'cmake', kwargs)
+        if not subp.found():
+            return subp
+        return CMakeSubproject(subp, dirname)
+
+    @FeatureNew('subproject_options', '0.55.0')
+    @noKwargs
+    @noPosargs
+    def subproject_options(self, state, args, kwargs) -> CMakeSubprojectOptions:
+        return CMakeSubprojectOptions()
+
+def initialize(*args, **kwargs):
+    return CmakeModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/dlang.py b/meson/mesonbuild/modules/dlang.py
new file mode 100644
index 000000000..60d28854e
--- /dev/null
+++ b/meson/mesonbuild/modules/dlang.py
@@ -0,0 +1,135 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module provides helpers for the D programming language, such as
+# generating dub.json package description files.
+
+import json
+import os
+
+from . import ExtensionModule
+from .. import dependencies
+from .. import mlog
+from ..mesonlib import Popen_safe, MesonException
+from ..programs import ExternalProgram
+
+class DlangModule(ExtensionModule):
+    class_dubbin = None
+    init_dub = False
+
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'generate_dub_file': self.generate_dub_file,
+        })
+
+    def _init_dub(self):
+        if DlangModule.class_dubbin is None:
+            self.dubbin = dependencies.DubDependency.class_dubbin
+            DlangModule.class_dubbin = self.dubbin
+        else:
+            self.dubbin = DlangModule.class_dubbin
+
+        if DlangModule.class_dubbin is None:
+            self.dubbin = self.check_dub()
+            DlangModule.class_dubbin = self.dubbin
+        else:
+            self.dubbin = DlangModule.class_dubbin
+
+        if not self.dubbin:
+            raise MesonException('DUB not found.')
+
+    def generate_dub_file(self, state, args, kwargs):
+        if not DlangModule.init_dub:
+            self._init_dub()
+
+        if len(args) < 2:
+            raise MesonException('Missing arguments')
+
+        config = {
+            'name': args[0]
+        }
+
+        config_path = os.path.join(args[1], 'dub.json')
+        if os.path.exists(config_path):
+            with open(config_path, encoding='utf-8') as ofile:
+                try:
+                    config = json.load(ofile)
+                except ValueError:
+                    mlog.warning('Failed to load the data in dub.json')
+
+        warn_publishing = ['description', 'license']
+        for arg in warn_publishing:
+            if arg not in kwargs and \
+               arg not in config:
+                mlog.warning('Without', mlog.bold(arg), 'the DUB package can\'t be published')
+
+        for key, value in kwargs.items():
+            if key == 'dependencies':
+                config[key] = {}
+                if isinstance(value, list):
+                    for dep in value:
+                        if isinstance(dep, dependencies.Dependency):
+                            name = dep.get_name()
+                            ret, res = self._call_dubbin(['describe', name])
+                            if ret == 0:
+                                version = dep.get_version()
+                                if version is None:
+                                    config[key][name] = ''
+                                else:
+                                    config[key][name] = version
+                elif isinstance(value, dependencies.Dependency):
+                    name = value.get_name()
+                    ret, res = self._call_dubbin(['describe', name])
+                    if ret == 0:
+                        version = value.get_version()
+                        if version is None:
+                            config[key][name] = ''
+                        else:
+                            config[key][name] = version
+            else:
+                config[key] = value
+
+        with open(config_path, 'w', encoding='utf-8') as ofile:
+            ofile.write(json.dumps(config, indent=4, ensure_ascii=False))
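+
+    # Rough sketch of the dub.json that generate_dub_file() writes (values are
+    # hypothetical; the real content comes from the positional args and kwargs):
+    #
+    #     {
+    #         "name": "mypkg",
+    #         "license": "MIT",
+    #         "dependencies": {"somedep": "1.2.3"}
+    #     }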
+
+    def _call_dubbin(self, args, env=None):
+        p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
+        return p.returncode, out.strip()
+
+    def check_dub(self):
+        dubbin = ExternalProgram('dub', silent=True)
+        if dubbin.found():
+            try:
+                p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
+                if p.returncode != 0:
+                    mlog.warning('Found dub {!r} but couldn\'t run it'
+                                 ''.format(' '.join(dubbin.get_command())))
+                    # Set to False instead of None to signify that we've already
+                    # searched for it and not found it
+                    dubbin = False
+            except (FileNotFoundError, PermissionError):
+                dubbin = False
+        else:
+            dubbin = False
+        if dubbin:
+            mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
+                     '(%s)' % out.strip())
+        else:
+            mlog.log('Found DUB:', mlog.red('NO'))
+        return dubbin
+
+def initialize(*args, **kwargs):
+    return DlangModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/fs.py b/meson/mesonbuild/modules/fs.py
new file mode 100644
index 000000000..ab3aae2b1
--- /dev/null
+++ b/meson/mesonbuild/modules/fs.py
@@ -0,0 +1,258 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+import hashlib
+import os
+from pathlib import Path, PurePath, PureWindowsPath
+
+from .. import mlog
+from . import ExtensionModule
+from ..mesonlib import (
+    File,
+    FileOrString,
+    MesonException,
+    path_is_in_root,
+)
+from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..interpreter import Interpreter
+
+    from typing_extensions import TypedDict
+
+    class ReadKwArgs(TypedDict):
+        """Keyword Arguments for fs.read."""
+
+        encoding: str
+
+
+class FSModule(ExtensionModule):
+
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__(interpreter)
+        self.methods.update({
+            'expanduser': self.expanduser,
+            'is_absolute': self.is_absolute,
+            'as_posix': self.as_posix,
+            'exists': self.exists,
+            'is_symlink': self.is_symlink,
+            'is_file': self.is_file,
+            'is_dir': self.is_dir,
+            'hash': self.hash,
+            'size': self.size,
+            'is_samepath': self.is_samepath,
+            'replace_suffix': self.replace_suffix,
+            'parent': self.parent,
+            'name': self.name,
+            'stem': self.stem,
+            'read': self.read,
+        })
+
+    def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+        """
+        make an absolute path from a relative path, WITHOUT resolving symlinks
+        """
+        if isinstance(arg, File):
+            return Path(arg.absolute_path(state.source_root, self.interpreter.environment.get_build_dir()))
+        return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser()
+
+    def _resolve_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+        """
+        resolves symlinks and makes absolute a directory relative to calling meson.build,
+        if not already absolute
+        """
+        path = self._absolute_dir(state, arg)
+        try:
+            # accommodate unresolvable paths e.g. symlink loops
+            path = path.resolve()
+        except Exception:
+            # return the best we could do
+            pass
+        return path
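+
+    # Rough illustration with hypothetical values: for a meson.build in subdir
+    # 'sub' of a source tree rooted at '/src', a plain string resolves as
+    #
+    #     self._resolve_dir(state, 'data/x.txt')  ->  Path('/src/sub/data/x.txt')
+    #
+    # (symlinks resolved where possible), while a File object is resolved via
+    # its own absolute_path() against the source or build root.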
+
+    @noKwargs
+    @FeatureNew('fs.expanduser', '0.54.0')
+    @typed_pos_args('fs.expanduser', str)
+    def expanduser(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+        return str(Path(args[0]).expanduser())
+
+    @noKwargs
+    @FeatureNew('fs.is_absolute', '0.54.0')
+    @typed_pos_args('fs.is_absolute', (str, File))
+    def is_absolute(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+        if isinstance(args[0], File):
+            FeatureNew('fs.is_absolute_file', '0.59.0').use(state.subproject)
+        return PurePath(str(args[0])).is_absolute()
+
+    @noKwargs
+    @FeatureNew('fs.as_posix', '0.54.0')
+    @typed_pos_args('fs.as_posix', str)
+    def as_posix(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+        """
+        this function assumes you are passing a Windows path, even if on a Unix-like system
+        and so ALL '\' are turned to '/', even if you meant to escape a character
+        """
+        return PureWindowsPath(args[0]).as_posix()
+
+    @noKwargs
+    @typed_pos_args('fs.exists', str)
+    def exists(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+        return self._resolve_dir(state, args[0]).exists()
+
+    @noKwargs
+    @typed_pos_args('fs.is_symlink', (str, File))
+    def is_symlink(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+        if isinstance(args[0], File):
+            FeatureNew('fs.is_symlink_file', '0.59.0').use(state.subproject)
+        return self._absolute_dir(state, args[0]).is_symlink()
+
+    @noKwargs
+    @typed_pos_args('fs.is_file', str)
+    def is_file(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+        return self._resolve_dir(state, args[0]).is_file()
+
+    @noKwargs
+    @typed_pos_args('fs.is_dir', str)
+    def is_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+        return self._resolve_dir(state, args[0]).is_dir()
+
+    @noKwargs
+    @typed_pos_args('fs.hash', (str, File), str)
+    def hash(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.hash_file', '0.59.0').use(state.subproject)
+        file = self._resolve_dir(state, args[0])
+        if not file.is_file():
+            raise MesonException(f'{file} is not a file and therefore cannot be hashed')
+        try:
+            h = hashlib.new(args[1])
+        except ValueError:
+            raise MesonException('hash algorithm {} is not available'.format(args[1]))
+        mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, file.stat().st_size))
+        h.update(file.read_bytes())
+        return h.hexdigest()
+
+    @noKwargs
+    @typed_pos_args('fs.size', (str, File))
+    def size(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> int:
+        if isinstance(args[0], File):
+            FeatureNew('fs.size_file', '0.59.0').use(state.subproject)
+        file = self._resolve_dir(state, args[0])
+        if not file.is_file():
+            raise MesonException(f'{file} is not a file and therefore cannot be sized')
+        try:
+            return file.stat().st_size
+        except ValueError:
+            raise MesonException('{} size could not be determined'.format(args[0]))
+
+    @noKwargs
+    @typed_pos_args('fs.is_samepath', (str, File), (str, File))
+    def is_samepath(self, state: 'ModuleState', args: T.Tuple['FileOrString', 'FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+        if isinstance(args[0], File) or isinstance(args[1], File):
+            FeatureNew('fs.is_samepath_file', '0.59.0').use(state.subproject)
+        file1 = self._resolve_dir(state, args[0])
+        file2 = self._resolve_dir(state, args[1])
+        if not file1.exists():
+            return False
+        if not file2.exists():
+            return False
+        try:
+            return file1.samefile(file2)
+        except OSError:
+            return False
+
+    @noKwargs
+    @typed_pos_args('fs.replace_suffix', (str, File), str)
+    def replace_suffix(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.replace_suffix_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.with_suffix(args[1])
+        return str(new)
+
+    @noKwargs
+    @typed_pos_args('fs.parent', (str, File))
+    def parent(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.parent_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.parent
+        return str(new)
+
+    @noKwargs
+    @typed_pos_args('fs.name', (str, File))
+    def name(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.name_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.name
+        return str(new)
+
+    @noKwargs
+    @typed_pos_args('fs.stem', (str, File))
+    @FeatureNew('fs.stem', '0.54.0')
+    def stem(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.stem_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.stem
+        return str(new)
+
+    @FeatureNew('fs.read', '0.57.0')
+    @typed_pos_args('fs.read', (str, File))
+    @typed_kwargs('fs.read', KwargInfo('encoding', str, default='utf-8'))
+    def read(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: 'ReadKwArgs') -> str:
+        """Read a file from the source tree and return its value as a decoded
+        string.
+
+        If the encoding is not specified, the file is assumed to be utf-8
+        encoded. Paths must be relative by default (to prevent accidents) and
+        are forbidden to be read from the build directory (to prevent build
+        loops)
+        """
+        path = args[0]
+        encoding = kwargs['encoding']
+        src_dir = self.interpreter.environment.source_dir
+        sub_dir = self.interpreter.subdir
+        build_dir = self.interpreter.environment.get_build_dir()
+
+        if isinstance(path, File):
+            if path.is_built:
+                raise MesonException(
+                    'fs.read does not accept built files() objects')
+            path = os.path.join(src_dir, path.relative_name())
+        else:
+            if sub_dir:
+                src_dir = os.path.join(src_dir, sub_dir)
+            path = os.path.join(src_dir, path)
+
+        path = os.path.abspath(path)
+        if path_is_in_root(Path(path), Path(build_dir), resolve=True):
+            raise MesonException('path must not be in the build tree')
+        try:
+            with open(path, encoding=encoding) as f:
+                data = f.read()
+        except UnicodeDecodeError:
+            raise MesonException(f'decoding failed for {path}')
+        # Reconfigure when this file changes, as it can contain data used by any
+        # part of the build configuration (e.g. `project(..., version:
+        # fs.read('VERSION'))` or `configure_file(...)`).
+        self.interpreter.add_build_def_file(path)
+        return data
+
+
+def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule:
+    return FSModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/gnome.py b/meson/mesonbuild/modules/gnome.py
new file mode 100644
index 000000000..881e4240e
--- /dev/null
+++ b/meson/mesonbuild/modules/gnome.py
@@ -0,0 +1,1812 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for Gnome/GLib related
+functionality such as gobject-introspection, gresources and gtk-doc'''
+
+import os
+import copy
+import subprocess
+import functools
+import typing as T
+
+from .. import build
+from .. import mlog
+from .. import mesonlib
+from .. import interpreter
+from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
+from . import ExtensionModule
+from . import ModuleReturnValue
+from ..mesonlib import (
+    MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list,
+    join_args, HoldableObject
+)
+from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
+from ..interpreterbase import noPosargs, noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs, FeatureDeprecatedKwargs
+from ..interpreterbase import typed_kwargs, KwargInfo, ContainerTypeInfo
+from ..programs import ExternalProgram, OverrideProgram
+from ..build import CustomTarget, CustomTargetIndex, GeneratedList
+
+if T.TYPE_CHECKING:
+    from ..compilers import Compiler
+    from ..interpreter import Interpreter
+
+# gresource compilation is broken due to the way
+# the resource compiler and Ninja clash about it
+#
+# https://github.com/ninja-build/ninja/issues/1184
+# https://bugzilla.gnome.org/show_bug.cgi?id=774368
+gresource_dep_needed_version = '>= 2.51.1'
+
+native_glib_version = None
+
+class GnomeModule(ExtensionModule):
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__(interpreter)
+        self.gir_dep = None
+        self.install_glib_compile_schemas = False
+        self.install_gio_querymodules = []
+        self.install_gtk_update_icon_cache = False
+        self.install_update_desktop_database = False
+        self.devenv = None
+        self.methods.update({
+            'post_install': self.post_install,
+            'compile_resources': self.compile_resources,
+            'generate_gir': self.generate_gir,
+            'compile_schemas': self.compile_schemas,
+            'yelp': self.yelp,
+            'gtkdoc': self.gtkdoc,
+            'gtkdoc_html_dir': self.gtkdoc_html_dir,
+            'gdbus_codegen': self.gdbus_codegen,
+            'mkenums': self.mkenums,
+            'mkenums_simple': self.mkenums_simple,
+            'genmarshal': self.genmarshal,
+            'generate_vapi': self.generate_vapi,
+        })
+
+    @staticmethod
+    def _get_native_glib_version(state):
+        global native_glib_version
+        if native_glib_version is None:
+            glib_dep = PkgConfigDependency('glib-2.0', state.environment,
+                                           {'native': True, 'required': False})
+            if glib_dep.found():
+                native_glib_version = glib_dep.get_version()
+            else:
+                mlog.warning('Could not detect glib version, assuming 2.54. '
+                             'You may get build errors if your glib is older.')
+                native_glib_version = '2.54'
+        return native_glib_version
+
+    @mesonlib.run_once
+    def __print_gresources_warning(self, state):
+        if not mesonlib.version_compare(self._get_native_glib_version(state),
+                                        gresource_dep_needed_version):
+            mlog.warning('GLib compiled dependencies do not work reliably with \n'
+                         'the current version of GLib. See the following upstream issue:',
+                         mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
+
+    @staticmethod
+    def _print_gdbus_warning():
+        mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
+                     '  include_directories of targets with GLib < 2.51.3:',
+                     mlog.bold('https://github.com/mesonbuild/meson/issues/1387'),
+                     once=True)
+
+    def _get_dep(self, state, depname, native=False, required=True):
+        kwargs = {'native': native, 'required': required}
+        return self.interpreter.func_dependency(state.current_node, [depname], kwargs)
+
+    def _get_native_binary(self, state, name, depname, varname, required=True):
+        # Look in overrides in case glib/gtk/etc are built as subproject
+        prog = self.interpreter.program_from_overrides([name], [])
+        if prog is not None:
+            return prog
+
+        # Look in machine file
+        prog = state.environment.lookup_binary_entry(MachineChoice.HOST, name)
+        if prog is not None:
+            return ExternalProgram.from_entry(name, prog)
+
+        # Check if pkgconfig has a variable
+        dep = self._get_dep(state, depname, native=True, required=False)
+        if dep.found() and dep.type_name == 'pkgconfig':
+            value = dep.get_pkgconfig_variable(varname, {})
+            if value:
+                return ExternalProgram(name, value)
+
+        # Normal program lookup
+        return state.find_program(name, required=required)
+
+    @typed_kwargs('gnome.post_install',
+        KwargInfo('glib_compile_schemas', bool, default=False),
+        KwargInfo('gio_querymodules', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('gtk_update_icon_cache', bool, default=False),
+        KwargInfo('update_desktop_database', bool, default=False, since='0.59.0'),
+    )
+    @noPosargs
+    @FeatureNew('gnome.post_install', '0.57.0')
+    def post_install(self, state, args, kwargs):
+        rv = []
+        datadir_abs = os.path.join(state.environment.get_prefix(), state.environment.get_datadir())
+        if kwargs['glib_compile_schemas'] and not self.install_glib_compile_schemas:
+            self.install_glib_compile_schemas = True
+            prog = self._get_native_binary(state, 'glib-compile-schemas', 'gio-2.0', 'glib_compile_schemas')
+            schemasdir = os.path.join(datadir_abs, 'glib-2.0', 'schemas')
+            script = state.backend.get_executable_serialisation([prog, schemasdir])
+            script.skip_if_destdir = True
+            rv.append(script)
+        for d in kwargs['gio_querymodules']:
+            if d not in self.install_gio_querymodules:
+                self.install_gio_querymodules.append(d)
+                prog = self._get_native_binary(state, 'gio-querymodules', 'gio-2.0', 'gio_querymodules')
+                moduledir = os.path.join(state.environment.get_prefix(), d)
+                script = state.backend.get_executable_serialisation([prog, moduledir])
+                script.skip_if_destdir = True
+                rv.append(script)
+        if kwargs['gtk_update_icon_cache'] and not self.install_gtk_update_icon_cache:
+            self.install_gtk_update_icon_cache = True
+            prog = self._get_native_binary(state, 'gtk4-update-icon-cache', 'gtk-4.0', 'gtk4_update_icon_cache', required=False)
+            found = isinstance(prog, build.Executable) or prog.found()
+            if not found:
+                prog = self._get_native_binary(state, 'gtk-update-icon-cache', 'gtk+-3.0', 'gtk_update_icon_cache')
+            icondir = os.path.join(datadir_abs, 'icons', 'hicolor')
+            script = state.backend.get_executable_serialisation([prog, '-q', '-t' ,'-f', icondir])
+            script.skip_if_destdir = True
+            rv.append(script)
+        if kwargs['update_desktop_database'] and not self.install_update_desktop_database:
+            self.install_update_desktop_database = True
+            prog = self._get_native_binary(state, 'update-desktop-database', 'desktop-file-utils', 'update_desktop_database')
+            appdir = os.path.join(datadir_abs, 'applications')
+            script = state.backend.get_executable_serialisation([prog, '-q', appdir])
+            script.skip_if_destdir = True
+            rv.append(script)
+        return ModuleReturnValue(None, rv)
+
+    @FeatureNewKwargs('gnome.compile_resources', '0.37.0', ['gresource_bundle', 'export', 'install_header'])
+    @permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
+                      'install', 'install_dir', 'extra_args', 'build_by_default'})
+    def compile_resources(self, state, args, kwargs):
+        self.__print_gresources_warning(state)
+        glib_version = self._get_native_glib_version(state)
+
+        glib_compile_resources = state.find_program('glib-compile-resources')
+        cmd = [glib_compile_resources, '@INPUT@']
+
+        source_dirs, dependencies = [mesonlib.extract_as_list(kwargs, c, pop=True) for c in  ['source_dir', 'dependencies']]
+
+        if len(args) < 2:
+            raise MesonException('Not enough arguments; the name of the resource '
+                                 'and the path to the XML file are required')
+
+        # Validate dependencies
+        subdirs = []
+        depends = []
+        for (ii, dep) in enumerate(dependencies):
+            if isinstance(dep, mesonlib.File):
+                subdirs.append(dep.subdir)
+            elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
+                depends.append(dep)
+                subdirs.append(dep.get_subdir())
+                if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+                    m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
+                        'be used with the current version of glib-compile-resources due to\n' \
+                        ''
+                    raise MesonException(m)
+            else:
+                m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
+                    '"dependencies" argument.\nPlease pass the return value of ' \
+                    'custom_target() or configure_file()'
+                raise MesonException(m.format(dep))
+
+        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+            ifile = args[1]
+            if isinstance(ifile, mesonlib.File):
+                # glib-compile-resources will be run inside the source dir,
+                # so we need either 'src_to_build' or the absolute path.
+                # Absolute path is the easiest choice.
+                if ifile.is_built:
+                    ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
+                else:
+                    ifile = os.path.join(ifile.subdir, ifile.fname)
+            elif isinstance(ifile, str):
+                ifile = os.path.join(state.subdir, ifile)
+            elif isinstance(ifile, (build.CustomTarget,
+                                    build.CustomTargetIndex,
+                                    build.GeneratedList)):
+                m = 'Resource xml files generated at build-time cannot be used ' \
+                    'with gnome.compile_resources() because we need to scan ' \
+                    'the xml for dependencies. Use configure_file() instead ' \
+                    'to generate it at configure-time.'
+                raise MesonException(m)
+            else:
+                raise MesonException(f'Invalid file argument: {ifile!r}')
+            depend_files, depends, subdirs = self._get_gresource_dependencies(
+                state, ifile, source_dirs, dependencies)
+
+        # Make source dirs relative to build dir now
+        source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
+        # Ensure build directories of generated deps are included
+        source_dirs += subdirs
+        # Always include current directory, but after paths set by user
+        source_dirs.append(os.path.join(state.build_to_src, state.subdir))
+
+        for source_dir in OrderedSet(source_dirs):
+            cmd += ['--sourcedir', source_dir]
+
+        if 'c_name' in kwargs:
+            cmd += ['--c-name', kwargs.pop('c_name')]
+        export = kwargs.pop('export', False)
+        if not export:
+            cmd += ['--internal']
+
+        cmd += ['--generate', '--target', '@OUTPUT@']
+
+        cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))
+
+        gresource = kwargs.pop('gresource_bundle', False)
+        if gresource:
+            output = args[0] + '.gresource'
+            name = args[0] + '_gresource'
+        else:
+            if 'c' in state.environment.coredata.compilers.host:
+                output = args[0] + '.c'
+                name = args[0] + '_c'
+            elif 'cpp' in state.environment.coredata.compilers.host:
+                output = args[0] + '.cpp'
+                name = args[0] + '_cpp'
+            else:
+                raise MesonException('Compiling GResources into code is only supported in C and C++ projects')
+
+        if kwargs.get('install', False) and not gresource:
+            raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
+
+        install_header = kwargs.pop('install_header', False)
+        if install_header and gresource:
+            raise MesonException('The install_header kwarg does not apply to gresource bundles')
+        if install_header and not export:
+            raise MesonException('The GResource header cannot be installed unless export is enabled')
+
+        kwargs['input'] = args[1]
+        kwargs['output'] = output
+        kwargs['depends'] = depends
+        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+            # This will eventually go out of sync if dependencies are added
+            kwargs['depend_files'] = depend_files
+            kwargs['command'] = cmd
+        else:
+            depfile = f'{output}.d'
+            kwargs['depfile'] = depfile
+            kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
+        target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)
+
+        if gresource: # Only one target for .gresource files
+            return ModuleReturnValue(target_c, [target_c])
+
+        h_kwargs = {
+            'command': cmd,
+            'input': args[1],
+            'output': args[0] + '.h',
+            # The header doesn't actually use the resource files, but the tool
+            # errors out if they are missing
+            'depends': depends
+        }
+        if 'build_by_default' in kwargs:
+            h_kwargs['build_by_default'] = kwargs['build_by_default']
+        if install_header:
+            h_kwargs['install'] = install_header
+            h_kwargs['install_dir'] = kwargs.get('install_dir',
+                                                 state.environment.coredata.get_option(mesonlib.OptionKey('includedir')))
+        target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, state.subproject, h_kwargs)
+        rv = [target_c, target_h]
+        return ModuleReturnValue(rv, rv)
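+    # Illustrative meson.build usage of the compile_resources() entry point
+    # above (a sketch only; the resource and target names are hypothetical):
+    #   resources = gnome.compile_resources('my-resources', 'my.gresource.xml',
+    #                                       source_dir: 'data',
+    #                                       c_name: 'my_resources')
+    # Unless gresource_bundle is true, both the generated source target and
+    # the matching header target constructed above are returned.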
+
+    def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
+
+        cmd = ['glib-compile-resources',
+               input_file,
+               '--generate-dependencies']
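+        # In --generate-dependencies mode glib-compile-resources prints the
+        # files referenced by the .gresource.xml, one path per line; that
+        # stdout listing is parsed below.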
+
+        # Prefer generated files over source files
+        cmd += ['--sourcedir', state.subdir] # Current build dir
+        for source_dir in source_dirs:
+            cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]
+
+        try:
+            pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
+        except (FileNotFoundError, PermissionError):
+            raise MesonException('Could not execute glib-compile-resources.')
+        if pc.returncode != 0:
+            m = 'glib-compile-resources failed to get dependencies for {}:\n{}'
+            mlog.warning(m.format(cmd[1], stderr))
+            raise subprocess.CalledProcessError(pc.returncode, cmd)
+
+        dep_files = stdout.split('\n')[:-1]
+
+        depends = []
+        subdirs = []
+        for resfile in dep_files[:]:
+            resbasename = os.path.basename(resfile)
+            for dep in dependencies:
+                if isinstance(dep, mesonlib.File):
+                    if dep.fname != resbasename:
+                        continue
+                    dep_files.remove(resfile)
+                    dep_files.append(dep)
+                    subdirs.append(dep.subdir)
+                    break
+                elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
+                    fname = None
+                    outputs = {(o, os.path.basename(o)) for o in dep.get_outputs()}
+                    for o, baseo in outputs:
+                        if baseo == resbasename:
+                            fname = o
+                            break
+                    if fname is not None:
+                        dep_files.remove(resfile)
+                        depends.append(dep)
+                        subdirs.append(dep.get_subdir())
+                        break
+            else:
+                # In generate-dependencies mode, glib-compile-resources doesn't raise
+                # an error for missing resources but instead prints whatever filename
+                # was listed in the input file.  That's good because it means we can
+                # handle resource files that get generated as part of the build, as
+                # follows.
+                #
+                # If there are multiple generated resource files with the same basename
+                # then this code will get confused.
+                try:
+                    f = mesonlib.File.from_source_file(state.environment.get_source_dir(),
+                                                       ".", resfile)
+                except MesonException:
+                    raise MesonException(
+                        'Resource "%s" listed in "%s" was not found. If this is a '
+                        'generated file, pass the target that generates it to '
+                        'gnome.compile_resources() using the "dependencies" '
+                        'keyword argument.' % (resfile, input_file))
+                dep_files.remove(resfile)
+                dep_files.append(f)
+        return dep_files, depends, subdirs
+
+    def _get_link_args(self, state, lib, depends, include_rpath=False,
+                       use_gir_args=False):
+        link_command = []
+        # Construct link args
+        if isinstance(lib, build.SharedLibrary):
+            libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
+            link_command.append('-L' + libdir)
+            if include_rpath:
+                link_command.append('-Wl,-rpath,' + libdir)
+            depends.append(lib)
+            # Needed for the following binutils bug:
+            # https://github.com/mesonbuild/meson/issues/1911
+            # However, g-ir-scanner does not understand -Wl,-rpath
+            # so we need to use -L instead
+            for d in state.backend.determine_rpath_dirs(lib):
+                d = os.path.join(state.environment.get_build_dir(), d)
+                link_command.append('-L' + d)
+                if include_rpath:
+                    link_command.append('-Wl,-rpath,' + d)
+        if use_gir_args and self._gir_has_option('--extra-library'):
+            link_command.append('--extra-library=' + lib.name)
+        else:
+            link_command.append('-l' + lib.name)
+        return link_command
+
+    def _get_dependencies_flags(self, deps, state, depends, include_rpath=False,
+                                use_gir_args=False, separate_nodedup=False):
+        cflags = OrderedSet()
+        internal_ldflags = OrderedSet()
+        external_ldflags = OrderedSet()
+        # External linker flags that can't be de-duped reliably because they
+        # require two args in order, such as -framework AVFoundation
+        external_ldflags_nodedup = []
+        gi_includes = OrderedSet()
+        deps = mesonlib.listify(deps)
+
+        for dep in deps:
+            if isinstance(dep, Dependency):
+                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+                if girdir:
+                    gi_includes.update([girdir])
+            if isinstance(dep, InternalDependency):
+                cflags.update(dep.get_compile_args())
+                cflags.update(state.get_include_args(dep.include_directories))
+                for lib in dep.libraries:
+                    if isinstance(lib, build.SharedLibrary):
+                        internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
+                        libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
+                                                                   use_gir_args, True)
+                        cflags.update(libdepflags[0])
+                        internal_ldflags.update(libdepflags[1])
+                        external_ldflags.update(libdepflags[2])
+                        external_ldflags_nodedup += libdepflags[3]
+                        gi_includes.update(libdepflags[4])
+                extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath,
+                                                           use_gir_args, True)
+                cflags.update(extdepflags[0])
+                internal_ldflags.update(extdepflags[1])
+                external_ldflags.update(extdepflags[2])
+                external_ldflags_nodedup += extdepflags[3]
+                gi_includes.update(extdepflags[4])
+                for source in dep.sources:
+                    if isinstance(source, GirTarget):
+                        gi_includes.update([os.path.join(state.environment.get_build_dir(),
+                                            source.get_subdir())])
+            # This should be any dependency other than an internal one.
+            elif isinstance(dep, Dependency):
+                cflags.update(dep.get_compile_args())
+                ldflags = iter(dep.get_link_args(raw=True))
+                for lib in ldflags:
+                    if (os.path.isabs(lib) and
+                            # For PkgConfigDependency only:
+                            getattr(dep, 'is_libtool', False)):
+                        lib_dir = os.path.dirname(lib)
+                        external_ldflags.update(["-L%s" % lib_dir])
+                        if include_rpath:
+                            external_ldflags.update([f'-Wl,-rpath {lib_dir}'])
+                        libname = os.path.basename(lib)
+                        if libname.startswith("lib"):
+                            libname = libname[3:]
+                        libname = libname.split(".so")[0]
+                        lib = "-l%s" % libname
+                    # FIXME: Hack to avoid passing some compiler options in
+                    if lib.startswith("-W"):
+                        continue
+                    # If it's a framework arg, slurp the framework name too
+                    # to preserve the order of arguments
+                    if lib == '-framework':
+                        external_ldflags_nodedup += [lib, next(ldflags)]
+                    else:
+                        external_ldflags.update([lib])
+            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+                cflags.update(state.get_include_args(dep.get_include_dirs()))
+                depends.append(dep)
+            else:
+                mlog.log(f'dependency {dep!r} not handled when building gir files')
+                continue
+
+        if use_gir_args and self._gir_has_option('--extra-library'):
+            def fix_ldflags(ldflags):
+                fixed_ldflags = OrderedSet()
+                for ldflag in ldflags:
+                    if ldflag.startswith("-l"):
+                        ldflag = ldflag.replace('-l', '--extra-library=', 1)
+                    fixed_ldflags.add(ldflag)
+                return fixed_ldflags
+            internal_ldflags = fix_ldflags(internal_ldflags)
+            external_ldflags = fix_ldflags(external_ldflags)
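+        # Depending on separate_nodedup, return either a 4-tuple (with the
+        # non-dedupable flags folded into external_ldflags) or a 5-tuple that
+        # keeps them separate.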
+        if not separate_nodedup:
+            external_ldflags.update(external_ldflags_nodedup)
+            return cflags, internal_ldflags, external_ldflags, gi_includes
+        else:
+            return cflags, internal_ldflags, external_ldflags, external_ldflags_nodedup, gi_includes
+
+    def _unwrap_gir_target(self, girtarget, state):
+        if not isinstance(girtarget, (build.Executable, build.SharedLibrary,
+                                      build.StaticLibrary)):
+            raise MesonException(f'Gir target must be an executable or library but is "{girtarget}" of type {type(girtarget).__name__}')
+
+        STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1"
+        if isinstance(girtarget, build.StaticLibrary) and \
+           not mesonlib.version_compare(
+               self._get_gir_dep(state)[0].get_version(),
+               STATIC_BUILD_REQUIRED_VERSION):
+            raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION)
+
+        return girtarget
+
+    def _devenv_append(self, varname: str, value: str) -> None:
+        if self.devenv is None:
+            self.devenv = build.EnvironmentVariables()
+            self.interpreter.build.devenv.append(self.devenv)
+        self.devenv.append(varname, [value])
+
+    def _get_gir_dep(self, state):
+        if not self.gir_dep:
+            self.gir_dep = self._get_dep(state, 'gobject-introspection-1.0')
+            self.giscanner = self._get_native_binary(state, 'g-ir-scanner', 'gobject-introspection-1.0', 'g_ir_scanner')
+            self.gicompiler = self._get_native_binary(state, 'g-ir-compiler', 'gobject-introspection-1.0', 'g_ir_compiler')
+        return self.gir_dep, self.giscanner, self.gicompiler
+
+    @functools.lru_cache(maxsize=None)
+    def _gir_has_option(self, option) -> bool:
+        exe = self.giscanner
+        if isinstance(exe, OverrideProgram):
+            # Handle overridden g-ir-scanner
+            assert option in ['--extra-library', '--sources-top-dirs']
+            return True
+        p, o, e = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT)
+        return p.returncode == 0 and option in o
+
+    def _scan_header(self, kwargs):
+        ret = []
+        header = kwargs.pop('header', None)
+        if header:
+            if not isinstance(header, str):
+                raise MesonException('header must be a string')
+            ret = ['--c-include=' + header]
+        return ret
+
+    def _scan_extra_args(self, kwargs):
+        return mesonlib.stringlistify(kwargs.pop('extra_args', []))
+
+    def _scan_link_withs(self, state, depends, kwargs):
+        ret = []
+        if 'link_with' in kwargs:
+            link_with = mesonlib.extract_as_list(kwargs, 'link_with', pop = True)
+
+            for link in link_with:
+                ret += self._get_link_args(state, link, depends,
+                                           use_gir_args=True)
+        return ret
+
+    # May mutate depends and gir_inc_dirs
+    def _scan_include(self, state, depends, gir_inc_dirs, kwargs):
+        ret = []
+
+        if 'includes' in kwargs:
+            includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True)
+            for inc in includes:
+                if isinstance(inc, str):
+                    ret += [f'--include={inc}']
+                elif isinstance(inc, GirTarget):
+                    gir_inc_dirs += [
+                        os.path.join(state.environment.get_build_dir(),
+                                     inc.get_subdir()),
+                    ]
+                    ret += [
+                        "--include-uninstalled={}".format(os.path.join(inc.get_subdir(), inc.get_basename()))
+                    ]
+                    depends += [inc]
+                else:
+                    raise MesonException(
+                        'Gir includes must be str, GirTarget, or list of them. '
+                        'Got %s.' % type(inc).__name__)
+
+        return ret
+
+    def _scan_symbol_prefix(self, kwargs):
+        ret = []
+
+        if 'symbol_prefix' in kwargs:
+            sym_prefixes = mesonlib.stringlistify(kwargs.pop('symbol_prefix', []))
+            ret += ['--symbol-prefix=%s' % sym_prefix for sym_prefix in sym_prefixes]
+
+        return ret
+
+    def _scan_identifier_prefix(self, kwargs):
+        ret = []
+
+        if 'identifier_prefix' in kwargs:
+            identifier_prefix = kwargs.pop('identifier_prefix')
+            if not isinstance(identifier_prefix, str):
+                raise MesonException('Gir identifier prefix must be str')
+            ret += ['--identifier-prefix=%s' % identifier_prefix]
+
+        return ret
+
+    def _scan_export_packages(self, kwargs):
+        ret = []
+
+        if 'export_packages' in kwargs:
+            pkgs = kwargs.pop('export_packages')
+            if isinstance(pkgs, str):
+                ret += ['--pkg-export=%s' % pkgs]
+            elif isinstance(pkgs, list):
+                ret += ['--pkg-export=%s' % pkg for pkg in pkgs]
+            else:
+                raise MesonException('Gir export packages must be str or list')
+
+        return ret
+
+    def _scan_inc_dirs(self, kwargs):
+        ret = mesonlib.extract_as_list(kwargs, 'include_directories', pop = True)
+        for incd in ret:
+            if not isinstance(incd, (str, build.IncludeDirs)):
+                raise MesonException(
+                    'Gir include dirs should be include_directories().')
+        return ret
+
+    def _scan_langs(self, state, langs):
+        ret = []
+
+        for lang in langs:
+            link_args = state.environment.coredata.get_external_link_args(MachineChoice.HOST, lang)
+            for link_arg in link_args:
+                if link_arg.startswith('-L'):
+                    ret.append(link_arg)
+
+        return ret
+
+    def _scan_gir_targets(self, state, girtargets):
+        ret = []
+
+        for girtarget in girtargets:
+            if isinstance(girtarget, build.Executable):
+                ret += ['--program', girtarget]
+            else:
+                # Because of https://gitlab.gnome.org/GNOME/gobject-introspection/merge_requests/72
+                # we can't use the full path until this is merged.
+                libpath = os.path.join(girtarget.get_subdir(), girtarget.get_filename())
+                # Must use absolute paths here because g-ir-scanner will not
+                # add them to the runtime path list if they're relative. This
+                # means we cannot use @BUILD_ROOT@
+                build_root = state.environment.get_build_dir()
+                if isinstance(girtarget, build.SharedLibrary):
+                    # need to put our output directory first as we need to use the
+                    # generated libraries instead of any possibly installed system/prefix
+                    # ones.
+                    ret += ["-L{}/{}".format(build_root, os.path.dirname(libpath))]
+                    libname = girtarget.get_basename()
+                else:
+                    libname = os.path.join(build_root, libpath)
+                ret += ['--library', libname]
+                # Needed for the following binutils bug:
+                # https://github.com/mesonbuild/meson/issues/1911
+                # However, g-ir-scanner does not understand -Wl,-rpath
+                # so we need to use -L instead
+                for d in state.backend.determine_rpath_dirs(girtarget):
+                    d = os.path.join(state.environment.get_build_dir(), d)
+                    ret.append('-L' + d)
+
+        return ret
+
+    def _get_girtargets_langs_compilers(self, girtargets: T.List[GirTarget]) -> T.List[T.Tuple[str, 'Compiler']]:
+        ret: T.List[T.Tuple[str, 'Compiler']] = []
+        for girtarget in girtargets:
+            for lang, compiler in girtarget.compilers.items():
+                # XXX: Can you use g-i with any other language?
+                if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'):
+                    ret.append((lang, compiler))
+                    break
+
+        return ret
+
+    def _get_gir_targets_deps(self, girtargets):
+        ret = []
+        for girtarget in girtargets:
+            ret += girtarget.get_all_link_deps()
+            ret += girtarget.get_external_deps()
+        return ret
+
+    def _get_gir_targets_inc_dirs(self, girtargets):
+        ret = []
+        for girtarget in girtargets:
+            ret += girtarget.get_include_dirs()
+        return ret
+
+    def _get_langs_compilers_flags(self, state, langs_compilers: T.List[T.Tuple[str, 'Compiler']]):
+        cflags = []
+        internal_ldflags = []
+        external_ldflags = []
+
+        for lang, compiler in langs_compilers:
+            if state.global_args.get(lang):
+                cflags += state.global_args[lang]
+            if state.project_args.get(lang):
+                cflags += state.project_args[lang]
+            if mesonlib.OptionKey('b_sanitize') in compiler.base_options:
+                sanitize = state.environment.coredata.options[mesonlib.OptionKey('b_sanitize')].value
+                cflags += compiler.sanitizer_compile_args(sanitize)
+                sanitize = sanitize.split(',')
+                # These must be first in ldflags
+                if 'address' in sanitize:
+                    internal_ldflags += ['-lasan']
+                if 'thread' in sanitize:
+                    internal_ldflags += ['-ltsan']
+                if 'undefined' in sanitize:
+                    internal_ldflags += ['-lubsan']
+                # FIXME: Linking directly to lib*san is not recommended but g-ir-scanner
+                # does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
+                # ldflags += compiler.sanitizer_link_args(sanitize)
+
+        return cflags, internal_ldflags, external_ldflags
+
+    def _make_gir_filelist(self, state, srcdir, ns, nsversion, girtargets, libsources):
+        gir_filelist_dir = state.backend.get_target_private_dir_abs(girtargets[0])
+        if not os.path.isdir(gir_filelist_dir):
+            os.mkdir(gir_filelist_dir)
+        gir_filelist_filename = os.path.join(gir_filelist_dir, f'{ns}_{nsversion}_gir_filelist')
+
+        with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
+            for s in libsources:
+                if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
+                    for custom_output in s.get_outputs():
+                        gir_filelist.write(os.path.join(state.environment.get_build_dir(),
+                                                        state.backend.get_target_dir(s),
+                                                        custom_output) + '\n')
+                elif isinstance(s, mesonlib.File):
+                    gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n')
+                elif isinstance(s, build.GeneratedList):
+                    for gen_src in s.get_outputs():
+                        gir_filelist.write(os.path.join(srcdir, gen_src) + '\n')
+                else:
+                    gir_filelist.write(os.path.join(srcdir, s) + '\n')
+
+        return gir_filelist_filename
+
+    def _make_gir_target(self, state, girfile, scan_command, generated_files, depends, kwargs):
+        scankwargs = {'input': generated_files,
+                      'output': girfile,
+                      'command': scan_command,
+                      'depends': depends}
+
+        if 'install' in kwargs:
+            scankwargs['install'] = kwargs['install']
+            scankwargs['install_dir'] = kwargs.get('install_dir_gir',
+                                                   os.path.join(state.environment.get_datadir(), 'gir-1.0'))
+
+        if 'build_by_default' in kwargs:
+            scankwargs['build_by_default'] = kwargs['build_by_default']
+
+        return GirTarget(girfile, state.subdir, state.subproject, scankwargs)
+
+    def _make_typelib_target(self, state, typelib_output, typelib_cmd, generated_files, kwargs):
+        typelib_kwargs = {
+            'input': generated_files,
+            'output': typelib_output,
+            'command': typelib_cmd,
+        }
+
+        if 'install' in kwargs:
+            typelib_kwargs['install'] = kwargs['install']
+            typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib',
+                                                       os.path.join(state.environment.get_libdir(), 'girepository-1.0'))
+
+        if 'build_by_default' in kwargs:
+            typelib_kwargs['build_by_default'] = kwargs['build_by_default']
+
+        return TypelibTarget(typelib_output, state.subdir, state.subproject, typelib_kwargs)
+
+    # May mutate depends
+    def _gather_typelib_includes_and_update_depends(self, state, deps, depends):
+        # Need to recursively add deps on GirTarget sources from our
+        # dependencies and also find the include directories needed for the
+        # typelib generation custom target below.
+        typelib_includes = []
+        for dep in deps:
+            # Add a dependency on each GirTarget listed in dependencies and add
+            # the directory where it will be generated to the typelib includes
+            if isinstance(dep, InternalDependency):
+                for source in dep.sources:
+                    if isinstance(source, GirTarget) and source not in depends:
+                        depends.append(source)
+                        subdir = os.path.join(state.environment.get_build_dir(),
+                                              source.get_subdir())
+                        if subdir not in typelib_includes:
+                            typelib_includes.append(subdir)
+            # Do the same, but for dependencies of dependencies. These are
+            # stored in the list of generated sources for each link dep (from
+            # girtarget.get_all_link_deps() above).
+            # FIXME: Store this in the original form from declare_dependency()
+            # so it can be used here directly.
+            elif isinstance(dep, build.SharedLibrary):
+                for source in dep.generated:
+                    if isinstance(source, GirTarget):
+                        subdir = os.path.join(state.environment.get_build_dir(),
+                                              source.get_subdir())
+                        if subdir not in typelib_includes:
+                            typelib_includes.append(subdir)
+            if isinstance(dep, Dependency):
+                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+                if girdir and girdir not in typelib_includes:
+                    typelib_includes.append(girdir)
+        return typelib_includes
+
+    def _get_external_args_for_langs(self, state, langs):
+        ret = []
+        for lang in langs:
+            ret += state.environment.coredata.get_external_args(MachineChoice.HOST, lang)
+        return ret
+
+    @staticmethod
+    def _get_scanner_cflags(cflags):
+        'g-ir-scanner only accepts -I/-D/-U; must ignore all other flags'
+        for f in cflags:
+            # _FORTIFY_SOURCE depends on / works together with -O, on the other hand this
+            # just invokes the preprocessor anyway
+            if f.startswith(('-D', '-U', '-I')) and not f.startswith('-D_FORTIFY_SOURCE'):
+                yield f
+
+    @staticmethod
+    def _get_scanner_ldflags(ldflags):
+        'g-ir-scanner only accepts -L/-l; must ignore -F and other linker flags'
+        for f in ldflags:
+            if f.startswith(('-L', '-l', '--extra-library')):
+                yield f
+
+    @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings'])
+    @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default'])
+    @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
+                      'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
+                      'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
+                      'packages', 'header', 'build_by_default', 'fatal_warnings'})
+    def generate_gir(self, state, args, kwargs: T.Dict[str, T.Any]):
+        if not args:
+            raise MesonException('generate_gir takes at least one argument')
+        if kwargs.get('install_dir'):
+            raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"')
+
+        girtargets = [self._unwrap_gir_target(arg, state) for arg in args]
+
+        if len(girtargets) > 1 and any(isinstance(el, build.Executable) for el in girtargets):
+            raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
+
+        gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
+
+        ns = kwargs.get('namespace')
+        if not ns:
+            raise MesonException('Missing "namespace" keyword argument')
+        nsversion = kwargs.get('nsversion')
+        if not nsversion:
+            raise MesonException('Missing "nsversion" keyword argument')
+        libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True)
+        girfile = f'{ns}-{nsversion}.gir'
+        srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
+        builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
+        depends = gir_dep.sources + girtargets
+        gir_inc_dirs = []
+        langs_compilers = self._get_girtargets_langs_compilers(girtargets)
+        cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
+        deps = self._get_gir_targets_deps(girtargets)
+        deps += extract_as_list(kwargs, 'dependencies', pop=True)
+        deps += [gir_dep]
+        typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends)
+        # ldflags will be misinterpreted by gir scanner (showing
+        # spurious dependencies) but building GStreamer fails if they
+        # are not used here.
+        dep_cflags, dep_internal_ldflags, dep_external_ldflags, gi_includes = \
+            self._get_dependencies_flags(deps, state, depends, use_gir_args=True)
+        cflags += list(self._get_scanner_cflags(dep_cflags))
+        cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers])))
+        internal_ldflags += list(self._get_scanner_ldflags(dep_internal_ldflags))
+        external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags))
+        girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets)
+        inc_dirs = self._scan_inc_dirs(kwargs)
+
+        scan_command = [giscanner]
+        scan_command += ['--no-libtool']
+        scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
+        scan_command += ['--warn-all']
+        scan_command += ['--output', '@OUTPUT@']
+        scan_command += self._scan_header(kwargs)
+        scan_command += self._scan_extra_args(kwargs)
+        scan_command += ['-I' + srcdir, '-I' + builddir]
+        scan_command += state.get_include_args(girtargets_inc_dirs)
+        scan_command += ['--filelist=' + self._make_gir_filelist(state, srcdir, ns, nsversion, girtargets, libsources)]
+        scan_command += self._scan_link_withs(state, depends, kwargs)
+        scan_command += self._scan_include(state, depends, gir_inc_dirs, kwargs)
+        scan_command += self._scan_symbol_prefix(kwargs)
+        scan_command += self._scan_identifier_prefix(kwargs)
+        scan_command += self._scan_export_packages(kwargs)
+        scan_command += ['--cflags-begin']
+        scan_command += cflags
+        scan_command += ['--cflags-end']
+        scan_command += state.get_include_args(inc_dirs)
+        scan_command += state.get_include_args(list(gi_includes) + gir_inc_dirs + inc_dirs, prefix='--add-include-path=')
+        scan_command += list(internal_ldflags)
+        scan_command += self._scan_gir_targets(state, girtargets)
+        scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers])
+        scan_command += list(external_ldflags)
+
+        if self._gir_has_option('--sources-top-dirs'):
+            scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)]
+            scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)]
+
+        if '--warn-error' in scan_command:
+            mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55')
+        fatal_warnings = kwargs.get('fatal_warnings', False)
+        if not isinstance(fatal_warnings, bool):
+            raise MesonException('fatal_warnings keyword argument must be a boolean')
+        if fatal_warnings:
+            scan_command.append('--warn-error')
+
+        generated_files = [f for f in libsources if isinstance(f, (GeneratedList, CustomTarget, CustomTargetIndex))]
+
+        scan_target = self._make_gir_target(state, girfile, scan_command, generated_files, depends, kwargs)
+
+        typelib_output = f'{ns}-{nsversion}.typelib'
+        typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@']
+        typelib_cmd += state.get_include_args(gir_inc_dirs, prefix='--includedir=')
+
+        for incdir in typelib_includes:
+            typelib_cmd += ["--includedir=" + incdir]
+
+        typelib_target = self._make_typelib_target(state, typelib_output, typelib_cmd, generated_files, kwargs)
+
+        self._devenv_append('GI_TYPELIB_PATH', os.path.join(state.environment.get_build_dir(), state.subdir))
+
+        rv = [scan_target, typelib_target]
+
+        return ModuleReturnValue(rv, rv)
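+    # Illustrative meson.build usage of generate_gir() (a sketch only; the
+    # library and namespace names are hypothetical):
+    #   gir = gnome.generate_gir(mylib,
+    #                            sources: mylib_sources,
+    #                            namespace: 'MyLib',
+    #                            nsversion: '1.0',
+    #                            includes: ['GObject-2.0'],
+    #                            install: true)
+    # The returned value holds the .gir and .typelib targets built above.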
+
+    @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+    @permittedKwargs({'build_by_default', 'depend_files'})
+    def compile_schemas(self, state, args, kwargs):
+        if args:
+            raise MesonException('Compile_schemas does not take positional arguments.')
+        srcdir = os.path.join(state.build_to_src, state.subdir)
+        outdir = state.subdir
+
+        cmd = [state.find_program('glib-compile-schemas')]
+        cmd += ['--targetdir', outdir, srcdir]
+        kwargs['command'] = cmd
+        kwargs['input'] = []
+        kwargs['output'] = 'gschemas.compiled'
+        if state.subdir == '':
+            targetname = 'gsettings-compile'
+        else:
+            targetname = 'gsettings-compile-' + state.subdir.replace('/', '_')
+        target_g = build.CustomTarget(targetname, state.subdir, state.subproject, kwargs)
+        self._devenv_append('GSETTINGS_SCHEMA_DIR', os.path.join(state.environment.get_build_dir(), state.subdir))
+        return ModuleReturnValue(target_g, [target_g])
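+    # Illustrative usage (a sketch): compile_schemas() takes no positional
+    # arguments and is called from the directory holding the schema XML, e.g.
+    #   gnome.compile_schemas(build_by_default: true)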
+
+    @permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
+    @FeatureDeprecatedKwargs('gnome.yelp', '0.43.0', ['languages'],
+                             'Use a LINGUAS file in the source directory instead')
+    def yelp(self, state, args, kwargs):
+        if len(args) < 1:
+            raise MesonException('Yelp requires a project id')
+
+        project_id = args[0]
+        sources = mesonlib.stringlistify(kwargs.pop('sources', []))
+        if not sources:
+            if len(args) > 1:
+                sources = mesonlib.stringlistify(args[1:])
+            if not sources:
+                raise MesonException('Yelp requires a list of sources')
+        source_str = '@@'.join(sources)
+
+        langs = mesonlib.stringlistify(kwargs.pop('languages', []))
+        media = mesonlib.stringlistify(kwargs.pop('media', []))
+        symlinks = kwargs.pop('symlink_media', True)
+
+        if not isinstance(symlinks, bool):
+            raise MesonException('symlink_media must be a boolean')
+
+        if kwargs:
+            raise MesonException('Unknown arguments passed: {}'.format(', '.join(kwargs.keys())))
+
+        script = state.environment.get_build_command()
+        args = ['--internal',
+                'yelphelper',
+                'install',
+                '--subdir=' + state.subdir,
+                '--id=' + project_id,
+                '--installdir=' + os.path.join(state.environment.get_datadir(), 'help'),
+                '--sources=' + source_str]
+        if symlinks:
+            args.append('--symlinks=true')
+        if media:
+            args.append('--media=' + '@@'.join(media))
+        if langs:
+            args.append('--langs=' + '@@'.join(langs))
+        inscript = state.backend.get_executable_serialisation(script + args)
+
+        potargs = state.environment.get_build_command() + [
+            '--internal', 'yelphelper', 'pot',
+            '--subdir=' + state.subdir,
+            '--id=' + project_id,
+            '--sources=' + source_str,
+        ]
+        pottarget = build.RunTarget('help-' + project_id + '-pot', potargs,
+                                    [], state.subdir, state.subproject)
+
+        poargs = state.environment.get_build_command() + [
+            '--internal', 'yelphelper', 'update-po',
+            '--subdir=' + state.subdir,
+            '--id=' + project_id,
+            '--sources=' + source_str,
+            '--langs=' + '@@'.join(langs),
+        ]
+        potarget = build.RunTarget('help-' + project_id + '-update-po', poargs,
+                                   [], state.subdir, state.subproject)
+
+        rv = [inscript, pottarget, potarget]
+        return ModuleReturnValue(None, rv)
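+    # Illustrative usage (a sketch; the page name is hypothetical):
+    #   gnome.yelp('my-project-id', sources: ['index.page'])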
+
+    @FeatureNewKwargs('gnome.gtkdoc', '0.52.0', ['check'])
+    @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['c_args'])
+    @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['module_version'])
+    @FeatureNewKwargs('gnome.gtkdoc', '0.37.0', ['namespace', 'mode'])
+    @permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install',
+                      'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile',
+                      'fixxref_args', 'html_args', 'html_assets', 'content_files',
+                      'mkdb_args', 'ignore_headers', 'include_directories',
+                      'namespace', 'mode', 'expand_content_files', 'module_version',
+                      'c_args', 'check'})
+    def gtkdoc(self, state, args, kwargs):
+        if len(args) != 1:
+            raise MesonException('Gtkdoc must have one positional argument.')
+        modulename = args[0]
+        if not isinstance(modulename, str):
+            raise MesonException('Gtkdoc arg must be string.')
+        if 'src_dir' not in kwargs:
+            raise MesonException('Keyword argument src_dir missing.')
+        main_file = kwargs.get('main_sgml', '')
+        if not isinstance(main_file, str):
+            raise MesonException('Main sgml keyword argument must be a string.')
+        main_xml = kwargs.get('main_xml', '')
+        if not isinstance(main_xml, str):
+            raise MesonException('Main xml keyword argument must be a string.')
+        moduleversion = kwargs.get('module_version', '')
+        if not isinstance(moduleversion, str):
+            raise MesonException('Module version keyword argument must be a string.')
+        if main_xml != '':
+            if main_file != '':
+                raise MesonException('You can only specify main_xml or main_sgml, not both.')
+            main_file = main_xml
+        targetname = modulename + ('-' + moduleversion if moduleversion else '') + '-doc'
+        command = state.environment.get_build_command()
+
+        namespace = kwargs.get('namespace', '')
+        mode = kwargs.get('mode', 'auto')
+        VALID_MODES = ('xml', 'sgml', 'none', 'auto')
+        if mode not in VALID_MODES:
+            raise MesonException(f'gtkdoc: Mode {mode} is not a valid mode: {VALID_MODES}')
+
+        src_dirs = mesonlib.extract_as_list(kwargs, 'src_dir')
+        header_dirs = []
+        for src_dir in src_dirs:
+            if isinstance(src_dir, HoldableObject):
+                if not isinstance(src_dir, build.IncludeDirs):
+                    raise MesonException('Invalid keyword argument for src_dir.')
+                for inc_dir in src_dir.get_incdirs():
+                    header_dirs.append(os.path.join(state.environment.get_source_dir(),
+                                                    src_dir.get_curdir(), inc_dir))
+                    header_dirs.append(os.path.join(state.environment.get_build_dir(),
+                                                    src_dir.get_curdir(), inc_dir))
+            else:
+                header_dirs.append(src_dir)
+
+        args = ['--internal', 'gtkdoc',
+                '--sourcedir=' + state.environment.get_source_dir(),
+                '--builddir=' + state.environment.get_build_dir(),
+                '--subdir=' + state.subdir,
+                '--headerdirs=' + '@@'.join(header_dirs),
+                '--mainfile=' + main_file,
+                '--modulename=' + modulename,
+                '--moduleversion=' + moduleversion,
+                '--mode=' + mode]
+        for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
+            program_name = 'gtkdoc-' + tool
+            program = state.find_program(program_name)
+            path = program.get_path()
+            args.append(f'--{program_name}={path}')
+        if namespace:
+            args.append('--namespace=' + namespace)
+        args += self._unpack_args('--htmlargs=', 'html_args', kwargs)
+        args += self._unpack_args('--scanargs=', 'scan_args', kwargs)
+        args += self._unpack_args('--scanobjsargs=', 'scanobjs_args', kwargs)
+        args += self._unpack_args('--gobjects-types-file=', 'gobject_typesfile', kwargs, state)
+        args += self._unpack_args('--fixxrefargs=', 'fixxref_args', kwargs)
+        args += self._unpack_args('--mkdbargs=', 'mkdb_args', kwargs)
+        args += self._unpack_args('--html-assets=', 'html_assets', kwargs, state)
+
+        depends = []
+        content_files = []
+        for s in mesonlib.extract_as_list(kwargs, 'content_files'):
+            if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
+                depends.append(s)
+                for o in s.get_outputs():
+                    content_files.append(os.path.join(state.environment.get_build_dir(),
+                                                      state.backend.get_target_dir(s),
+                                                      o))
+            elif isinstance(s, mesonlib.File):
+                content_files.append(s.absolute_path(state.environment.get_source_dir(),
+                                                     state.environment.get_build_dir()))
+            elif isinstance(s, build.GeneratedList):
+                depends.append(s)
+                for gen_src in s.get_outputs():
+                    content_files.append(os.path.join(state.environment.get_source_dir(),
+                                                      state.subdir,
+                                                      gen_src))
+            elif isinstance(s, str):
+                content_files.append(os.path.join(state.environment.get_source_dir(),
+                                                  state.subdir,
+                                                  s))
+            else:
+                raise MesonException(
+                    f'Invalid object type: {s.__class__.__name__!r}')
+        args += ['--content-files=' + '@@'.join(content_files)]
+
+        args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state)
+        args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs)
+        args += self._unpack_args('--installdir=', 'install_dir', kwargs)
+        args += self._get_build_args(kwargs, state, depends)
+        custom_kwargs = {'output': modulename + '-decl.txt',
+                         'command': command + args,
+                         'depends': depends,
+                         'build_always_stale': True,
+                         }
+        custom_target = build.CustomTarget(targetname, state.subdir, state.subproject, custom_kwargs)
+        alias_target = build.AliasTarget(targetname, [custom_target], state.subdir, state.subproject)
+        if kwargs.get('check', False):
+            check_cmd = state.find_program('gtkdoc-check')
+            check_env = ['DOC_MODULE=' + modulename,
+                         'DOC_MAIN_SGML_FILE=' + main_file]
+            check_args = [targetname + '-check', check_cmd]
+            check_workdir = os.path.join(state.environment.get_build_dir(), state.subdir)
+            state.test(check_args, env=check_env, workdir=check_workdir, depends=custom_target)
+        res = [custom_target, alias_target]
+        if kwargs.get('install', True):
+            res.append(state.backend.get_executable_serialisation(command + args))
+        return ModuleReturnValue(custom_target, res)
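+    # Illustrative meson.build usage of gtkdoc() (a sketch only; the module
+    # and file names are hypothetical):
+    #   gnome.gtkdoc('mylib',
+    #                main_sgml: 'mylib-docs.sgml',
+    #                src_dir: include_directories('.'),
+    #                install: true)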
+
+    def _get_build_args(self, kwargs, state, depends):
+        args = []
+        deps = extract_as_list(kwargs, 'dependencies')
+        cflags = []
+        cflags.extend(mesonlib.stringlistify(kwargs.pop('c_args', [])))
+        deps_cflags, internal_ldflags, external_ldflags, gi_includes = \
+            self._get_dependencies_flags(deps, state, depends, include_rpath=True)
+        inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories')
+        for incd in inc_dirs:
+            if not isinstance(incd, (str, build.IncludeDirs)):
+                raise MesonException(
+                    'Gir include dirs should be include_directories().')
+
+        cflags.extend(deps_cflags)
+        cflags.extend(state.get_include_args(inc_dirs))
+        ldflags = []
+        ldflags.extend(internal_ldflags)
+        ldflags.extend(external_ldflags)
+
+        cflags.extend(state.environment.coredata.get_external_args(MachineChoice.HOST, 'c'))
+        ldflags.extend(state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c'))
+        compiler = state.environment.coredata.compilers[MachineChoice.HOST]['c']
+
+        compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)])
+        cflags.extend(compiler_flags[0])
+        ldflags.extend(compiler_flags[1])
+        ldflags.extend(compiler_flags[2])
+        if compiler:
+            args += ['--cc=%s' % join_args(compiler.get_exelist())]
+            args += ['--ld=%s' % join_args(compiler.get_linker_exelist())]
+        if cflags:
+            args += ['--cflags=%s' % join_args(cflags)]
+        if ldflags:
+            args += ['--ldflags=%s' % join_args(ldflags)]
+
+        return args
+
+    @noKwargs
+    def gtkdoc_html_dir(self, state, args, kwargs):
+        if len(args) != 1:
+            raise MesonException('Must have exactly one argument.')
+        modulename = args[0]
+        if not isinstance(modulename, str):
+            raise MesonException('Argument must be a string')
+        return os.path.join('share/gtk-doc/html', modulename)
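+        # For example (with a hypothetical module name),
+        # gnome.gtkdoc_html_dir('mylib') evaluates to 'share/gtk-doc/html/mylib'.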
+
+    @staticmethod
+    def _unpack_args(arg, kwarg_name, kwargs, expand_file_state=None):
+        if kwarg_name not in kwargs:
+            return []
+
+        new_args = mesonlib.extract_as_list(kwargs, kwarg_name)
+        args = []
+        for i in new_args:
+            if expand_file_state and isinstance(i, mesonlib.File):
+                i = i.absolute_path(expand_file_state.environment.get_source_dir(), expand_file_state.environment.get_build_dir())
+            elif expand_file_state and isinstance(i, str):
+                i = os.path.join(expand_file_state.environment.get_source_dir(), expand_file_state.subdir, i)
+            elif not isinstance(i, str):
+                raise MesonException(kwarg_name + ' values must be strings.')
+            args.append(i)
+
+        if args:
+            return [arg + '@@'.join(args)]
+
+        return []
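+    # For example (with hypothetical values), _unpack_args('--htmlargs=',
+    # 'html_args', {'html_args': ['a', 'b']}) yields ['--htmlargs=a@@b'].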
+
+    def _get_autocleanup_args(self, kwargs, glib_version):
+        if not mesonlib.version_compare(glib_version, '>= 2.49.1'):
+            # Warn if requested, silently disable if not
+            if 'autocleanup' in kwargs:
+                mlog.warning('Glib version ({}) is too old to support the \'autocleanup\' '
+                             'kwarg, need 2.49.1 or newer'.format(glib_version))
+            return []
+        autocleanup = kwargs.pop('autocleanup', 'all')
+        values = ('none', 'objects', 'all')
+        if autocleanup not in values:
+            raise MesonException('gdbus_codegen does not support {!r} as an autocleanup value, '
+                                 'must be one of: {!r}'.format(autocleanup, ', '.join(values)))
+        return ['--c-generate-autocleanup', autocleanup]
+
+    @FeatureNewKwargs('build target', '0.46.0', ['install_header', 'install_dir', 'sources'])
+    @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+    @FeatureNewKwargs('build target', '0.47.0', ['extra_args', 'autocleanup'])
+    @permittedKwargs({'interface_prefix', 'namespace', 'extra_args', 'autocleanup', 'object_manager', 'build_by_default',
+                      'annotations', 'docbook', 'install_header', 'install_dir', 'sources'})
+    def gdbus_codegen(self, state, args, kwargs):
+        if len(args) not in (1, 2):
+            raise MesonException('gdbus_codegen takes at most two arguments, name and xml file.')
+        namebase = args[0]
+        xml_files = args[1:]
+        cmd = [state.find_program('gdbus-codegen')]
+        extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', []))
+        cmd += extra_args
+        # Autocleanup supported?
+        glib_version = self._get_native_glib_version(state)
+        cmd += self._get_autocleanup_args(kwargs, glib_version)
+        if 'interface_prefix' in kwargs:
+            cmd += ['--interface-prefix', kwargs.pop('interface_prefix')]
+        if 'namespace' in kwargs:
+            cmd += ['--c-namespace', kwargs.pop('namespace')]
+        if kwargs.get('object_manager', False):
+            cmd += ['--c-generate-object-manager']
+        if 'sources' in kwargs:
+            xml_files += mesonlib.listify(kwargs.pop('sources'))
+        build_by_default = kwargs.get('build_by_default', False)
+
+        # Annotations are a bit ugly in that they are a list of lists of strings...
+        annotations = kwargs.pop('annotations', [])
+        if not isinstance(annotations, list):
+            raise MesonException('annotations takes a list')
+        if annotations and not isinstance(annotations[0], list):
+            annotations = [annotations]
+
+        for annotation in annotations:
+            if len(annotation) != 3 or not all(isinstance(i, str) for i in annotation):
+                raise MesonException('Annotations must be made up of 3 strings for ELEMENT, KEY, and VALUE')
+            cmd += ['--annotate'] + annotation
+
+        targets = []
+        install_header = kwargs.get('install_header', False)
+        install_dir = kwargs.get('install_dir', state.environment.coredata.get_option(mesonlib.OptionKey('includedir')))
+
+        output = namebase + '.c'
+        # Added in https://gitlab.gnome.org/GNOME/glib/commit/e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816 (2.55.2)
+        # Fixed in https://gitlab.gnome.org/GNOME/glib/commit/cd1f82d8fc741a2203582c12cc21b4dacf7e1872 (2.56.2)
+        if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+            custom_kwargs = {'input': xml_files,
+                             'output': output,
+                             'command': cmd + ['--body', '--output', '@OUTPUT@', '@INPUT@'],
+                             'build_by_default': build_by_default
+                             }
+        else:
+            if 'docbook' in kwargs:
+                docbook = kwargs['docbook']
+                if not isinstance(docbook, str):
+                    raise MesonException('docbook value must be a string.')
+
+                cmd += ['--generate-docbook', docbook]
+
+            # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
+            if mesonlib.version_compare(glib_version, '>= 2.51.3'):
+                cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
+            else:
+                self._print_gdbus_warning()
+                cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
+
+            custom_kwargs = {'input': xml_files,
+                             'output': output,
+                             'command': cmd,
+                             'build_by_default': build_by_default
+                             }
+
+        cfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
+        targets.append(cfile_custom_target)
+
+        output = namebase + '.h'
+        if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+            custom_kwargs = {'input': xml_files,
+                             'output': output,
+                             'command': cmd + ['--header', '--output', '@OUTPUT@', '@INPUT@'],
+                             'build_by_default': build_by_default,
+                             'install': install_header,
+                             'install_dir': install_dir
+                             }
+        else:
+            custom_kwargs = {'input': xml_files,
+                             'output': output,
+                             'command': cmd,
+                             'build_by_default': build_by_default,
+                             'install': install_header,
+                             'install_dir': install_dir,
+                             'depends': cfile_custom_target
+                             }
+
+        hfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
+        targets.append(hfile_custom_target)
+
+        if 'docbook' in kwargs:
+            docbook = kwargs['docbook']
+            if not isinstance(docbook, str):
+                raise MesonException('docbook value must be a string.')
+
+            docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@']
+
+            # The docbook output is always ${docbook}-${name_of_xml_file}
+            output = namebase + '-docbook'
+            outputs = []
+            for f in xml_files:
+                outputs.append('{}-{}'.format(docbook, os.path.basename(str(f))))
+
+            if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+                custom_kwargs = {'input': xml_files,
+                                 'output': outputs,
+                                 'command': docbook_cmd,
+                                 'build_by_default': build_by_default
+                                 }
+            else:
+                custom_kwargs = {'input': xml_files,
+                                 'output': outputs,
+                                 'command': cmd,
+                                 'build_by_default': build_by_default,
+                                 'depends': cfile_custom_target
+                                 }
+
+            docbook_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
+            targets.append(docbook_custom_target)
+
+        return ModuleReturnValue(targets, targets)
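+    # Illustrative meson.build usage of gdbus_codegen() (a sketch only; the
+    # interface file name is hypothetical):
+    #   generated = gnome.gdbus_codegen('my-dbus', 'com.example.MyIface.xml',
+    #                                   interface_prefix: 'com.example.',
+    #                                   namespace: 'My')
+    # Returns the generated .c and .h targets, plus docbook targets when the
+    # 'docbook' kwarg is given.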
+
+    @permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir',
+                      'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod',
+                      'fhead', 'fprod', 'ftail', 'vhead', 'vtail', 'depends'})
+    def mkenums(self, state, args, kwargs):
+        if len(args) != 1:
+            raise MesonException('Mkenums requires one positional argument.')
+        basename = args[0]
+
+        if 'sources' not in kwargs:
+            raise MesonException('Missing keyword argument "sources".')
+        sources = kwargs.pop('sources')
+        if isinstance(sources, str):
+            sources = [sources]
+        elif not isinstance(sources, list):
+            raise MesonException(
+                'Sources keyword argument must be a string or array.')
+
+        cmd = []
+        known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail',
+                        'identifier_prefix', 'symbol_prefix', 'template',
+                        'vhead', 'vprod', 'vtail']
+        known_custom_target_kwargs = ['install_dir', 'build_always',
+                                      'depends', 'depend_files']
+        c_template = h_template = None
+        install_header = False
+        for arg, value in kwargs.items():
+            if arg == 'sources':
+                raise AssertionError("sources should've already been handled")
+            elif arg == 'c_template':
+                c_template = value
+                if isinstance(c_template, mesonlib.File):
+                    c_template = c_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
+                if 'template' in kwargs:
+                    raise MesonException('Mkenums does not accept both '
+                                         'c_template and template keyword '
+                                         'arguments at the same time.')
+            elif arg == 'h_template':
+                h_template = value
+                if isinstance(h_template, mesonlib.File):
+                    h_template = h_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
+                if 'template' in kwargs:
+                    raise MesonException('Mkenums does not accept both '
+                                         'h_template and template keyword '
+                                         'arguments at the same time.')
+            elif arg == 'install_header':
+                install_header = value
+            elif arg in known_kwargs:
+                cmd += ['--' + arg.replace('_', '-'), value]
+            elif arg not in known_custom_target_kwargs:
+                raise MesonException(
+                    f'Mkenums does not take a {arg} keyword argument.')
+        cmd = [state.find_program(['glib-mkenums', 'mkenums'])] + cmd
+        custom_kwargs = {}
+        for arg in known_custom_target_kwargs:
+            if arg in kwargs:
+                custom_kwargs[arg] = kwargs[arg]
+
+        targets = []
+
+        if h_template is not None:
+            h_output = os.path.basename(os.path.splitext(h_template)[0])
+            # We always set template as the first element in the source array
+            # so --template consumes it.
+            h_cmd = cmd + ['--template', '@INPUT@']
+            h_sources = [h_template] + sources
+            custom_kwargs['install'] = install_header
+            if 'install_dir' not in custom_kwargs:
+                custom_kwargs['install_dir'] = \
+                    state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+            h_target = self._make_mkenum_custom_target(state, h_sources,
+                                                       h_output, h_cmd,
+                                                       custom_kwargs)
+            targets.append(h_target)
+
+        if c_template is not None:
+            c_output = os.path.basename(os.path.splitext(c_template)[0])
+            # We always set template as the first element in the source array
+            # so --template consumes it.
+            c_cmd = cmd + ['--template', '@INPUT@']
+            c_sources = [c_template] + sources
+            # Never install the C file. Complain on bug tracker if you need it.
+            custom_kwargs['install'] = False
+            if h_template is not None:
+                if 'depends' in custom_kwargs:
+                    custom_kwargs['depends'] += [h_target]
+                else:
+                    custom_kwargs['depends'] = h_target
+            c_target = self._make_mkenum_custom_target(state, c_sources,
+                                                       c_output, c_cmd,
+                                                       custom_kwargs)
+            targets.insert(0, c_target)
+
+        if c_template is None and h_template is None:
+            generic_cmd = cmd + ['@INPUT@']
+            custom_kwargs['install'] = install_header
+            if 'install_dir' not in custom_kwargs:
+                custom_kwargs['install_dir'] = \
+                    state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+            target = self._make_mkenum_custom_target(state, sources, basename,
+                                                     generic_cmd, custom_kwargs)
+            return ModuleReturnValue(target, [target])
+        elif len(targets) == 1:
+            return ModuleReturnValue(targets[0], [targets[0]])
+        else:
+            return ModuleReturnValue(targets, targets)
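+    # Usage sketch (hypothetical meson.build snippet, names are illustrative):
+    #   gnome.mkenums('foo-enums',
+    #     sources: ['foo.h'],
+    #     h_template: 'foo-enums.h.template',
+    #     c_template: 'foo-enums.c.template',
+    #     install_header: true)
+    # Because the output name is os.path.basename(os.path.splitext(template)[0]),
+    # 'foo-enums.h.template' yields 'foo-enums.h' and 'foo-enums.c.template'
+    # yields 'foo-enums.c'; the positional name is only used when no templates
+    # are given.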
+
+    @FeatureNew('gnome.mkenums_simple', '0.42.0')
+    def mkenums_simple(self, state, args, kwargs):
+        hdr_filename = args[0] + '.h'
+        body_filename = args[0] + '.c'
+
+        # not really needed, just for sanity checking
+        forbidden_kwargs = ['c_template', 'h_template', 'eprod', 'fhead',
+                            'fprod', 'ftail', 'vhead', 'vtail', 'comments']
+        for arg in forbidden_kwargs:
+            if arg in kwargs:
+                raise MesonException(f'mkenums_simple() does not take a {arg} keyword argument')
+
+        # kwargs to pass as-is from mkenums_simple() to mkenums()
+        shared_kwargs = ['sources', 'install_header', 'install_dir',
+                         'identifier_prefix', 'symbol_prefix']
+        mkenums_kwargs = {}
+        for arg in shared_kwargs:
+            if arg in kwargs:
+                mkenums_kwargs[arg] = kwargs[arg]
+
+        # .c file generation
+        c_file_kwargs = copy.deepcopy(mkenums_kwargs)
+        if 'sources' not in kwargs:
+            raise MesonException('Missing keyword argument "sources".')
+        sources = kwargs['sources']
+        if isinstance(sources, str):
+            sources = [sources]
+        elif not isinstance(sources, list):
+            raise MesonException(
+                'Sources keyword argument must be a string or array.')
+
+        # The `install_header` argument will be used by mkenums() when
+        # not using template files, so we need to forcibly unset it
+        # when generating the C source file, otherwise we will end up
+        # installing it
+        c_file_kwargs['install_header'] = False
+
+        header_prefix = kwargs.get('header_prefix', '')
+        decl_decorator = kwargs.get('decorator', '')
+        func_prefix = kwargs.get('function_prefix', '')
+        body_prefix = kwargs.get('body_prefix', '')
+
+        # Maybe we should write our own template files into the build dir
+        # instead, but that seems like much more work, nice as it would be.
+        fhead = ''
+        if body_prefix != '':
+            fhead += '%s\n' % body_prefix
+        fhead += '#include "%s"\n' % hdr_filename
+        for hdr in sources:
+            fhead += '#include "%s"\n' % os.path.basename(str(hdr))
+        fhead += '''
+#define C_ENUM(v) ((gint) v)
+#define C_FLAGS(v) ((guint) v)
+'''
+        c_file_kwargs['fhead'] = fhead
+
+        c_file_kwargs['fprod'] = '''
+/* enumerations from "@basename@" */
+'''
+
+        c_file_kwargs['vhead'] = '''
+GType
+%s@enum_name@_get_type (void)
+{
+  static gsize gtype_id = 0;
+  static const G@Type@Value values[] = {''' % func_prefix
+
+        c_file_kwargs['vprod'] = '    { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },'
+
+        c_file_kwargs['vtail'] = '''    { 0, NULL, NULL }
+  };
+  if (g_once_init_enter (&gtype_id)) {
+    GType new_type = g_@type@_register_static (g_intern_static_string ("@EnumName@"), values);
+    g_once_init_leave (&gtype_id, new_type);

+  }
+  return (GType) gtype_id;
+}'''
+
+        rv = self.mkenums(state, [body_filename], c_file_kwargs)
+        c_file = rv.return_value
+
+        # .h file generation
+        h_file_kwargs = copy.deepcopy(mkenums_kwargs)
+
+        h_file_kwargs['fhead'] = '''#pragma once
+
+#include <glib-object.h>
+{}
+
+G_BEGIN_DECLS
+'''.format(header_prefix)
+
+        h_file_kwargs['fprod'] = '''
+/* enumerations from "@basename@" */
+'''
+
+        h_file_kwargs['vhead'] = '''
+{}
+GType {}@enum_name@_get_type (void);
+#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ ({}@enum_name@_get_type())'''.format(decl_decorator, func_prefix, func_prefix)
+
+        h_file_kwargs['ftail'] = '''
+G_END_DECLS'''
+
+        rv = self.mkenums(state, [hdr_filename], h_file_kwargs)
+        h_file = rv.return_value
+
+        return ModuleReturnValue([c_file, h_file], [c_file, h_file])
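+        # Template note (illustrative, see glib-mkenums documentation for the exact
+        # rules): the @...@ tokens in the fhead/vhead/vprod/vtail strings above are
+        # substituted per enum by glib-mkenums, e.g. @EnumName@/@enum_name@ for the
+        # CamelCase and lower_case type name, @Type@/@type@/@TYPE@ for Enum/Flags in
+        # different cases, and @VALUENAME@/@valuenick@ for each enum member.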
+
+    @staticmethod
+    def _make_mkenum_custom_target(state, sources, output, cmd, kwargs):
+        custom_kwargs = {
+            'input': sources,
+            'output': output,
+            'capture': True,
+            'command': cmd
+        }
+        custom_kwargs.update(kwargs)
+        return build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs,
+                                  # https://github.com/mesonbuild/meson/issues/973
+                                  absolute_paths=True)
+
+    @permittedKwargs({'sources', 'prefix', 'install_header', 'install_dir', 'stdinc',
+                      'nostdinc', 'internal', 'skip_source', 'valist_marshallers',
+                      'extra_args'})
+    def genmarshal(self, state, args, kwargs):
+        if len(args) != 1:
+            raise MesonException(
+                'Genmarshal requires one positional argument.')
+        output = args[0]
+
+        if 'sources' not in kwargs:
+            raise MesonException('Missing keyword argument "sources".')
+        sources = kwargs.pop('sources')
+        if isinstance(sources, str):
+            sources = [sources]
+        elif not isinstance(sources, list):
+            raise MesonException(
+                'Sources keyword argument must be a string or array.')
+
+        new_genmarshal = mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3')
+
+        cmd = [state.find_program('glib-genmarshal')]
+        known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc',
+                        'valist_marshallers', 'extra_args']
+        known_custom_target_kwargs = ['build_always', 'depends',
+                                      'depend_files', 'install_dir',
+                                      'install_header']
+        for arg, value in kwargs.items():
+            if arg == 'prefix':
+                cmd += ['--prefix', value]
+            elif arg == 'extra_args':
+                if new_genmarshal:
+                    cmd += mesonlib.stringlistify(value)
+                else:
+                    mlog.warning('The current version of GLib does not support extra arguments \n'
+                                 'for glib-genmarshal. You need at least GLib 2.53.3. See ',
+                                 mlog.bold('https://github.com/mesonbuild/meson/pull/2049'))
+            elif arg in known_kwargs and value:
+                cmd += ['--' + arg.replace('_', '-')]
+            elif arg not in known_custom_target_kwargs:
+                raise MesonException(
+                    'Genmarshal does not take a {} keyword argument.'.format(
+                        arg))
+
+        install_header = kwargs.pop('install_header', False)
+        install_dir = kwargs.pop('install_dir', [])
+
+        custom_kwargs = {
+            'input': sources,
+        }
+
+        # https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf
+        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'):
+            cmd += ['--output', '@OUTPUT@']
+        else:
+            custom_kwargs['capture'] = True
+
+        for arg in known_custom_target_kwargs:
+            if arg in kwargs:
+                custom_kwargs[arg] = kwargs[arg]
+
+        header_file = output + '.h'
+        custom_kwargs['command'] = cmd + ['--body', '@INPUT@']
+        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'):
+            # Silence any warnings about missing prototypes
+            custom_kwargs['command'] += ['--include-header', header_file]
+        custom_kwargs['output'] = output + '.c'
+        body = build.CustomTarget(output + '_c', state.subdir, state.subproject, custom_kwargs)
+
+        custom_kwargs['install'] = install_header
+        custom_kwargs['install_dir'] = install_dir
+        if new_genmarshal:
+            cmd += ['--pragma-once']
+        custom_kwargs['command'] = cmd + ['--header', '@INPUT@']
+        custom_kwargs['output'] = header_file
+        header = build.CustomTarget(output + '_h', state.subdir, state.subproject, custom_kwargs)
+
+        rv = [body, header]
+        return ModuleReturnValue(rv, rv)
+
+    @staticmethod
+    def _vapi_args_to_command(prefix, variable, kwargs, accept_vapi=False):
+        arg_list = mesonlib.extract_as_list(kwargs, variable)
+        ret = []
+        for arg in arg_list:
+            if not isinstance(arg, str):
+                types = 'strings or InternalDependencies' if accept_vapi else 'strings'
+                raise MesonException(f'All {variable} must be {types}')
+            ret.append(prefix + arg)
+        return ret
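+    # Example (illustrative values): calling
+    #   _vapi_args_to_command('--vapidir=', 'vapi_dirs', {'vapi_dirs': ['vapi', 'extra']})
+    # returns ['--vapidir=vapi', '--vapidir=extra'].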
+
+    def _extract_vapi_packages(self, state, kwargs):
+        '''
+        Packages are special because we need to:
+        - Get a list of packages for the .deps file
+        - Get a list of depends for any VapiTargets
+        - Get package name from VapiTargets
+        - Add include dirs for any VapiTargets
+        '''
+        arg_list = kwargs.get('packages')
+        if not arg_list:
+            return [], [], [], []
+        arg_list = mesonlib.listify(arg_list)
+        vapi_depends = []
+        vapi_packages = []
+        vapi_includes = []
+        ret = []
+        remaining_args = []
+        for arg in arg_list:
+            if isinstance(arg, InternalDependency):
+                targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
+                for target in targets:
+                    srcdir = os.path.join(state.environment.get_source_dir(),
+                                          target.get_subdir())
+                    outdir = os.path.join(state.environment.get_build_dir(),
+                                          target.get_subdir())
+                    outfile = target.get_outputs()[0][:-5] # Strip .vapi
+                    ret.append('--vapidir=' + outdir)
+                    ret.append('--girdir=' + outdir)
+                    ret.append('--pkg=' + outfile)
+                    vapi_depends.append(target)
+                    vapi_packages.append(outfile)
+                    vapi_includes.append(srcdir)
+            else:
+                vapi_packages.append(arg)
+                remaining_args.append(arg)
+
+        kwargs['packages'] = remaining_args
+        vapi_args = ret + self._vapi_args_to_command('--pkg=', 'packages', kwargs, accept_vapi=True)
+        return vapi_args, vapi_depends, vapi_packages, vapi_includes
+
+    def _generate_deps(self, state, library, packages, install_dir):
+        outdir = state.environment.scratch_dir
+        fname = os.path.join(outdir, library + '.deps')
+        with open(fname, 'w', encoding='utf-8') as ofile:
+            for package in packages:
+                ofile.write(package + '\n')
+        return build.Data([mesonlib.File(True, outdir, fname)], install_dir, None, state.subproject)
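+    # File-format note (illustrative values): for library 'foo-1.0' with packages
+    # ['gio-2.0', 'gobject-introspection-1.0'], this writes 'foo-1.0.deps' in the
+    # scratch dir with one package name per line and returns it as installable data.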
+
+    def _get_vapi_link_with(self, target):
+        link_with = []
+        for dep in target.get_target_dependencies():
+            if isinstance(dep, build.SharedLibrary):
+                link_with.append(dep)
+            elif isinstance(dep, GirTarget):
+                link_with += self._get_vapi_link_with(dep)
+        return link_with
+
+    @permittedKwargs({'sources', 'packages', 'metadata_dirs', 'gir_dirs',
+                      'vapi_dirs', 'install', 'install_dir'})
+    def generate_vapi(self, state, args, kwargs):
+        if len(args) != 1:
+            raise MesonException('The library name is required')
+
+        if not isinstance(args[0], str):
+            raise MesonException('The first argument must be the name of the library')
+        created_values = []
+
+        library = args[0]
+        build_dir = os.path.join(state.environment.get_build_dir(), state.subdir)
+        source_dir = os.path.join(state.environment.get_source_dir(), state.subdir)
+        pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs)
+        if 'VAPIGEN' in os.environ:
+            cmd = [state.find_program(os.environ['VAPIGEN'])]
+        else:
+            cmd = [state.find_program('vapigen')]
+        cmd += ['--quiet', '--library=' + library, '--directory=' + build_dir]
+        cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs)
+        cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs)
+        cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs)
+        cmd += pkg_cmd
+        cmd += ['--metadatadir=' + source_dir]
+
+        if 'sources' not in kwargs:
+            raise MesonException('sources are required to generate the vapi file')
+
+        inputs = mesonlib.extract_as_list(kwargs, 'sources')
+
+        link_with = []
+        for i in inputs:
+            if isinstance(i, str):
+                cmd.append(os.path.join(source_dir, i))
+            elif isinstance(i, GirTarget):
+                link_with += self._get_vapi_link_with(i)
+                subdir = os.path.join(state.environment.get_build_dir(),
+                                      i.get_subdir())
+                gir_file = os.path.join(subdir, i.get_outputs()[0])
+                cmd.append(gir_file)
+            else:
+                raise MesonException('Input must be a str or GirTarget')
+
+        vapi_output = library + '.vapi'
+        custom_kwargs = {
+            'command': cmd,
+            'input': inputs,
+            'output': vapi_output,
+            'depends': vapi_depends,
+        }
+        install_dir = kwargs.get('install_dir',
+                                 os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('datadir')),
+                                              'vala', 'vapi'))
+        if kwargs.get('install'):
+            custom_kwargs['install'] = kwargs['install']
+            custom_kwargs['install_dir'] = install_dir
+
+            # We shouldn't need this locally but we install it
+            deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
+            created_values.append(deps_target)
+        vapi_target = VapiTarget(vapi_output, state.subdir, state.subproject, custom_kwargs)
+
+        # To make this work out of the box we need to:
+        # - link with the correct library
+        # - include the vapi and dependent vapi files in sources
+        # - add relevant directories to include dirs
+        incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
+        sources = [vapi_target] + vapi_depends
+        rv = InternalDependency(None, incs, [], [], link_with, [], sources, [], {})
+        created_values.append(rv)
+        return ModuleReturnValue(rv, created_values)
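+    # Usage sketch (hypothetical meson.build snippet, names are illustrative):
+    #   foo_vapi = gnome.generate_vapi('foo-1.0',
+    #     sources: ['Foo-1.0.gir'],
+    #     packages: ['gio-2.0'],
+    #     install: true)
+    # The returned value is a dependency object combining the generated .vapi
+    # target, its include directories and the libraries to link with.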
+
+def initialize(*args, **kwargs):
+    mod = GnomeModule(*args, **kwargs)
+    mod.interpreter.append_holder_map(GResourceTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(GResourceHeaderTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(GirTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(TypelibTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(VapiTarget, interpreter.CustomTargetHolder)
+    return mod
diff --git a/meson/mesonbuild/modules/hotdoc.py b/meson/mesonbuild/modules/hotdoc.py
new file mode 100644
index 000000000..4dccd067a
--- /dev/null
+++ b/meson/mesonbuild/modules/hotdoc.py
@@ -0,0 +1,432 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for generating documentation using hotdoc'''
+
+import os
+from collections import OrderedDict
+
+from mesonbuild import mesonlib
+from mesonbuild import mlog, build
+from mesonbuild.coredata import MesonException
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..dependencies import Dependency, InternalDependency
+from ..interpreterbase import FeatureNew, InvalidArguments, noPosargs, noKwargs
+from ..interpreter import CustomTargetHolder
+from ..programs import ExternalProgram
+
+
+def ensure_list(value):
+    if not isinstance(value, list):
+        return [value]
+    return value
+
+
+MIN_HOTDOC_VERSION = '0.8.100'
+
+
+class HotdocTargetBuilder:
+    def __init__(self, name, state, hotdoc, interpreter, kwargs):
+        self.hotdoc = hotdoc
+        self.build_by_default = kwargs.pop('build_by_default', False)
+        self.kwargs = kwargs
+        self.name = name
+        self.state = state
+        self.interpreter = interpreter
+        self.include_paths = OrderedDict()
+
+        self.builddir = state.environment.get_build_dir()
+        self.sourcedir = state.environment.get_source_dir()
+        self.subdir = state.subdir
+        self.build_command = state.environment.get_build_command()
+
+        self.cmd = ['conf', '--project-name', name, "--disable-incremental-build",
+                    '--output', os.path.join(self.builddir, self.subdir, self.name + '-doc')]
+
+        self._extra_extension_paths = set()
+        self.extra_assets = set()
+        self._dependencies = []
+        self._subprojects = []
+
+    def process_known_arg(self, option, types, argname=None,
+                          value_processor=None, mandatory=False,
+                          force_list=False):
+        if not argname:
+            argname = option.strip("-").replace("-", "_")
+
+        value, _ = self.get_value(
+            types, argname, None, value_processor, mandatory, force_list)
+
+        self.set_arg_value(option, value)
+
+    def set_arg_value(self, option, value):
+        if value is None:
+            return
+
+        if isinstance(value, bool):
+            if value:
+                self.cmd.append(option)
+        elif isinstance(value, list):
+            # Do not do anything on empty lists
+            if value:
+                # https://bugs.python.org/issue9334 (from 2010 :( )
+                # The syntax with nargs=+ is inherently ambiguous
+                # A workaround for this case is to simply prefix with a space
+                # every value starting with a dash
+                escaped_value = []
+                for e in value:
+                    if isinstance(e, str) and e.startswith('-'):
+                        escaped_value += [' %s' % e]
+                    else:
+                        escaped_value += [e]
+                if option:
+                    self.cmd.extend([option] + escaped_value)
+                else:
+                    self.cmd.extend(escaped_value)
+        else:
+            # argparse gets confused if value(s) start with a dash.
+            # When an option expects a single value, the unambiguous way
+            # to specify it is with =
+            if isinstance(value, str):
+                self.cmd.extend([f'{option}={value}'])
+            else:
+                self.cmd.extend([option, value])
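+    # Escaping sketch ('--foo' is a hypothetical option): a list value
+    # ['-something'] is emitted as ['--foo', ' -something'], so argparse on the
+    # hotdoc side does not mistake the leading dash for a new option, while a
+    # plain string value becomes a single '--foo=value' argument.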
+
+    def check_extra_arg_type(self, arg, value):
+        if isinstance(value, list):
+            for v in value:
+                self.check_extra_arg_type(arg, v)
+            return
+
+        valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.BuildTarget)
+        if not isinstance(value, valid_types):
+            raise InvalidArguments('Argument "{}={}" should be of type: {}.'.format(
+                arg, value, [t.__name__ for t in valid_types]))
+
+    def process_extra_args(self):
+        for arg, value in self.kwargs.items():
+            option = "--" + arg.replace("_", "-")
+            self.check_extra_arg_type(arg, value)
+            self.set_arg_value(option, value)
+
+    def get_value(self, types, argname, default=None, value_processor=None,
+                  mandatory=False, force_list=False):
+        if not isinstance(types, list):
+            types = [types]
+        try:
+            uvalue = value = self.kwargs.pop(argname)
+            if value_processor:
+                value = value_processor(value)
+
+            for t in types:
+                if isinstance(value, t):
+                    if force_list and not isinstance(value, list):
+                        return [value], uvalue
+                    return value, uvalue
+            raise MesonException("%s field value %s is not valid,"
+                                 " valid types are %s" % (argname, value,
+                                                          types))
+        except KeyError:
+            if mandatory:
+                raise MesonException("%s mandatory field not found" % argname)
+
+            if default is not None:
+                return default, default
+
+        return None, None
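+    # Return-value sketch (illustrative): get_value(str, 'project_version') yields
+    # a (processed, raw) pair when the kwarg was supplied, (default, default) when
+    # it is absent but a default was given, and (None, None) otherwise; a missing
+    # mandatory kwarg raises MesonException instead.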
+
+    def setup_extension_paths(self, paths):
+        if not isinstance(paths, list):
+            paths = [paths]
+
+        for path in paths:
+            self.add_extension_paths([path])
+
+        return []
+
+    def add_extension_paths(self, paths):
+        for path in paths:
+            if path in self._extra_extension_paths:
+                continue
+
+            self._extra_extension_paths.add(path)
+            self.cmd.extend(["--extra-extension-path", path])
+
+    def process_extra_extension_paths(self):
+        self.get_value([list, str], 'extra_extensions_paths',
+                       default="", value_processor=self.setup_extension_paths)
+
+    def replace_dirs_in_string(self, string):
+        return string.replace("@SOURCE_ROOT@", self.sourcedir).replace("@BUILD_ROOT@", self.builddir)
+
+    def process_gi_c_source_roots(self):
+        if self.hotdoc.run_hotdoc(['--has-extension=gi-extension']) != 0:
+            return
+
+        value, _ = self.get_value([list, str], 'gi_c_source_roots', default=[], force_list=True)
+        value.extend([
+            os.path.join(self.state.environment.get_source_dir(),
+                         self.interpreter.subproject_dir, self.state.subproject),
+            os.path.join(self.state.environment.get_build_dir(), self.interpreter.subproject_dir, self.state.subproject)
+        ])
+
+        self.cmd += ['--gi-c-source-roots'] + value
+
+    def process_dependencies(self, deps):
+        cflags = set()
+        for dep in mesonlib.listify(ensure_list(deps)):
+            if isinstance(dep, InternalDependency):
+                inc_args = self.state.get_include_args(dep.include_directories)
+                cflags.update([self.replace_dirs_in_string(x)
+                               for x in inc_args])
+                cflags.update(self.process_dependencies(dep.libraries))
+                cflags.update(self.process_dependencies(dep.sources))
+                cflags.update(self.process_dependencies(dep.ext_deps))
+            elif isinstance(dep, Dependency):
+                cflags.update(dep.get_compile_args())
+            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+                self._dependencies.append(dep)
+                for incd in dep.get_include_dirs():
+                    cflags.update(incd.get_incdirs())
+            elif isinstance(dep, HotdocTarget):
+                # Recurse in hotdoc target dependencies
+                self.process_dependencies(dep.get_target_dependencies())
+                self._subprojects.extend(dep.subprojects)
+                self.process_dependencies(dep.subprojects)
+                self.add_include_path(os.path.join(self.builddir, dep.hotdoc_conf.subdir))
+                self.cmd += ['--extra-assets=' + p for p in dep.extra_assets]
+                self.add_extension_paths(dep.extra_extension_paths)
+            elif isinstance(dep, build.CustomTarget) or isinstance(dep, build.BuildTarget):
+                self._dependencies.append(dep)
+
+        return [f.strip('-I') for f in cflags]
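+    # Return-value note (illustrative value): compile args such as
+    # '-I/usr/include/glib-2.0' come back with the leading '-I' stripped,
+    # e.g. ['/usr/include/glib-2.0'], ready to be reused as plain include paths.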
+
+    def process_extra_assets(self):
+        self._extra_assets, _ = self.get_value([str, list], 'extra_assets', default=[], force_list=True)
+        for assets_path in self._extra_assets:
+            self.cmd.extend(["--extra-assets", assets_path])
+
+    def process_subprojects(self):
+        _, value = self.get_value([
+            list, HotdocTarget], argname="subprojects",
+            force_list=True, value_processor=self.process_dependencies)
+
+        if value is not None:
+            self._subprojects.extend(value)
+
+    def flatten_config_command(self):
+        cmd = []
+        for arg in mesonlib.listify(self.cmd, flatten=True):
+            if isinstance(arg, mesonlib.File):
+                arg = arg.absolute_path(self.state.environment.get_source_dir(),
+                                        self.state.environment.get_build_dir())
+            elif isinstance(arg, build.IncludeDirs):
+                for inc_dir in arg.get_incdirs():
+                    cmd.append(os.path.join(self.sourcedir, arg.get_curdir(), inc_dir))
+                    cmd.append(os.path.join(self.builddir, arg.get_curdir(), inc_dir))
+
+                continue
+            elif isinstance(arg, build.CustomTarget) or isinstance(arg, build.BuildTarget):
+                self._dependencies.append(arg)
+                arg = self.interpreter.backend.get_target_filename_abs(arg)
+
+            cmd.append(arg)
+
+        return cmd
+
+    def generate_hotdoc_config(self):
+        cwd = os.path.abspath(os.curdir)
+        ncwd = os.path.join(self.sourcedir, self.subdir)
+        mlog.log('Generating Hotdoc configuration for: ', mlog.bold(self.name))
+        os.chdir(ncwd)
+        self.hotdoc.run_hotdoc(self.flatten_config_command())
+        os.chdir(cwd)
+
+    def ensure_file(self, value):
+        if isinstance(value, list):
+            res = []
+            for val in value:
+                res.append(self.ensure_file(val))
+            return res
+
+        if not isinstance(value, mesonlib.File):
+            return mesonlib.File.from_source_file(self.sourcedir, self.subdir, value)
+
+        return value
+
+    def ensure_dir(self, value):
+        if os.path.isabs(value):
+            _dir = value
+        else:
+            _dir = os.path.join(self.sourcedir, self.subdir, value)
+
+        if not os.path.isdir(_dir):
+            raise InvalidArguments('"%s" is not a directory.' % _dir)
+
+        return os.path.relpath(_dir, os.path.join(self.builddir, self.subdir))
+
+    def check_forbidden_args(self):
+        for arg in ['conf_file']:
+            if arg in self.kwargs:
+                raise InvalidArguments('Argument "%s" is forbidden.' % arg)
+
+    def add_include_path(self, path):
+        self.include_paths[path] = path
+
+    def make_targets(self):
+        self.check_forbidden_args()
+        file_types = (str, mesonlib.File)
+        self.process_known_arg("--index", file_types, mandatory=True, value_processor=self.ensure_file)
+        self.process_known_arg("--project-version", str, mandatory=True)
+        self.process_known_arg("--sitemap", file_types, mandatory=True, value_processor=self.ensure_file)
+        self.process_known_arg("--html-extra-theme", str, value_processor=self.ensure_dir)
+        self.process_known_arg(None, list, "include_paths", force_list=True,
+                               value_processor=lambda x: [self.add_include_path(self.ensure_dir(v)) for v in ensure_list(x)])
+        self.process_known_arg('--c-include-directories',
+                               [Dependency, build.StaticLibrary, build.SharedLibrary, list], argname="dependencies",
+                               force_list=True, value_processor=self.process_dependencies)
+        self.process_gi_c_source_roots()
+        self.process_extra_assets()
+        self.process_extra_extension_paths()
+        self.process_subprojects()
+
+        install, _ = self.get_value(bool, "install", mandatory=False)
+        self.process_extra_args()
+
+        fullname = self.name + '-doc'
+        hotdoc_config_name = fullname + '.json'
+        hotdoc_config_path = os.path.join(
+            self.builddir, self.subdir, hotdoc_config_name)
+        with open(hotdoc_config_path, 'w', encoding='utf-8') as f:
+            f.write('{}')
+
+        self.cmd += ['--conf-file', hotdoc_config_path]
+        self.add_include_path(os.path.join(self.builddir, self.subdir))
+        self.add_include_path(os.path.join(self.sourcedir, self.subdir))
+
+        depfile = os.path.join(self.builddir, self.subdir, self.name + '.deps')
+        self.cmd += ['--deps-file-dest', depfile]
+
+        for path in self.include_paths.keys():
+            self.cmd.extend(['--include-path', path])
+
+        if self.state.environment.coredata.get_option(mesonlib.OptionKey('werror', subproject=self.state.subproject)):
+            self.cmd.append('--fatal-warning')
+        self.generate_hotdoc_config()
+
+        target_cmd = self.build_command + ["--internal", "hotdoc"] + \
+            self.hotdoc.get_command() + ['run', '--conf-file', hotdoc_config_name] + \
+            ['--builddir', os.path.join(self.builddir, self.subdir)]
+
+        target = HotdocTarget(fullname,
+                              subdir=self.subdir,
+                              subproject=self.state.subproject,
+                              hotdoc_conf=mesonlib.File.from_built_file(
+                                  self.subdir, hotdoc_config_name),
+                              extra_extension_paths=self._extra_extension_paths,
+                              extra_assets=self._extra_assets,
+                              subprojects=self._subprojects,
+                              command=target_cmd,
+                              depends=self._dependencies,
+                              output=fullname,
+                              depfile=os.path.basename(depfile),
+                              build_by_default=self.build_by_default)
+
+        install_script = None
+        if install is True:
+            install_script = self.state.backend.get_executable_serialisation(self.build_command + [
+                "--internal", "hotdoc",
+                "--install", os.path.join(fullname, 'html'),
+                '--name', self.name,
+                '--builddir', os.path.join(self.builddir, self.subdir)] +
+                self.hotdoc.get_command() +
+                ['run', '--conf-file', hotdoc_config_name])
+
+        return (target, install_script)
+
+
+class HotdocTargetHolder(CustomTargetHolder):
+    def __init__(self, target, interp):
+        super().__init__(target, interp)
+        self.methods.update({'config_path': self.config_path_method})
+
+    @noPosargs
+    @noKwargs
+    def config_path_method(self, *args, **kwargs):
+        conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir,
+                                                          self.interpreter.environment.build_dir)
+        return conf
+
+
+class HotdocTarget(build.CustomTarget):
+    def __init__(self, name, subdir, subproject, hotdoc_conf, extra_extension_paths, extra_assets,
+                 subprojects, **kwargs):
+        super().__init__(name, subdir, subproject, kwargs, absolute_paths=True)
+        self.hotdoc_conf = hotdoc_conf
+        self.extra_extension_paths = extra_extension_paths
+        self.extra_assets = extra_assets
+        self.subprojects = subprojects
+
+    def __getstate__(self):
+        # Make sure we do not try to pickle subprojects
+        res = self.__dict__.copy()
+        res['subprojects'] = []
+
+        return res
+
+
+class HotDocModule(ExtensionModule):
+    @FeatureNew('Hotdoc Module', '0.48.0')
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.hotdoc = ExternalProgram('hotdoc')
+        if not self.hotdoc.found():
+            raise MesonException('hotdoc executable not found')
+
+        try:
+            from hotdoc.run_hotdoc import run  # noqa: F401
+            self.hotdoc.run_hotdoc = run
+        except Exception as e:
+            raise MesonException('hotdoc {} required but not found. ({})'.format(
+                MIN_HOTDOC_VERSION, e))
+        self.methods.update({
+            'has_extensions': self.has_extensions,
+            'generate_doc': self.generate_doc,
+        })
+
+    @noKwargs
+    def has_extensions(self, state, args, kwargs):
+        return self.hotdoc.run_hotdoc(['--has-extension=%s' % extension for extension in args]) == 0
+
+    def generate_doc(self, state, args, kwargs):
+        if len(args) != 1:
+            raise MesonException('One positional argument is'
+                                 ' required for the project name.')
+
+        project_name = args[0]
+        builder = HotdocTargetBuilder(project_name, state, self.hotdoc, self.interpreter, kwargs)
+        target, install_script = builder.make_targets()
+        targets = [target]
+        if install_script:
+            targets.append(install_script)
+
+        return ModuleReturnValue(targets[0], targets)
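+    # Usage sketch (hypothetical meson.build snippet, names are illustrative):
+    #   hotdoc = import('hotdoc')
+    #   doc = hotdoc.generate_doc('my-project',
+    #     project_version: '1.0',
+    #     index: 'index.md',
+    #     sitemap: 'sitemap.txt',
+    #     install: true)
+    # index, project_version and sitemap are the mandatory arguments checked in
+    # make_targets() above.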
+
+
+def initialize(interpreter):
+    mod = HotDocModule(interpreter)
+    mod.interpreter.append_holder_map(HotdocTarget, HotdocTargetHolder)
+    return mod
diff --git a/meson/mesonbuild/modules/i18n.py b/meson/mesonbuild/modules/i18n.py
new file mode 100644
index 000000000..a64838b57
--- /dev/null
+++ b/meson/mesonbuild/modules/i18n.py
@@ -0,0 +1,197 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+
+from os import path
+from .. import coredata, mesonlib, build, mlog
+from ..mesonlib import MesonException
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
+
+PRESET_ARGS = {
+    'glib': [
+        '--from-code=UTF-8',
+        '--add-comments',
+
+        # https://developer.gnome.org/glib/stable/glib-I18N.html
+        '--keyword=_',
+        '--keyword=N_',
+        '--keyword=C_:1c,2',
+        '--keyword=NC_:1c,2',
+        '--keyword=g_dcgettext:2',
+        '--keyword=g_dngettext:2,3',
+        '--keyword=g_dpgettext2:2c,3',
+
+        '--flag=N_:1:pass-c-format',
+        '--flag=C_:2:pass-c-format',
+        '--flag=NC_:2:pass-c-format',
+        '--flag=g_dngettext:2:pass-c-format',
+        '--flag=g_strdup_printf:1:c-format',
+        '--flag=g_string_printf:2:c-format',
+        '--flag=g_string_append_printf:2:c-format',
+        '--flag=g_error_new:3:c-format',
+        '--flag=g_set_error:4:c-format',
+        '--flag=g_markup_printf_escaped:1:c-format',
+        '--flag=g_log:3:c-format',
+        '--flag=g_print:1:c-format',
+        '--flag=g_printerr:1:c-format',
+        '--flag=g_printf:1:c-format',
+        '--flag=g_fprintf:2:c-format',
+        '--flag=g_sprintf:2:c-format',
+        '--flag=g_snprintf:3:c-format',
+    ]
+}
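+# Usage sketch (hypothetical meson.build snippet, names are illustrative):
+#   i18n = import('i18n')
+#   i18n.gettext('mypackage', preset: 'glib', languages: ['de', 'fr'])
+# With preset 'glib', the xgettext keyword and flag arguments listed above are
+# merged with any user-supplied 'args'.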
+
+
+class I18nModule(ExtensionModule):
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'merge_file': self.merge_file,
+            'gettext': self.gettext,
+        })
+
+    @staticmethod
+    def nogettext_warning():
+        mlog.warning('Gettext not found, all translation targets will be ignored.', once=True)
+
+    @staticmethod
+    def _get_data_dirs(state, dirs):
+        """Returns source directories of relative paths"""
+        src_dir = path.join(state.environment.get_source_dir(), state.subdir)
+        return [path.join(src_dir, d) for d in dirs]
+
+    @FeatureNew('i18n.merge_file', '0.37.0')
+    @FeatureNewKwargs('i18n.merge_file', '0.51.0', ['args'])
+    @permittedKwargs(build.CustomTarget.known_kwargs | {'data_dirs', 'po_dir', 'type', 'args'})
+    def merge_file(self, state, args, kwargs):
+        if not shutil.which('xgettext'):
+            self.nogettext_warning()
+            return
+        podir = kwargs.pop('po_dir', None)
+        if not podir:
+            raise MesonException('i18n: po_dir is a required kwarg')
+        podir = path.join(state.build_to_src, state.subdir, podir)
+
+        file_type = kwargs.pop('type', 'xml')
+        VALID_TYPES = ('xml', 'desktop')
+        if file_type not in VALID_TYPES:
+            raise MesonException(f'i18n: "{file_type}" is not a valid type {VALID_TYPES}')
+
+        datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.pop('data_dirs', [])))
+        datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None
+
+        command = state.environment.get_build_command() + [
+            '--internal', 'msgfmthelper',
+            '@INPUT@', '@OUTPUT@', file_type, podir
+        ]
+        if datadirs:
+            command.append(datadirs)
+
+        if 'args' in kwargs:
+            command.append('--')
+            command.append(mesonlib.stringlistify(kwargs.pop('args', [])))
+
+        kwargs['command'] = command
+
+        # We only use this input file to create a name of the custom target.
+        # Thus we can ignore the other entries.
+        inputfile = mesonlib.extract_as_list(kwargs, 'input')[0]
+        if isinstance(inputfile, str):
+            inputfile = mesonlib.File.from_source_file(state.environment.source_dir,
+                                                       state.subdir, inputfile)
+        if isinstance(inputfile, mesonlib.File):
+            # output could be '@BASENAME@' in which case we need to do substitutions
+            # to get a unique target name.
+            output = kwargs['output']
+            ifile_abs = inputfile.absolute_path(state.environment.source_dir,
+                                                state.environment.build_dir)
+            values = mesonlib.get_filenames_templates_dict([ifile_abs], None)
+            outputs = mesonlib.substitute_values([output], values)
+            output = outputs[0]
+            ct = build.CustomTarget(output + '_' + state.subdir.replace('/', '@').replace('\\', '@') + '_merge', state.subdir, state.subproject, kwargs)
+        else:
+            ct = build.CustomTarget(kwargs['output'] + '_merge', state.subdir, state.subproject, kwargs)
+
+        return ModuleReturnValue(ct, [ct])
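+    # Usage sketch (hypothetical meson.build snippet, names are illustrative):
+    #   i18n.merge_file(input: 'foo.desktop.in', output: 'foo.desktop',
+    #                   type: 'desktop', po_dir: 'po',
+    #                   install: true, install_dir: 'share/applications')
+    # This drives the internal 'msgfmthelper' command assembled above to merge
+    # the translations found under po/ into the generated file.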
+
+    @FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset'])
+    @FeatureNewKwargs('i18n.gettext', '0.50.0', ['install_dir'])
+    @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install', 'install_dir'})
+    def gettext(self, state, args, kwargs):
+        if len(args) != 1:
+            raise coredata.MesonException('Gettext requires one positional argument (package name).')
+        if not shutil.which('xgettext'):
+            self.nogettext_warning()
+            return
+        packagename = args[0]
+        languages = mesonlib.stringlistify(kwargs.get('languages', []))
+        datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.get('data_dirs', [])))
+        extra_args = mesonlib.stringlistify(kwargs.get('args', []))
+
+        preset = kwargs.pop('preset', None)
+        if preset:
+            preset_args = PRESET_ARGS.get(preset)
+            if not preset_args:
+                raise coredata.MesonException('i18n: Preset "{}" is not one of the valid options: {}'.format(
+                                              preset, list(PRESET_ARGS.keys())))
+            extra_args = set(preset_args + extra_args)
+
+        pkg_arg = '--pkgname=' + packagename
+        lang_arg = '--langs=' + '@@'.join(languages) if languages else None
+        datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None
+        extra_args = '--extra-args=' + '@@'.join(extra_args) if extra_args else None
+
+        potargs = state.environment.get_build_command() + ['--internal', 'gettext', 'pot', pkg_arg]
+        if datadirs:
+            potargs.append(datadirs)
+        if extra_args:
+            potargs.append(extra_args)
+        pottarget = build.RunTarget(packagename + '-pot', potargs, [], state.subdir, state.subproject)
+
+        gmoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'gen_gmo']
+        if lang_arg:
+            gmoargs.append(lang_arg)
+        gmotarget = build.RunTarget(packagename + '-gmo', gmoargs, [], state.subdir, state.subproject)
+
+        updatepoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'update_po', pkg_arg]
+        if lang_arg:
+            updatepoargs.append(lang_arg)
+        if datadirs:
+            updatepoargs.append(datadirs)
+        if extra_args:
+            updatepoargs.append(extra_args)
+        updatepotarget = build.RunTarget(packagename + '-update-po', updatepoargs, [], state.subdir, state.subproject)
+
+        targets = [pottarget, gmotarget, updatepotarget]
+
+        install = kwargs.get('install', True)
+        if install:
+            install_dir = kwargs.get('install_dir', state.environment.coredata.get_option(mesonlib.OptionKey('localedir')))
+            script = state.environment.get_build_command()
+            args = ['--internal', 'gettext', 'install',
+                    '--subdir=' + state.subdir,
+                    '--localedir=' + install_dir,
+                    pkg_arg]
+            if lang_arg:
+                args.append(lang_arg)
+            iscript = state.backend.get_executable_serialisation(script + args)
+            targets.append(iscript)
+
+        return ModuleReturnValue(None, targets)
+
+def initialize(*args, **kwargs):
+    return I18nModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/keyval.py b/meson/mesonbuild/modules/keyval.py
new file mode 100644
index 000000000..b2d54db01
--- /dev/null
+++ b/meson/mesonbuild/modules/keyval.py
@@ -0,0 +1,72 @@
+# Copyright 2017, 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import ExtensionModule
+
+from .. import mesonlib
+from ..mesonlib import typeslistify
+from ..interpreterbase import FeatureNew, noKwargs, InvalidCode
+
+import os
+
+class KeyvalModule(ExtensionModule):
+
+    @FeatureNew('Keyval Module', '0.55.0')
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.methods.update({
+            'load': self.load,
+        })
+
+    def _load_file(self, path_to_config):
+        result = dict()
+        try:
+            with open(path_to_config, encoding='utf-8') as f:
+                for line in f:
+                    if '#' in line:
+                        comment_idx = line.index('#')
+                        line = line[:comment_idx]
+                    line = line.strip()
+                    try:
+                        name, val = line.split('=', 1)
+                    except ValueError:
+                        continue
+                    result[name.strip()] = val.strip()
+        except OSError as e:
+            raise mesonlib.MesonException(f'Failed to load {path_to_config}: {e}')
+
+        return result
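+        # Parsing sketch (illustrative content): a kconfig-style file containing
+        #   CONFIG_FOO=y
+        #   CONFIG_BAR="baz"  # trailing comment
+        # is returned as {'CONFIG_FOO': 'y', 'CONFIG_BAR': '"baz"'}; comments are
+        # stripped and lines without '=' are ignored.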
+
+    @noKwargs
+    def load(self, state, args, kwargs):
+        sources = typeslistify(args, (str, mesonlib.File))
+        if len(sources) != 1:
+            raise InvalidCode('load takes only one file input.')
+
+        s = sources[0]
+        is_built = False
+        if isinstance(s, mesonlib.File):
+            is_built = is_built or s.is_built
+            s = s.absolute_path(self.interpreter.environment.source_dir, self.interpreter.environment.build_dir)
+        else:
+            s = os.path.join(self.interpreter.environment.source_dir, s)
+
+        if s not in self.interpreter.build_def_files and not is_built:
+            self.interpreter.build_def_files.append(s)
+
+        return self._load_file(s)
+
+
+def initialize(*args, **kwargs):
+    return KeyvalModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/modtest.py b/meson/mesonbuild/modules/modtest.py
new file mode 100644
index 000000000..dd2e2ff8d
--- /dev/null
+++ b/meson/mesonbuild/modules/modtest.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import ExtensionModule
+from ..interpreterbase import noKwargs
+
+class TestModule(ExtensionModule):
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'print_hello': self.print_hello,
+        })
+
+    @noKwargs
+    def print_hello(self, state, args, kwargs):
+        print('Hello from a Meson module')
+
+def initialize(*args, **kwargs):
+    return TestModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/pkgconfig.py b/meson/mesonbuild/modules/pkgconfig.py
new file mode 100644
index 000000000..c6eaedca5
--- /dev/null
+++ b/meson/mesonbuild/modules/pkgconfig.py
@@ -0,0 +1,591 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from pathlib import PurePath
+
+from .. import build
+from .. import dependencies
+from ..dependencies import ThreadDependency
+from .. import mesonlib
+from .. import mlog
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
+
+already_warned_objs = set()
+
+class DependenciesHelper:
+    def __init__(self, state, name):
+        self.state = state
+        self.name = name
+        self.pub_libs = []
+        self.pub_reqs = []
+        self.priv_libs = []
+        self.priv_reqs = []
+        self.cflags = []
+        self.version_reqs = {}
+        self.link_whole_targets = []
+
+    def add_pub_libs(self, libs):
+        libs, reqs, cflags = self._process_libs(libs, True)
+        self.pub_libs = libs + self.pub_libs # prepend to preserve dependencies
+        self.pub_reqs += reqs
+        self.cflags += cflags
+
+    def add_priv_libs(self, libs):
+        libs, reqs, _ = self._process_libs(libs, False)
+        self.priv_libs = libs + self.priv_libs
+        self.priv_reqs += reqs
+
+    def add_pub_reqs(self, reqs):
+        self.pub_reqs += self._process_reqs(reqs)
+
+    def add_priv_reqs(self, reqs):
+        self.priv_reqs += self._process_reqs(reqs)
+
+    def _check_generated_pc_deprecation(self, obj):
+        if not hasattr(obj, 'generated_pc_warn'):
+            return
+        name = obj.generated_pc_warn[0]
+        if (name, obj.name) in already_warned_objs:
+            return
+        mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
+                         '"libraries" keyword argument of a previous call '
+                         'to generate() method instead of first positional '
+                         'argument.', 'Adding', mlog.bold(obj.generated_pc),
+                         'to "Requires" field, but this is a deprecated '
+                         'behaviour that will change in a future version '
+                         'of Meson. Please report the issue if this '
+                         'warning cannot be avoided in your case.',
+                         location=obj.generated_pc_warn[1])
+        already_warned_objs.add((name, obj.name))
+
+    def _process_reqs(self, reqs):
+        '''Returns string names of requirements'''
+        processed_reqs = []
+        for obj in mesonlib.listify(reqs):
+            if not isinstance(obj, str):
+                FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject)
+            if hasattr(obj, 'generated_pc'):
+                self._check_generated_pc_deprecation(obj)
+                processed_reqs.append(obj.generated_pc)
+            elif hasattr(obj, 'pcdep'):
+                pcdeps = mesonlib.listify(obj.pcdep)
+                for d in pcdeps:
+                    processed_reqs.append(d.name)
+                    self.add_version_reqs(d.name, obj.version_reqs)
+            elif isinstance(obj, dependencies.PkgConfigDependency):
+                if obj.found():
+                    processed_reqs.append(obj.name)
+                    self.add_version_reqs(obj.name, obj.version_reqs)
+            elif isinstance(obj, str):
+                name, version_req = self.split_version_req(obj)
+                processed_reqs.append(name)
+                self.add_version_reqs(name, version_req)
+            elif isinstance(obj, dependencies.Dependency) and not obj.found():
+                pass
+            elif isinstance(obj, ThreadDependency):
+                pass
+            else:
+                raise mesonlib.MesonException('requires argument is not a string, '
+                                              'a library with a pkgconfig-generated file '
+                                              'or a pkgconfig dependency object, '
+                                              'got {!r}'.format(obj))
+        return processed_reqs
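+    # Requirement-string sketch (illustrative, hedged): a plain string entry such
+    # as 'glib-2.0 >= 2.40' is handed to split_version_req() (referenced above) so
+    # that the package name 'glib-2.0' and the version constraint '>= 2.40' end up
+    # separately in the generated "Requires:" line.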
+
+    def add_cflags(self, cflags):
+        self.cflags += mesonlib.stringlistify(cflags)
+
+    def _process_libs(self, libs, public: bool):
+        libs = mesonlib.listify(libs)
+        processed_libs = []
+        processed_reqs = []
+        processed_cflags = []
+        for obj in libs:
+            if hasattr(obj, 'pcdep'):
+                pcdeps = mesonlib.listify(obj.pcdep)
+                for d in pcdeps:
+                    processed_reqs.append(d.name)
+                    self.add_version_reqs(d.name, obj.version_reqs)
+            elif hasattr(obj, 'generated_pc'):
+                self._check_generated_pc_deprecation(obj)
+                processed_reqs.append(obj.generated_pc)
+            elif isinstance(obj, dependencies.PkgConfigDependency):
+                if obj.found():
+                    processed_reqs.append(obj.name)
+                    self.add_version_reqs(obj.name, obj.version_reqs)
+            elif isinstance(obj, dependencies.InternalDependency):
+                if obj.found():
+                    processed_libs += obj.get_link_args()
+                    processed_cflags += obj.get_compile_args()
+                    self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True)
+            elif isinstance(obj, dependencies.Dependency):
+                if obj.found():
+                    processed_libs += obj.get_link_args()
+                    processed_cflags += obj.get_compile_args()
+            elif isinstance(obj, build.SharedLibrary) and obj.shared_library_only:
+                # Do not pull dependencies for shared libraries because they are
+                # only required for static linking. Adding private requires has
+                # the side effect of exposing their cflags, which is the intended
+                # behaviour of pkg-config but forces Debian to add more build
+                # dependencies than needed.
+                # See https://bugs.freedesktop.org/show_bug.cgi?id=105572
+                processed_libs.append(obj)
+            elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
+                processed_libs.append(obj)
+                # If there is a static library in `Libs:` all its deps must be
+                # public too, otherwise the generated pc file will never be
+                # usable without --static.
+                self._add_lib_dependencies(obj.link_targets,
+                                           obj.link_whole_targets,
+                                           obj.external_deps,
+                                           isinstance(obj, build.StaticLibrary) and public)
+            elif isinstance(obj, (build.CustomTarget, build.CustomTargetIndex)):
+                if not obj.is_linkable_target():
+                    raise mesonlib.MesonException('library argument contains a non-linkable custom_target.')
+                FeatureNew.single_use('custom_target in pkgconfig.generate libraries', '0.58.0', self.state.subproject)
+                processed_libs.append(obj)
+            elif isinstance(obj, str):
+                processed_libs.append(obj)
+            else:
+                raise mesonlib.MesonException(f'library argument of type {type(obj).__name__} not a string, library or dependency object.')
+
+        return processed_libs, processed_reqs, processed_cflags
+
+    def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public, private_external_deps=False):
+        add_libs = self.add_pub_libs if public else self.add_priv_libs
+        # Recursively add all linked libraries
+        for t in link_targets:
+            # Internal libraries (uninstalled static library) will be promoted
+            # to link_whole, treat them as such here.
+            if t.is_internal():
+                self._add_link_whole(t, public)
+            else:
+                add_libs([t])
+        for t in link_whole_targets:
+            self._add_link_whole(t, public)
+        # And finally its external dependencies
+        if private_external_deps:
+            self.add_priv_libs(external_deps)
+        else:
+            add_libs(external_deps)
+
+    def _add_link_whole(self, t, public):
+        # Don't include static libraries that we link_whole. But we still need to
+        # include their dependencies: a static library we link_whole
+        # could itself link to a shared library or an installed static library.
+        # Keep track of link_whole_targets so we can remove them from our
+        # lists in case a library is link_with and link_whole at the same time.
+        # See remove_dups() below.
+        self.link_whole_targets.append(t)
+        self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)
+
+    def add_version_reqs(self, name, version_reqs):
+        if version_reqs:
+            if name not in self.version_reqs:
+                self.version_reqs[name] = set()
+            # Note that pkg-config is picky about whitespace.
+            # 'foo > 1.2' is ok but 'foo>1.2' is not.
+            # 'foo, bar' is ok, but 'foo,bar' is not.
+            new_vreqs = mesonlib.stringlistify(version_reqs)
+            self.version_reqs[name].update(new_vreqs)
+
+    def split_version_req(self, s):
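+        # e.g. 'foo >= 1.2' -> ('foo', '>= 1.2'); a bare 'foo' -> ('foo', None).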
+        for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+            pos = s.find(op)
+            if pos > 0:
+                return s[0:pos].strip(), s[pos:].strip()
+        return s, None
+
+    def format_vreq(self, vreq):
+        # vreq are '>=1.0' and pkgconfig wants '>= 1.0'
+        for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+            if vreq.startswith(op):
+                return op + ' ' + vreq[len(op):]
+        return vreq
+
+    def format_reqs(self, reqs):
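+        # Renders names together with any registered version requirements,
+        # e.g. ['glib-2.0', 'gio-2.0'] -> 'glib-2.0 >= 2.40, gio-2.0'.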
+        result = []
+        for name in reqs:
+            vreqs = self.version_reqs.get(name, None)
+            if vreqs:
+                result += [name + ' ' + self.format_vreq(vreq) for vreq in vreqs]
+            else:
+                result += [name]
+        return ', '.join(result)
+
+    def remove_dups(self):
+        # Set of ids that have already been handled and should not be added any more
+        exclude = set()
+
+        # We can't just check if 'x' is excluded because we could have copies of
+        # the same SharedLibrary object for example.
+        def _ids(x):
+            if hasattr(x, 'generated_pc'):
+                yield x.generated_pc
+            if isinstance(x, build.Target):
+                yield x.get_id()
+            yield x
+
+        # Exclude 'x' in all its forms and return if it was already excluded
+        def _add_exclude(x):
+            was_excluded = False
+            for i in _ids(x):
+                if i in exclude:
+                    was_excluded = True
+                else:
+                    exclude.add(i)
+            return was_excluded
+
+        # link_whole targets are already part of other targets, exclude them all.
+        for t in self.link_whole_targets:
+            _add_exclude(t)
+
+        def _fn(xs, libs=False):
+            # Remove duplicates whilst preserving original order
+            result = []
+            for x in xs:
+                # Don't de-dup unknown strings to avoid messing up arguments like:
+                # ['-framework', 'CoreAudio', '-framework', 'CoreMedia']
+                known_flags = ['-pthread']
+                cannot_dedup = libs and isinstance(x, str) and \
+                    not x.startswith(('-l', '-L')) and \
+                    x not in known_flags
+                if not cannot_dedup and _add_exclude(x):
+                    continue
+                result.append(x)
+            return result
+
+        # Handle lists in priority order: public items can be excluded from
+        # private ones, and Requires can be excluded from Libs.
+        self.pub_reqs = _fn(self.pub_reqs)
+        self.pub_libs = _fn(self.pub_libs, True)
+        self.priv_reqs = _fn(self.priv_reqs)
+        self.priv_libs = _fn(self.priv_libs, True)
+        # Reset exclude list just in case some values can be both cflags and libs.
+        exclude = set()
+        self.cflags = _fn(self.cflags)
+
+class PkgConfigModule(ExtensionModule):
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'generate': self.generate,
+        })
+
+    def _get_lname(self, l, msg, pcfile, is_custom_target):
+        if is_custom_target:
+            basename = os.path.basename(l.get_filename())
+            name = os.path.splitext(basename)[0]
+            if name.startswith('lib'):
+                name = name[3:]
+            return name
+        # Nothing special
+        if not l.name_prefix_set:
+            return l.name
+        # Sometimes people want the library to start with 'lib' everywhere,
+        # which is achieved by setting name_prefix to '' and the target name to
+        # 'libfoo'. In that case, try to get the pkg-config '-lfoo' arg correct.
+        if l.prefix == '' and l.name.startswith('lib'):
+            return l.name[3:]
+        # If the library is imported via an import library which is always
+        # named after the target name, '-lfoo' is correct.
+        if isinstance(l, build.SharedLibrary) and l.import_filename:
+            return l.name
+        # In other cases, we can't guarantee that the compiler will be able to
+        # find the library via '-lfoo', so tell the user that.
+        mlog.warning(msg.format(l.name, 'name_prefix', l.name, pcfile))
+        return l.name
+
+    def _escape(self, value):
+        '''
+        We cannot use quote_arg because it quotes with ' and " which does not
+        work with pkg-config and pkgconf at all.
+        '''
+        # We should always write out paths with / because pkg-config requires
+        # spaces to be quoted with \ and that messes up on Windows:
+        # https://bugs.freedesktop.org/show_bug.cgi?id=103203
+        if isinstance(value, PurePath):
+            value = value.as_posix()
+        return value.replace(' ', r'\ ')
+
+    def _make_relative(self, prefix, subdir):
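+        # Return subdir relative to prefix when possible, otherwise subdir
+        # itself, always with POSIX separators as pkg-config expects.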
+        prefix = PurePath(prefix)
+        subdir = PurePath(subdir)
+        try:
+            return subdir.relative_to(prefix).as_posix()
+        except ValueError:
+            return subdir.as_posix()
+
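+    # Write the actual .pc file: variable definitions first, then the
+    # Name/Description/URL/Version block, followed by Requires, Conflicts,
+    # Libs and Cflags.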
+    def _generate_pkgconfig_file(self, state, deps, subdirs, name, description,
+                                 url, version, pcfile, conflicts, variables,
+                                 unescaped_variables, uninstalled=False, dataonly=False):
+        coredata = state.environment.get_coredata()
+        if uninstalled:
+            outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled')
+            if not os.path.exists(outdir):
+                os.mkdir(outdir)
+            prefix = PurePath(state.environment.get_build_dir())
+            srcdir = PurePath(state.environment.get_source_dir())
+        else:
+            outdir = state.environment.scratch_dir
+            prefix = PurePath(coredata.get_option(mesonlib.OptionKey('prefix')))
+        # These always return paths relative to prefix
+        libdir = PurePath(coredata.get_option(mesonlib.OptionKey('libdir')))
+        incdir = PurePath(coredata.get_option(mesonlib.OptionKey('includedir')))
+        fname = os.path.join(outdir, pcfile)
+        with open(fname, 'w', encoding='utf-8') as ofile:
+            if not dataonly:
+                ofile.write('prefix={}\n'.format(self._escape(prefix)))
+                if uninstalled:
+                    ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
+                ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
+                ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
+            if variables or unescaped_variables:
+                ofile.write('\n')
+            for k, v in variables:
+                ofile.write('{}={}\n'.format(k, self._escape(v)))
+            for k, v in unescaped_variables:
+                ofile.write(f'{k}={v}\n')
+            ofile.write('\n')
+            ofile.write('Name: %s\n' % name)
+            if len(description) > 0:
+                ofile.write('Description: %s\n' % description)
+            if len(url) > 0:
+                ofile.write('URL: %s\n' % url)
+            ofile.write('Version: %s\n' % version)
+            reqs_str = deps.format_reqs(deps.pub_reqs)
+            if len(reqs_str) > 0:
+                ofile.write(f'Requires: {reqs_str}\n')
+            reqs_str = deps.format_reqs(deps.priv_reqs)
+            if len(reqs_str) > 0:
+                ofile.write(f'Requires.private: {reqs_str}\n')
+            if len(conflicts) > 0:
+                ofile.write('Conflicts: {}\n'.format(' '.join(conflicts)))
+
+            def generate_libs_flags(libs):
+                msg = 'Library target {0!r} has {1!r} set. Compilers ' \
+                      'may not find it from its \'-l{2}\' linker flag in the ' \
+                      '{3!r} pkg-config file.'
+                Lflags = []
+                for l in libs:
+                    if isinstance(l, str):
+                        yield l
+                    else:
+                        if uninstalled:
+                            install_dir = os.path.dirname(state.backend.get_target_filename_abs(l))
+                        else:
+                            install_dir = l.get_custom_install_dir()[0]
+                        if install_dir is False:
+                            continue
+                        is_custom_target = isinstance(l, (build.CustomTarget, build.CustomTargetIndex))
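+                        # C# libraries are referenced with the compiler's
+                        # '-r' flag instead of the usual -L/-l pair.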
+                        if not is_custom_target and 'cs' in l.compilers:
+                            if isinstance(install_dir, str):
+                                Lflag = '-r${{prefix}}/{}/{}'.format(self._escape(self._make_relative(prefix, install_dir)), l.filename)
+                            else:  # install_dir is True
+                                Lflag = '-r${libdir}/%s' % l.filename
+                        else:
+                            if isinstance(install_dir, str):
+                                Lflag = '-L${prefix}/%s' % self._escape(self._make_relative(prefix, install_dir))
+                            else:  # install_dir is True
+                                Lflag = '-L${libdir}'
+                        if Lflag not in Lflags:
+                            Lflags.append(Lflag)
+                            yield Lflag
+                        lname = self._get_lname(l, msg, pcfile, is_custom_target)
+                        # If using a custom suffix, the compiler may not be able to
+                        # find the library
+                        if not is_custom_target and l.name_suffix_set:
+                            mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile))
+                        if is_custom_target or 'cs' not in l.compilers:
+                            yield '-l%s' % lname
+
+            def get_uninstalled_include_dirs(libs):
+                result = []
+                for l in libs:
+                    if isinstance(l, (str, build.CustomTarget, build.CustomTargetIndex)):
+                        continue
+                    if l.get_subdir() not in result:
+                        result.append(l.get_subdir())
+                    for i in l.get_include_dirs():
+                        curdir = i.get_curdir()
+                        for d in i.get_incdirs():
+                            path = os.path.join(curdir, d)
+                            if path not in result:
+                                result.append(path)
+                return result
+
+            def generate_uninstalled_cflags(libs):
+                for d in get_uninstalled_include_dirs(libs):
+                    for basedir in ['${prefix}', '${srcdir}']:
+                        path = PurePath(basedir, d)
+                        yield '-I%s' % self._escape(path.as_posix())
+
+            if len(deps.pub_libs) > 0:
+                ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs))))
+            if len(deps.priv_libs) > 0:
+                ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
+
+            cflags = []
+            if uninstalled:
+                cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
+            else:
+                for d in subdirs:
+                    if d == '.':
+                        cflags.append('-I${includedir}')
+                    else:
+                        cflags.append(self._escape(PurePath('-I${includedir}') / d))
+            cflags += [self._escape(f) for f in deps.cflags]
+            if cflags and not dataonly:
+                ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
+
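+    # Illustrative meson.build usage (the library and values are examples):
+    #   pkg = import('pkgconfig')
+    #   pkg.generate(mylib, description: 'My library', subdirs: 'mylib')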
+    @FeatureNewKwargs('pkgconfig.generate', '0.59.0', ['unescaped_variables', 'unescaped_uninstalled_variables'])
+    @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
+    @FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
+    @FeatureNewKwargs('pkgconfig.generate', '0.41.0', ['variables'])
+    @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['dataonly'])
+    @permittedKwargs({'libraries', 'version', 'name', 'description', 'filebase',
+                      'subdirs', 'requires', 'requires_private', 'libraries_private',
+                      'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions',
+                      'dataonly', 'conflicts', 'uninstalled_variables',
+                      'unescaped_variables', 'unescaped_uninstalled_variables'})
+    def generate(self, state, args, kwargs):
+        default_version = state.project_version['version']
+        default_install_dir = None
+        default_description = None
+        default_name = None
+        mainlib = None
+        default_subdirs = ['.']
+        if not args and 'version' not in kwargs:
+            FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
+        elif len(args) == 1:
+            FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
+            mainlib = args[0]
+            if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
+                raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
+            default_name = mainlib.name
+            default_description = state.project_name + ': ' + mainlib.name
+            install_dir = mainlib.get_custom_install_dir()[0]
+            if isinstance(install_dir, str):
+                default_install_dir = os.path.join(install_dir, 'pkgconfig')
+        elif len(args) > 1:
+            raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.')
+
+        dataonly = kwargs.get('dataonly', False)
+        if not isinstance(dataonly, bool):
+            raise mesonlib.MesonException('dataonly must be boolean.')
+        if dataonly:
+            default_subdirs = []
+            blocked_vars = ['libraries', 'libraries_private', 'requires_private', 'extra_cflags', 'subdirs']
+            if any(k in kwargs for k in blocked_vars):
+                raise mesonlib.MesonException(f'Cannot combine dataonly with any of {blocked_vars}')
+
+        subdirs = mesonlib.stringlistify(kwargs.get('subdirs', default_subdirs))
+        version = kwargs.get('version', default_version)
+        if not isinstance(version, str):
+            raise mesonlib.MesonException('Version must be specified.')
+        name = kwargs.get('name', default_name)
+        if not isinstance(name, str):
+            raise mesonlib.MesonException('Name not specified.')
+        filebase = kwargs.get('filebase', name)
+        if not isinstance(filebase, str):
+            raise mesonlib.MesonException('Filebase must be a string.')
+        description = kwargs.get('description', default_description)
+        if not isinstance(description, str):
+            raise mesonlib.MesonException('Description is not a string.')
+        url = kwargs.get('url', '')
+        if not isinstance(url, str):
+            raise mesonlib.MesonException('URL is not a string.')
+        conflicts = mesonlib.stringlistify(kwargs.get('conflicts', []))
+
+        # Prepend the main library to public libraries list. This is required
+        # so dep.add_pub_libs() can handle dependency ordering correctly and put
+        # extra libraries after the main library.
+        libraries = mesonlib.extract_as_list(kwargs, 'libraries')
+        if mainlib:
+            libraries = [mainlib] + libraries
+
+        deps = DependenciesHelper(state, filebase)
+        deps.add_pub_libs(libraries)
+        deps.add_priv_libs(kwargs.get('libraries_private', []))
+        deps.add_pub_reqs(kwargs.get('requires', []))
+        deps.add_priv_reqs(kwargs.get('requires_private', []))
+        deps.add_cflags(kwargs.get('extra_cflags', []))
+
+        dversions = kwargs.get('d_module_versions', None)
+        if dversions:
+            compiler = state.environment.coredata.compilers.host.get('d')
+            if compiler:
+                deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
+
+        deps.remove_dups()
+
+        def parse_variable_list(vardict):
+            reserved = ['prefix', 'libdir', 'includedir']
+            variables = []
+            for name, value in vardict.items():
+                if not dataonly and name in reserved:
+                    raise mesonlib.MesonException(f'Variable "{name}" is reserved')
+                variables.append((name, value))
+            return variables
+
+        variables = self.interpreter.extract_variables(kwargs, dict_new=True)
+        variables = parse_variable_list(variables)
+        unescaped_variables = self.interpreter.extract_variables(kwargs, argname='unescaped_variables')
+        unescaped_variables = parse_variable_list(unescaped_variables)
+
+        pcfile = filebase + '.pc'
+        pkgroot = kwargs.get('install_dir', default_install_dir)
+        if pkgroot is None:
+            if mesonlib.is_freebsd():
+                pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('prefix')), 'libdata', 'pkgconfig')
+            else:
+                pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'pkgconfig')
+        if not isinstance(pkgroot, str):
+            raise mesonlib.MesonException('Install_dir must be a string.')
+        self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
+                                     version, pcfile, conflicts, variables,
+                                     unescaped_variables, False, dataonly)
+        res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, None, state.subproject)
+        variables = self.interpreter.extract_variables(kwargs, argname='uninstalled_variables', dict_new=True)
+        variables = parse_variable_list(variables)
+        unescaped_variables = self.interpreter.extract_variables(kwargs, argname='unescaped_uninstalled_variables')
+        unescaped_variables = parse_variable_list(unescaped_variables)
+
+        pcfile = filebase + '-uninstalled.pc'
+        self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
+                                     version, pcfile, conflicts, variables,
+                                     unescaped_variables, uninstalled=True, dataonly=dataonly)
+        # Associate the main library with this generated pc file. If the library
+        # is used in any subsequent call to generate(), it will be emitted as a
+        # 'Requires:' or 'Requires.private:' entry.
+        # Backward compatibility: We used to set 'generated_pc' on all public
+        # libraries instead of just the main one. Keep doing that but warn if
+        # anyone is relying on that deprecated behaviour.
+        if mainlib:
+            if not hasattr(mainlib, 'generated_pc'):
+                mainlib.generated_pc = filebase
+            else:
+                mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name))
+        else:
+            for lib in deps.pub_libs:
+                if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
+                    lib.generated_pc = filebase
+                    location = state.current_node
+                    lib.generated_pc_warn = [name, location]
+        return ModuleReturnValue(res, [res])
+
+def initialize(*args, **kwargs):
+    return PkgConfigModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/python.py b/meson/mesonbuild/modules/python.py
new file mode 100644
index 000000000..c4ebbc77b
--- /dev/null
+++ b/meson/mesonbuild/modules/python.py
@@ -0,0 +1,661 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import json
+import shutil
+import typing as T
+
+from pathlib import Path
+from .. import mesonlib
+from ..mesonlib import MachineChoice, MesonException
+from . import ExtensionModule
+from ..interpreterbase import (
+    noPosargs, noKwargs, permittedKwargs,
+    InvalidArguments,
+    FeatureNew, FeatureNewKwargs, disablerIfNotFound
+)
+from ..interpreter import ExternalProgramHolder, extract_required_kwarg, permitted_dependency_kwargs
+from ..build import known_shmod_kwargs
+from .. import mlog
+from ..environment import detect_cpu_family
+from ..dependencies import DependencyMethods, PkgConfigDependency, NotFoundDependency, SystemDependency
+from ..programs import ExternalProgram, NonExistingExternalProgram
+
+mod_kwargs = {'subdir'}
+mod_kwargs.update(known_shmod_kwargs)
+mod_kwargs -= {'name_prefix', 'name_suffix'}
+
+class PythonDependency(SystemDependency):
+
+    def __init__(self, python_holder, environment, kwargs):
+        super().__init__('python', environment, kwargs)
+        self.name = 'python'
+        self.static = kwargs.get('static', False)
+        self.embed = kwargs.get('embed', False)
+        self.version = python_holder.version
+        self.platform = python_holder.platform
+        self.pkgdep = None
+        self.variables = python_holder.variables
+        self.paths = python_holder.paths
+        self.link_libpython = python_holder.link_libpython
+        self.info: T.Optional[T.Dict[str, str]] = None
+        if mesonlib.version_compare(self.version, '>= 3.0'):
+            self.major_version = 3
+        else:
+            self.major_version = 2
+
+        # We first try to find the necessary python variables using pkgconfig
+        if DependencyMethods.PKGCONFIG in self.methods and not python_holder.is_pypy:
+            pkg_version = self.variables.get('LDVERSION') or self.version
+            pkg_libdir = self.variables.get('LIBPC')
+            pkg_embed = '-embed' if self.embed and mesonlib.version_compare(self.version, '>=3.8') else ''
+            pkg_name = f'python-{pkg_version}{pkg_embed}'
+
+            # If python-X.Y.pc exists in LIBPC, we will try to use it
+            if pkg_libdir is not None and Path(os.path.join(pkg_libdir, f'{pkg_name}.pc')).is_file():
+                old_pkg_libdir = os.environ.get('PKG_CONFIG_LIBDIR')
+                old_pkg_path = os.environ.get('PKG_CONFIG_PATH')
+
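+                # Temporarily point pkg-config at LIBPC only, so the .pc file
+                # shipped with this interpreter wins over anything found via
+                # PKG_CONFIG_PATH; the original environment is restored below.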
+                os.environ.pop('PKG_CONFIG_PATH', None)
+
+                if pkg_libdir:
+                    os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir
+
+                try:
+                    self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs)
+                    mlog.debug(f'Found "{pkg_name}" via pkgconfig lookup in LIBPC ({pkg_libdir})')
+                    py_lookup_method = 'pkgconfig'
+                except MesonException as e:
+                    mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir})')
+                    mlog.debug(e)
+
+                if old_pkg_path is not None:
+                    os.environ['PKG_CONFIG_PATH'] = old_pkg_path
+
+                if old_pkg_libdir is not None:
+                    os.environ['PKG_CONFIG_LIBDIR'] = old_pkg_libdir
+                else:
+                    os.environ.pop('PKG_CONFIG_LIBDIR', None)
+            else:
+                mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir}), this is likely due to a relocated python installation')
+
+            # If lookup via LIBPC failed, try to use fallback PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH mechanisms
+            if self.pkgdep is None or not self.pkgdep.found():
+                try:
+                    self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs)
+                    mlog.debug(f'Found "{pkg_name}" via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH')
+                    py_lookup_method = 'pkgconfig-fallback'
+                except MesonException as e:
+                    mlog.debug(f'"{pkg_name}" could not be found via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH')
+                    mlog.debug(e)
+
+        if self.pkgdep and self.pkgdep.found():
+            self.compile_args = self.pkgdep.get_compile_args()
+            self.link_args = self.pkgdep.get_link_args()
+            self.is_found = True
+            self.pcdep = self.pkgdep
+        else:
+            self.pkgdep = None
+
+            # Finally, try to find python via SYSCONFIG as a final measure
+            if DependencyMethods.SYSCONFIG in self.methods:
+                if mesonlib.is_windows():
+                    self._find_libpy_windows(environment)
+                else:
+                    self._find_libpy(python_holder, environment)
+                if self.is_found:
+                    mlog.debug(f'Found "python-{self.version}" via SYSCONFIG module')
+                    py_lookup_method = 'sysconfig'
+
+        if self.is_found:
+            mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green(f'YES ({py_lookup_method})'))
+        else:
+            mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))
+
+    def _find_libpy(self, python_holder, environment):
+        if python_holder.is_pypy:
+            if self.major_version == 3:
+                libname = 'pypy3-c'
+            else:
+                libname = 'pypy-c'
+            libdir = os.path.join(self.variables.get('base'), 'bin')
+            libdirs = [libdir]
+        else:
+            libname = f'python{self.version}'
+            if 'DEBUG_EXT' in self.variables:
+                libname += self.variables['DEBUG_EXT']
+            if 'ABIFLAGS' in self.variables:
+                libname += self.variables['ABIFLAGS']
+            libdirs = []
+
+        largs = self.clib_compiler.find_library(libname, environment, libdirs)
+        if largs is not None:
+            self.link_args = largs
+
+        self.is_found = largs is not None or self.link_libpython
+
+        inc_paths = mesonlib.OrderedSet([
+            self.variables.get('INCLUDEPY'),
+            self.paths.get('include'),
+            self.paths.get('platinclude')])
+
+        self.compile_args += ['-I' + path for path in inc_paths if path]
+
+    def get_windows_python_arch(self):
+        if self.platform == 'mingw':
+            pycc = self.variables.get('CC')
+            if pycc.startswith('x86_64'):
+                return '64'
+            elif pycc.startswith(('i686', 'i386')):
+                return '32'
+            else:
+                mlog.log('MinGW Python built with unknown CC {!r}, please file '
+                         'a bug'.format(pycc))
+                return None
+        elif self.platform == 'win32':
+            return '32'
+        elif self.platform in ('win64', 'win-amd64'):
+            return '64'
+        mlog.log(f'Unknown Windows Python platform {self.platform!r}')
+        return None
+
+    def get_windows_link_args(self):
+        if self.platform.startswith('win'):
+            vernum = self.variables.get('py_version_nodot')
+            if self.static:
+                libpath = Path('libs') / f'libpython{vernum}.a'
+            else:
+                comp = self.get_compiler()
+                if comp.id == "gcc":
+                    libpath = f'python{vernum}.dll'
+                else:
+                    libpath = Path('libs') / f'python{vernum}.lib'
+            lib = Path(self.variables.get('base')) / libpath
+        elif self.platform == 'mingw':
+            if self.static:
+                libname = self.variables.get('LIBRARY')
+            else:
+                libname = self.variables.get('LDLIBRARY')
+            lib = Path(self.variables.get('LIBDIR')) / libname
+        if not lib.exists():
+            mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
+            return None
+        return [str(lib)]
+
+    def _find_libpy_windows(self, env):
+        '''
+        Find python3 libraries on Windows and also verify that the arch matches
+        what we are building for.
+        '''
+        pyarch = self.get_windows_python_arch()
+        if pyarch is None:
+            self.is_found = False
+            return
+        arch = detect_cpu_family(env.coredata.compilers.host)
+        if arch == 'x86':
+            arch = '32'
+        elif arch == 'x86_64':
+            arch = '64'
+        else:
+            # We can't cross-compile Python 3 dependencies on Windows yet
+            mlog.log(f'Unknown architecture {arch!r} for',
+                     mlog.bold(self.name))
+            self.is_found = False
+            return
+        # Pyarch ends in '32' or '64'
+        if arch != pyarch:
+            mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but '
+                     'found {}-bit'.format(arch, pyarch))
+            self.is_found = False
+            return
+        # This can fail if the library is not found
+        largs = self.get_windows_link_args()
+        if largs is None:
+            self.is_found = False
+            return
+        self.link_args = largs
+        # Compile args
+        inc_paths = mesonlib.OrderedSet([
+            self.variables.get('INCLUDEPY'),
+            self.paths.get('include'),
+            self.paths.get('platinclude')])
+
+        self.compile_args += ['-I' + path for path in inc_paths if path]
+
+        # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/
+        if pyarch == '64' and self.major_version == 2:
+            self.compile_args += ['-DMS_WIN64']
+
+        self.is_found = True
+
+    @staticmethod
+    def get_methods():
+        if mesonlib.is_windows():
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
+        elif mesonlib.is_osx():
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
+        else:
+            return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
+
+    def get_pkgconfig_variable(self, variable_name, kwargs):
+        if self.pkgdep:
+            return self.pkgdep.get_pkgconfig_variable(variable_name, kwargs)
+        else:
+            return super().get_pkgconfig_variable(variable_name, kwargs)
+
+
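+# Introspection helper run with the target interpreter; its JSON output
+# populates PythonExternalProgram.info (see sanity() below).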
+INTROSPECT_COMMAND = '''import sysconfig
+import json
+import sys
+
+install_paths = sysconfig.get_paths(scheme='posix_prefix', vars={'base': '', 'platbase': '', 'installed_base': ''})
+
+def links_against_libpython():
+    from distutils.core import Distribution, Extension
+    cmd = Distribution().get_command_obj('build_ext')
+    cmd.ensure_finalized()
+    return bool(cmd.get_libraries(Extension('dummy', [])))
+
+print (json.dumps ({
+  'variables': sysconfig.get_config_vars(),
+  'paths': sysconfig.get_paths(),
+  'install_paths': install_paths,
+  'sys_paths': sys.path,
+  'version': sysconfig.get_python_version(),
+  'platform': sysconfig.get_platform(),
+  'is_pypy': '__pypy__' in sys.builtin_module_names,
+  'link_libpython': links_against_libpython(),
+}))
+'''
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    class PythonIntrospectionDict(TypedDict):
+
+        install_paths: T.Dict[str, str]
+        is_pypy: bool
+        link_libpython: bool
+        paths: T.Dict[str, str]
+        platform: str
+        suffix: str
+        sys_paths: T.List[str]
+        variables: T.Dict[str, str]
+        version: str
+
+class PythonExternalProgram(ExternalProgram):
+    def __init__(self, name: str, command: T.Optional[T.List[str]] = None, ext_prog: T.Optional[ExternalProgram] = None):
+        if ext_prog is None:
+            super().__init__(name, command=command, silent=True)
+        else:
+            self.name = name
+            self.command = ext_prog.command
+            self.path = ext_prog.path
+        self.info: 'PythonIntrospectionDict' = {
+            'install_paths': {},
+            'is_pypy': False,
+            'link_libpython': False,
+            'paths': {},
+            'platform': 'sentinal',
+            'variables': {},
+            'version': '0.0',
+        }
+
+    def _check_version(self, version: str) -> bool:
+        if self.name == 'python2':
+            return mesonlib.version_compare(version, '< 3.0')
+        elif self.name == 'python3':
+            return mesonlib.version_compare(version, '>= 3.0')
+        return True
+
+    def sanity(self) -> bool:
+        # Sanity check, we expect to have something that at least quacks in tune
+        cmd = self.get_command() + ['-c', INTROSPECT_COMMAND]
+        p, stdout, stderr = mesonlib.Popen_safe(cmd)
+        try:
+            info = json.loads(stdout)
+        except json.JSONDecodeError:
+            info = None
+            mlog.debug('Could not introspect Python (%s): exit code %d' % (str(p.args), p.returncode))
+            mlog.debug('Program stdout:\n')
+            mlog.debug(stdout)
+            mlog.debug('Program stderr:\n')
+            mlog.debug(stderr)
+
+        if info is not None and self._check_version(info['version']):
+            variables = info['variables']
+            info['suffix'] = variables.get('EXT_SUFFIX') or variables.get('SO') or variables.get('.so')
+            self.info = T.cast('PythonIntrospectionDict', info)
+            self.platlib = self._get_path('platlib')
+            self.purelib = self._get_path('purelib')
+            return True
+        else:
+            return False
+
+    def _get_path(self, key: str) -> str:
+        user_dir = str(Path.home())
+        sys_paths = self.info['sys_paths']
+        rel_path = self.info['install_paths'][key][1:]
+        if not any(p.endswith(rel_path) for p in sys_paths if not p.startswith(user_dir)):
+            # On Debian derivatives the sysconfig install path is broken and is
+            # not among the locations the Python interpreter actually searches.
+            # See https://github.com/mesonbuild/meson/issues/8739.
+            mlog.warning('Broken python installation detected. Python files',
+                         'installed by Meson might not be found by python interpreter.',
+                         once=True)
+            if mesonlib.is_debianlike():
+                rel_path = 'lib/python3/dist-packages'
+        return rel_path
+
+
+class PythonInstallation(ExternalProgramHolder):
+    def __init__(self, python, interpreter):
+        ExternalProgramHolder.__init__(self, python, interpreter)
+        info = python.info
+        prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+        self.variables = info['variables']
+        self.suffix = info['suffix']
+        self.paths = info['paths']
+        self.platlib_install_path = os.path.join(prefix, python.platlib)
+        self.purelib_install_path = os.path.join(prefix, python.purelib)
+        self.version = info['version']
+        self.platform = info['platform']
+        self.is_pypy = info['is_pypy']
+        self.link_libpython = info['link_libpython']
+        self.methods.update({
+            'extension_module': self.extension_module_method,
+            'dependency': self.dependency_method,
+            'install_sources': self.install_sources_method,
+            'get_install_dir': self.get_install_dir_method,
+            'language_version': self.language_version_method,
+            'found': self.found_method,
+            'has_path': self.has_path_method,
+            'get_path': self.get_path_method,
+            'has_variable': self.has_variable_method,
+            'get_variable': self.get_variable_method,
+            'path': self.path_method,
+        })
+
+    @permittedKwargs(mod_kwargs)
+    def extension_module_method(self, args, kwargs):
+        if 'install_dir' in kwargs:
+            if 'subdir' in kwargs:
+                raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive')
+        else:
+            subdir = kwargs.pop('subdir', '')
+            if not isinstance(subdir, str):
+                raise InvalidArguments('"subdir" argument must be a string.')
+
+            kwargs['install_dir'] = os.path.join(self.platlib_install_path, subdir)
+
+        # On macOS and some Linux distros (Debian) distutils doesn't link
+        # extensions against libpython. We call into distutils and mirror its
+        # behavior. See https://github.com/mesonbuild/meson/issues/4117
+        if not self.link_libpython:
+            new_deps = []
+            for dep in mesonlib.extract_as_list(kwargs, 'dependencies'):
+                if isinstance(dep, PythonDependency):
+                    dep = dep.get_partial_dependency(compile_args=True)
+                new_deps.append(dep)
+            kwargs['dependencies'] = new_deps
+
+        # msys2's python3 has an EXT_SUFFIX like '-cpython-36m.dll': split it at
+        # the last '.' so the ABI tag part is appended to the module name and
+        # only the real file extension is used as the name_suffix.
+        split, suffix = self.suffix.rsplit('.', 1)
+        args[0] += split
+
+        kwargs['name_prefix'] = ''
+        kwargs['name_suffix'] = suffix
+
+        return self.interpreter.func_shared_module(None, args, kwargs)
+
+    @permittedKwargs(permitted_dependency_kwargs | {'embed'})
+    @FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed'])
+    def dependency_method(self, args, kwargs):
+        if args:
+            mlog.warning('python_installation.dependency() does not take any '
+                         'positional arguments. It always returns a Python '
+                         'dependency. This will become an error in the future.',
+                         location=self.interpreter.current_node)
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Dependency', mlog.bold('python'), 'skipped: feature', mlog.bold(feature), 'disabled')
+            dep = NotFoundDependency(self.interpreter.environment)
+        else:
+            dep = PythonDependency(self, self.interpreter.environment, kwargs)
+            if required and not dep.found():
+                raise mesonlib.MesonException('Python dependency not found')
+        return dep
+
+    @permittedKwargs(['pure', 'subdir'])
+    def install_sources_method(self, args, kwargs):
+        pure = kwargs.pop('pure', True)
+        if not isinstance(pure, bool):
+            raise InvalidArguments('"pure" argument must be a boolean.')
+
+        subdir = kwargs.pop('subdir', '')
+        if not isinstance(subdir, str):
+            raise InvalidArguments('"subdir" argument must be a string.')
+
+        if pure:
+            kwargs['install_dir'] = os.path.join(self.purelib_install_path, subdir)
+        else:
+            kwargs['install_dir'] = os.path.join(self.platlib_install_path, subdir)
+
+        return self.interpreter.func_install_data(None, args, kwargs)
+
+    @noPosargs
+    @permittedKwargs(['pure', 'subdir'])
+    def get_install_dir_method(self, args, kwargs):
+        pure = kwargs.pop('pure', True)
+        if not isinstance(pure, bool):
+            raise InvalidArguments('"pure" argument must be a boolean.')
+
+        subdir = kwargs.pop('subdir', '')
+        if not isinstance(subdir, str):
+            raise InvalidArguments('"subdir" argument must be a string.')
+
+        if pure:
+            res = os.path.join(self.purelib_install_path, subdir)
+        else:
+            res = os.path.join(self.platlib_install_path, subdir)
+
+        return res
+
+    @noPosargs
+    @noKwargs
+    def language_version_method(self, args, kwargs):
+        return self.version
+
+    @noKwargs
+    def has_path_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InvalidArguments('has_path takes exactly one positional argument.')
+        path_name = args[0]
+        if not isinstance(path_name, str):
+            raise InvalidArguments('has_path argument must be a string.')
+
+        return path_name in self.paths
+
+    @noKwargs
+    def get_path_method(self, args, kwargs):
+        if len(args) not in (1, 2):
+            raise InvalidArguments('get_path must have one or two arguments.')
+        path_name = args[0]
+        if not isinstance(path_name, str):
+            raise InvalidArguments('get_path argument must be a string.')
+
+        try:
+            path = self.paths[path_name]
+        except KeyError:
+            if len(args) == 2:
+                path = args[1]
+            else:
+                raise InvalidArguments(f'{path_name} is not a valid path name')
+
+        return path
+
+    @noKwargs
+    def has_variable_method(self, args, kwargs):
+        if len(args) != 1:
+            raise InvalidArguments('has_variable takes exactly one positional argument.')
+        var_name = args[0]
+        if not isinstance(var_name, str):
+            raise InvalidArguments('has_variable argument must be a string.')
+
+        return var_name in self.variables
+
+    @noKwargs
+    def get_variable_method(self, args, kwargs):
+        if len(args) not in (1, 2):
+            raise InvalidArguments('get_variable must have one or two arguments.')
+        var_name = args[0]
+        if not isinstance(var_name, str):
+            raise InvalidArguments('get_variable argument must be a string.')
+
+        try:
+            var = self.variables[var_name]
+        except KeyError:
+            if len(args) == 2:
+                var = args[1]
+            else:
+                raise InvalidArguments(f'{var_name} is not a valid variable name')
+
+        return var
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('Python module path method', '0.50.0')
+    def path_method(self, args, kwargs):
+        return super().path_method(args, kwargs)
+
+
+class PythonModule(ExtensionModule):
+
+    @FeatureNew('Python Module', '0.46.0')
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.methods.update({
+            'find_installation': self.find_installation,
+        })
+
+    # https://www.python.org/dev/peps/pep-0397/
+    def _get_win_pythonpath(self, name_or_path):
+        if name_or_path not in ['python2', 'python3']:
+            return None
+        if not shutil.which('py'):
+            # program not installed, return without an exception
+            return None
+        ver = {'python2': '-2', 'python3': '-3'}[name_or_path]
+        cmd = ['py', ver, '-c', "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"]
+        _, stdout, _ = mesonlib.Popen_safe(cmd)
+        directory = stdout.strip()
+        if os.path.exists(directory):
+            return os.path.join(directory, 'python')
+        else:
+            return None
+
+
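+    # Illustrative meson.build usage (the module list is an example):
+    #   py_mod = import('python')
+    #   py = py_mod.find_installation('python3', required: false, modules: ['setuptools'])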
+    @FeatureNewKwargs('python.find_installation', '0.49.0', ['disabler'])
+    @FeatureNewKwargs('python.find_installation', '0.51.0', ['modules'])
+    @disablerIfNotFound
+    @permittedKwargs({'required', 'modules'})
+    def find_installation(self, state, args, kwargs):
+        feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0')
+        disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, feature_check)
+        want_modules = mesonlib.extract_as_list(kwargs, 'modules')  # type: T.List[str]
+        found_modules = []    # type: T.List[str]
+        missing_modules = []  # type: T.List[str]
+        fallback = args[0] if args else ''
+        display_name = fallback or 'python'
+
+        if len(args) > 1:
+            raise InvalidArguments('find_installation takes zero or one positional argument.')
+
+        name_or_path = state.environment.lookup_binary_entry(MachineChoice.HOST, 'python')
+        if name_or_path is None and args:
+            name_or_path = fallback
+            if not isinstance(name_or_path, str):
+                raise InvalidArguments('find_installation argument must be a string.')
+
+        if disabled:
+            mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')')
+            return NonExistingExternalProgram()
+
+        if not name_or_path:
+            python = PythonExternalProgram('python3', mesonlib.python_command)
+        else:
+            tmp_python = ExternalProgram.from_entry(display_name, name_or_path)
+            python = PythonExternalProgram(display_name, ext_prog=tmp_python)
+
+            if not python.found() and mesonlib.is_windows():
+                pythonpath = self._get_win_pythonpath(name_or_path)
+                if pythonpath is not None:
+                    name_or_path = pythonpath
+                    python = PythonExternalProgram(name_or_path)
+
+            # Last ditch effort: python2 or python3 can be named just 'python'
+            # on various platforms, so don't give up yet. If an executable named
+            # python is available and has a compatible version, use it.
+            if not python.found() and name_or_path in ['python2', 'python3']:
+                python = PythonExternalProgram('python')
+
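+        # Probe each requested module by importing it with the found interpreter.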
+        if python.found() and want_modules:
+            for mod in want_modules:
+                p, out, err = mesonlib.Popen_safe(
+                    python.command +
+                    ['-c', f'import {mod}'])
+                if p.returncode != 0:
+                    missing_modules.append(mod)
+                else:
+                    found_modules.append(mod)
+
+        msg = ['Program', python.name]
+        if want_modules:
+            msg.append('({})'.format(', '.join(want_modules)))
+        msg.append('found:')
+        if python.found() and not missing_modules:
+            msg.extend([mlog.green('YES'), '({})'.format(' '.join(python.command))])
+        else:
+            msg.append(mlog.red('NO'))
+        if found_modules:
+            msg.append('modules:')
+            msg.append(', '.join(found_modules))
+
+        mlog.log(*msg)
+
+        if not python.found():
+            if required:
+                raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
+            return NonExistingExternalProgram()
+        elif missing_modules:
+            if required:
+                raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
+            return NonExistingExternalProgram()
+        else:
+            sane = python.sanity()
+
+            if sane:
+                return python
+            else:
+                if required:
+                    raise mesonlib.MesonException(f'{python} is not a valid python or it is missing distutils')
+                return NonExistingExternalProgram()
+
+        raise mesonlib.MesonBugException('Unreachable code was reached (PythonModule.find_installation).')
+
+
+def initialize(*args, **kwargs):
+    mod = PythonModule(*args, **kwargs)
+    mod.interpreter.append_holder_map(PythonExternalProgram, PythonInstallation)
+    return mod
diff --git a/meson/mesonbuild/modules/python3.py b/meson/mesonbuild/modules/python3.py
new file mode 100644
index 000000000..dc1f7c775
--- /dev/null
+++ b/meson/mesonbuild/modules/python3.py
@@ -0,0 +1,81 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sysconfig
+from .. import mesonlib
+
+from . import ExtensionModule
+from ..interpreterbase import noKwargs, permittedKwargs, FeatureDeprecated
+from ..build import known_shmod_kwargs
+from ..programs import ExternalProgram
+
+
+class Python3Module(ExtensionModule):
+    @FeatureDeprecated('python3 module', '0.48.0')
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.methods.update({
+            'extension_module': self.extension_module,
+            'find_python': self.find_python,
+            'language_version': self.language_version,
+            'sysconfig_path': self.sysconfig_path,
+        })
+
+    @permittedKwargs(known_shmod_kwargs)
+    def extension_module(self, state, args, kwargs):
+        if 'name_prefix' in kwargs:
+            raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.')
+        if 'name_suffix' in kwargs:
+            raise mesonlib.MesonException('Name_suffix is set automatically, specifying it is forbidden.')
+        host_system = state.host_machine.system
+        if host_system == 'darwin':
+            # Default suffix is 'dylib' but Python does not use it for extensions.
+            suffix = 'so'
+        elif host_system == 'windows':
+            # On Windows the extension is pyd for some unexplainable reason.
+            suffix = 'pyd'
+        else:
+            suffix = []
+        kwargs['name_prefix'] = ''
+        kwargs['name_suffix'] = suffix
+        return self.interpreter.func_shared_module(None, args, kwargs)
+
+    @noKwargs
+    def find_python(self, state, args, kwargs):
+        command = state.environment.lookup_binary_entry(mesonlib.MachineChoice.HOST, 'python3')
+        if command is not None:
+            py3 = ExternalProgram.from_entry('python3', command)
+        else:
+            py3 = ExternalProgram('python3', mesonlib.python_command, silent=True)
+        return py3
+
+    @noKwargs
+    def language_version(self, state, args, kwargs):
+        return sysconfig.get_python_version()
+
+    @noKwargs
+    def sysconfig_path(self, state, args, kwargs):
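+        # e.g. sysconfig_path('purelib') might return 'lib/python3.9/site-packages';
+        # the exact path depends on the interpreter running Meson.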
+        if len(args) != 1:
+            raise mesonlib.MesonException('sysconfig_path() requires passing the name of the path to get.')
+        path_name = args[0]
+        valid_names = sysconfig.get_path_names()
+        if path_name not in valid_names:
+            raise mesonlib.MesonException(f'{path_name} is not a valid path name; valid names are {valid_names}.')
+
+        # Get a relative path without a prefix, e.g. lib/python3.6/site-packages
+        return sysconfig.get_path(path_name, vars={'base': '', 'platbase': '', 'installed_base': ''})[1:]
+
+
+def initialize(*args, **kwargs):
+    return Python3Module(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/qt.py b/meson/mesonbuild/modules/qt.py
new file mode 100644
index 000000000..207a02564
--- /dev/null
+++ b/meson/mesonbuild/modules/qt.py
@@ -0,0 +1,524 @@
+# Copyright 2015 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import coredata
+import os
+import shutil
+import typing as T
+import xml.etree.ElementTree as ET
+
+from . import ModuleReturnValue, ExtensionModule
+from .. import build
+from .. import mlog
+from ..dependencies import find_external_dependency, Dependency, ExternalLibrary
+from ..mesonlib import MesonException, File, FileOrString, version_compare, Popen_safe
+from ..interpreter import extract_required_kwarg
+from ..interpreterbase import ContainerTypeInfo, FeatureDeprecated, KwargInfo, noPosargs, FeatureNew, typed_kwargs
+from ..programs import ExternalProgram, NonExistingExternalProgram
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..dependencies.qt import QtPkgConfigDependency, QmakeQtDependency
+    from ..interpreter import Interpreter
+    from ..interpreter import kwargs
+
+    QtDependencyType = T.Union[QtPkgConfigDependency, QmakeQtDependency]
+
+    from typing_extensions import TypedDict
+
+    class ResourceCompilerKwArgs(TypedDict):
+
+        """Keyword arguments for the Resource Compiler method."""
+
+        name: T.Optional[str]
+        sources: T.List[FileOrString]
+        extra_args: T.List[str]
+        method: str
+
+    class UICompilerKwArgs(TypedDict):
+
+        """Keyword arguments for the Ui Compiler method."""
+
+        sources: T.Sequence[T.Union[FileOrString, build.CustomTarget]]
+        extra_args: T.List[str]
+        method: str
+
+    class MocCompilerKwArgs(TypedDict):
+
+        """Keyword arguments for the Moc Compiler method."""
+
+        sources: T.List[T.Union[FileOrString, build.CustomTarget]]
+        headers: T.List[T.Union[FileOrString, build.CustomTarget]]
+        extra_args: T.List[str]
+        method: str
+        include_directories: T.List[T.Union[str, build.IncludeDirs]]
+        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+
+    class PreprocessKwArgs(TypedDict):
+
+        sources: T.List[FileOrString]
+        moc_sources: T.List[T.Union[FileOrString, build.CustomTarget]]
+        moc_headers: T.List[T.Union[FileOrString, build.CustomTarget]]
+        qresources: T.List[FileOrString]
+        ui_files: T.List[T.Union[FileOrString, build.CustomTarget]]
+        moc_extra_arguments: T.List[str]
+        rcc_extra_arguments: T.List[str]
+        uic_extra_arguments: T.List[str]
+        include_directories: T.List[T.Union[str, build.IncludeDirs]]
+        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+        method: str
+
+    class HasToolKwArgs(kwargs.ExtractRequired):
+
+        method: str
+
+    class CompileTranslationsKwArgs(TypedDict):
+
+        build_by_default: bool
+        install: bool
+        install_dir: T.Optional[str]
+        method: str
+        qresource: T.Optional[str]
+        rcc_extra_arguments: T.List[str]
+        ts_files: T.List[str]
+
+
+class QtBaseModule(ExtensionModule):
+    _tools_detected = False
+    _rcc_supports_depfiles = False
+
+    def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
+        ExtensionModule.__init__(self, interpreter)
+        self.qt_version = qt_version
+        self.tools: T.Dict[str, ExternalProgram] = {
+            'moc': NonExistingExternalProgram('moc'),
+            'uic': NonExistingExternalProgram('uic'),
+            'rcc': NonExistingExternalProgram('rcc'),
+            'lrelease': NonExistingExternalProgram('lrelease'),
+        }
+        self.methods.update({
+            'has_tools': self.has_tools,
+            'preprocess': self.preprocess,
+            'compile_translations': self.compile_translations,
+            'compile_resources': self.compile_resources,
+            'compile_ui': self.compile_ui,
+            'compile_moc': self.compile_moc,
+        })
+
+    def compilers_detect(self, state: 'ModuleState', qt_dep: 'QtDependencyType') -> None:
+        """Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"""
+        # It is important that this list does not change order as the order of
+        # the returned ExternalPrograms will change as well
+        wanted = f'== {qt_dep.version}'
+
+        def gen_bins() -> T.Generator[T.Tuple[str, str], None, None]:
+            for b in self.tools:
+                if qt_dep.bindir:
+                    yield os.path.join(qt_dep.bindir, b), b
+                # Prefer the -qt<version> suffixed tool to the plain one, as we
+                # don't know what the unsuffixed one points to without running it.
+                yield f'{b}-qt{qt_dep.qtver}', b
+                yield b, b
+
+        for b, name in gen_bins():
+            if self.tools[name].found():
+                continue
+
+            if name == 'lrelease':
+                arg = ['-version']
+            elif version_compare(qt_dep.version, '>= 5'):
+                arg = ['--version']
+            else:
+                arg = ['-v']
+
+            # Ensure that the version of qt and each tool are the same
+            def get_version(p: ExternalProgram) -> str:
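+                # Qt4's moc/uic/rcc print their version to stderr, while
+                # lrelease and the Qt 5+ tools print it to stdout.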
+                _, out, err = Popen_safe(p.get_command() + arg)
+                if b.startswith('lrelease') or not qt_dep.version.startswith('4'):
+                    care = out
+                else:
+                    care = err
+                return care.split(' ')[-1].replace(')', '').strip()
+
+            p = state.find_program(b, required=False,
+                                   version_func=get_version,
+                                   wanted=wanted)
+            if p.found():
+                self.tools[name] = p
+
+    def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True) -> None:
+        if self._tools_detected:
+            return
+        self._tools_detected = True
+        mlog.log(f'Detecting Qt{self.qt_version} tools')
+        kwargs = {'required': required, 'modules': 'Core', 'method': method}
+        # Just pick one to make mypy happy
+        qt = T.cast('QtPkgConfigDependency', find_external_dependency(f'qt{self.qt_version}', state.environment, kwargs))
+        if qt.found():
+            # Get all tools and then make sure that they are the right version
+            self.compilers_detect(state, qt)
+            if version_compare(qt.version, '>=5.14.0'):
+                self._rcc_supports_depfiles = True
+            else:
+                mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:',
+                    mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False)
+        else:
+            suffix = f'-qt{self.qt_version}'
+            self.tools['moc'] = NonExistingExternalProgram(name='moc' + suffix)
+            self.tools['uic'] = NonExistingExternalProgram(name='uic' + suffix)
+            self.tools['rcc'] = NonExistingExternalProgram(name='rcc' + suffix)
+            self.tools['lrelease'] = NonExistingExternalProgram(name='lrelease' + suffix)
+
+    @staticmethod
+    def _qrc_nodes(state: 'ModuleState', rcc_file: 'FileOrString') -> T.Tuple[str, T.List[str]]:
+        abspath: str
+        if isinstance(rcc_file, str):
+            abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
+            rcc_dirname = os.path.dirname(abspath)
+        else:
+            abspath = rcc_file.absolute_path(state.environment.source_dir, state.environment.build_dir)
+            rcc_dirname = os.path.dirname(abspath)
+
+        # FIXME: what error are we actually trying to check here?
+        try:
+            tree = ET.parse(abspath)
+            root = tree.getroot()
+            result: T.List[str] = []
+            for child in root[0]:
+                if child.tag != 'file':
+                    mlog.warning("malformed rcc file: ", os.path.join(state.subdir, str(rcc_file)))
+                    break
+                else:
+                    result.append(child.text)
+
+            return rcc_dirname, result
+        except Exception:
+            raise MesonException(f'Unable to parse resource file {abspath}')
+
+    def _parse_qrc_deps(self, state: 'ModuleState', rcc_file: 'FileOrString') -> T.List[File]:
+        rcc_dirname, nodes = self._qrc_nodes(state, rcc_file)
+        result: T.List[File] = []
+        for resource_path in nodes:
+            # We need to guess whether the referenced resource is:
+            #   a) in the build directory -> implies a generated file
+            #   b) in the source directory
+            #   c) somewhere else -> an external file to bundle
+            #
+            # Also, per the qrc documentation, relative paths are always
+            # relative to the qrc file, so they must be resolved against it.
+            if os.path.isabs(resource_path):
+                # a)
+                if resource_path.startswith(os.path.abspath(state.environment.build_dir)):
+                    resource_relpath = os.path.relpath(resource_path, state.environment.build_dir)
+                    result.append(File(is_built=True, subdir='', fname=resource_relpath))
+                # either b) or c)
+                else:
+                    result.append(File(is_built=False, subdir=state.subdir, fname=resource_path))
+            else:
+                path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
+                # a)
+                if path_from_rcc.startswith(state.environment.build_dir):
+                    result.append(File(is_built=True, subdir=state.subdir, fname=resource_path))
+                # b)
+                else:
+                    result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc))
+        return result
+
+    @FeatureNew('qt.has_tools', '0.54.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.has_tools',
+        KwargInfo('required', (bool, coredata.UserFeatureOption), default=False),
+        KwargInfo('method', str, default='auto'),
+    )
+    def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool:
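+        # Reports whether moc, uic, rcc and lrelease were all found for the
+        # requested Qt; with 'required: true' a missing tool raises instead.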
+        method = kwargs.get('method', 'auto')
+        # TypedDicts are invariant: even though ExtractRequired is a subset of
+        # HasToolKwArgs, type checkers will insist that passing one where the
+        # other is expected is wrong.
+        disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, default=False)
+        if disabled:
+            mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        self._detect_tools(state, method, required=False)
+        for tool in self.tools.values():
+            if not tool.found():
+                if required:
+                    raise MesonException('Qt tools not found')
+                return False
+        return True
+
+    @FeatureNew('qt.compile_resources', '0.59.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_resources',
+        KwargInfo('name', str),
+        KwargInfo('sources', ContainerTypeInfo(list, (File, str), allow_empty=False), listify=True, required=True),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('method', str, default='auto')
+    )
+    def compile_resources(self, state: 'ModuleState', args: T.Tuple, kwargs: 'ResourceCompilerKwArgs') -> ModuleReturnValue:
+        """Compile Qt resources files.
+
+        Uses CustomTargets to generate .cpp files from .qrc files.
+        """
+        out = self._compile_resources_impl(state, kwargs)
+        return ModuleReturnValue(out, [out])
+
+    def _compile_resources_impl(self, state: 'ModuleState', kwargs: 'ResourceCompilerKwArgs') -> T.List[build.CustomTarget]:
+        # Avoid the FeatureNew when dispatching from preprocess
+        self._detect_tools(state, kwargs['method'])
+        if not self.tools['rcc'].found():
+            err_msg = ("{0} sources specified and couldn't find {1}, "
+                       "please check your qt{2} installation")
+            raise MesonException(err_msg.format('RCC', f'rcc-qt{self.qt_version}', self.qt_version))
+
+        # List of generated CustomTargets
+        targets: T.List[build.CustomTarget] = []
+
+        # Pass a depfile to rcc only when the detected Qt supports it (>= 5.14)
+        DEPFILE_ARGS: T.List[str] = ['--depfile', '@DEPFILE@'] if self._rcc_supports_depfiles else []
+
+        name = kwargs['name']
+        sources = kwargs['sources']
+        extra_args = kwargs['extra_args']
+
+        # If a name was set, generate a single .cpp file from all of the qrc
+        # files; otherwise generate one .cpp file per qrc file.
+        if name:
+            qrc_deps: T.List[File] = []
+            for s in sources:
+                qrc_deps.extend(self._parse_qrc_deps(state, s))
+
+            rcc_kwargs: T.Dict[str, T.Any] = {  # TODO: if CustomTarget had typing information we could use that here...
+                'input': sources,
+                'output': name + '.cpp',
+                'command': self.tools['rcc'].get_command() + ['-name', name, '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
+                'depend_files': qrc_deps,
+                'depfile': f'{name}.d',
+            }
+            res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
+            targets.append(res_target)
+        else:
+            for rcc_file in sources:
+                qrc_deps = self._parse_qrc_deps(state, rcc_file)
+                if isinstance(rcc_file, str):
+                    basename = os.path.basename(rcc_file)
+                else:
+                    basename = os.path.basename(rcc_file.fname)
+                name = f'qt{self.qt_version}-{basename.replace(".", "_")}'
+                rcc_kwargs = {
+                    'input': rcc_file,
+                    'output': f'{name}.cpp',
+                    'command': self.tools['rcc'].get_command() + ['-name', '@BASENAME@', '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
+                    'depend_files': qrc_deps,
+                    'depfile': f'{name}.d',
+                }
+                res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
+                targets.append(res_target)
+
+        return targets
+
+    @FeatureNew('qt.compile_ui', '0.59.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_ui',
+        KwargInfo('sources', ContainerTypeInfo(list, (File, str), allow_empty=False), listify=True, required=True),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('method', str, default='auto')
+    )
+    def compile_ui(self, state: 'ModuleState', args: T.Tuple, kwargs: 'UICompilerKwArgs') -> ModuleReturnValue:
+        """Compile UI resources into cpp headers."""
+        out = self._compile_ui_impl(state, kwargs)
+        return ModuleReturnValue(out, [out])
+
+    def _compile_ui_impl(self, state: 'ModuleState', kwargs: 'UICompilerKwArgs') -> build.GeneratedList:
+        # Avoid the FeatureNew when dispatching from preprocess
+        self._detect_tools(state, kwargs['method'])
+        if not self.tools['uic'].found():
+            err_msg = ("{0} sources specified and couldn't find {1}, "
+                       "please check your qt{2} installation")
+            raise MesonException(err_msg.format('UIC', f'uic-qt{self.qt_version}', self.qt_version))
+
+        # TODO: This generator isn't added to the generator list in the Interpreter
+        gen = build.Generator(
+            self.tools['uic'],
+            kwargs['extra_args'] + ['-o', '@OUTPUT@', '@INPUT@'],
+            ['ui_@BASENAME@.h'],
+            name=f'Qt{self.qt_version} ui')
+        return gen.process_files(kwargs['sources'], state)
+
+    @FeatureNew('qt.compile_moc', '0.59.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_moc',
+        KwargInfo('sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('method', str, default='auto'),
+        KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
+        KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
+    )
+    def compile_moc(self, state: 'ModuleState', args: T.Tuple, kwargs: 'MocCompilerKwArgs') -> ModuleReturnValue:
+        out = self._compile_moc_impl(state, kwargs)
+        return ModuleReturnValue(out, [out])
+
+    def _compile_moc_impl(self, state: 'ModuleState', kwargs: 'MocCompilerKwArgs') -> T.List[build.GeneratedList]:
+        # Avoid the FeatureNew when dispatching from preprocess
+        self._detect_tools(state, kwargs['method'])
+        if not self.tools['moc'].found():
+            err_msg = ("{0} sources specified and couldn't find {1}, "
+                       "please check your qt{2} installation")
+            raise MesonException(err_msg.format('MOC', f'moc-qt{self.qt_version}', self.qt_version))
+
+        if not (kwargs['headers'] or kwargs['sources']):
+            raise build.InvalidArguments('At least one of the "headers" or "sources" keyword arguments must be provided and not empty')
+
+        inc = state.get_include_args(include_dirs=kwargs['include_directories'])
+        compile_args: T.List[str] = []
+        for dep in kwargs['dependencies']:
+            compile_args.extend([a for a in dep.get_all_compile_args() if a.startswith(('-I', '-D'))])
+
+        output: T.List[build.GeneratedList] = []
+
+        arguments = kwargs['extra_args'] + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT@']
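+        # moc'd headers become moc_@BASENAME@.cpp sources to compile; moc'd
+        # sources become @BASENAME@.moc files meant to be #included.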
+        if kwargs['headers']:
+            moc_gen = build.Generator(
+                self.tools['moc'], arguments, ['moc_@BASENAME@.cpp'],
+                name=f'Qt{self.qt_version} moc header')
+            output.append(moc_gen.process_files(kwargs['headers'], state))
+        if kwargs['sources']:
+            moc_gen = build.Generator(
+                self.tools['moc'], arguments, ['@BASENAME@.moc'],
+                name=f'Qt{self.qt_version} moc source')
+            output.append(moc_gen.process_files(kwargs['sources'], state))
+
+        return output
+
+    # We can't use typed_pos_args here, the signature is ambiguous
+    @typed_kwargs(
+        'qt.preprocess',
+        KwargInfo('sources', ContainerTypeInfo(list, (File, str)), listify=True, default=[], deprecated='0.59.0'),
+        KwargInfo('qresources', ContainerTypeInfo(list, (File, str)), listify=True, default=[]),
+        KwargInfo('ui_files', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('moc_sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('moc_headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('moc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.44.0'),
+        KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'),
+        KwargInfo('uic_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'),
+        KwargInfo('method', str, default='auto'),
+        KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
+        KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
+    )
+    def preprocess(self, state: 'ModuleState', args: T.List[T.Union[str, File]], kwargs: 'PreprocessKwArgs') -> ModuleReturnValue:
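+        # Hypothetical meson.build sketch combining the helpers above:
+        #   qt5 = import('qt5')
+        #   processed = qt5.preprocess(
+        #     moc_headers: 'mainwindow.h',
+        #     ui_files: 'mainwindow.ui',
+        #     qresources: 'res.qrc')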
+        _sources = args[1:]
+        if _sources:
+            FeatureDeprecated.single_use('qt.preprocess positional sources', '0.59', state.subproject)
+        # List is invariant, so we have to cast...
+        sources = T.cast(T.List[T.Union[str, File, build.GeneratedList, build.CustomTarget]],
+                         _sources + kwargs['sources'])
+        for s in sources:
+            if not isinstance(s, (str, File)):
+                raise build.InvalidArguments('Variadic arguments to qt.preprocess must be Strings or Files')
+        method = kwargs['method']
+
+        if kwargs['qresources']:
+            # custom output name set? -> one output file, multiple otherwise
+            rcc_kwargs: 'ResourceCompilerKwArgs' = {'name': '', 'sources': kwargs['qresources'], 'extra_args': kwargs['rcc_extra_arguments'], 'method': method}
+            if args:
+                name = args[0]
+                if not isinstance(name, str):
+                    raise build.InvalidArguments('First argument to qt.preprocess must be a string')
+                rcc_kwargs['name'] = name
+            sources.extend(self._compile_resources_impl(state, rcc_kwargs))
+
+        if kwargs['ui_files']:
+            ui_kwargs: 'UICompilerKwArgs' = {'sources': kwargs['ui_files'], 'extra_args': kwargs['uic_extra_arguments'], 'method': method}
+            sources.append(self._compile_ui_impl(state, ui_kwargs))
+
+        if kwargs['moc_headers'] or kwargs['moc_sources']:
+            moc_kwargs: 'MocCompilerKwArgs' = {
+                'extra_args': kwargs['moc_extra_arguments'],
+                'sources': kwargs['moc_sources'],
+                'headers': kwargs['moc_headers'],
+                'include_directories': kwargs['include_directories'],
+                'dependencies': kwargs['dependencies'],
+                'method': method,
+            }
+            sources.extend(self._compile_moc_impl(state, moc_kwargs))
+
+        return ModuleReturnValue(sources, [sources])
+
+    @FeatureNew('qt.compile_translations', '0.44.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_translations',
+        KwargInfo('build_by_default', bool, default=False),
+        KwargInfo('install', bool, default=False),
+        KwargInfo('install_dir', str),
+        KwargInfo('method', str, default='auto'),
+        KwargInfo('qresource', str, since='0.56.0'),
+        KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.56.0'),
+        KwargInfo('ts_files', ContainerTypeInfo(list, (str, File)), listify=True, default=[]),
+    )
+    def compile_translations(self, state: 'ModuleState', args: T.Tuple, kwargs: 'CompileTranslationsKwArgs') -> ModuleReturnValue:
+        ts_files = kwargs['ts_files']
+        install_dir = kwargs['install_dir']
+        qresource = kwargs['qresource']
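+        # When a qresource file is given, the .ts files are derived from the
+        # .qm entries it lists and the resource itself is compiled via
+        # preprocess() below.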
+        if qresource:
+            if ts_files:
+                raise MesonException('qt.compile_translations: Cannot specify both ts_files and qresource')
+            if os.path.dirname(qresource) != '':
+                raise MesonException('qt.compile_translations: qresource file name must not contain a subdirectory.')
+            qresource_file = File.from_built_file(state.subdir, qresource)
+            infile_abs = os.path.join(state.environment.source_dir, qresource_file.relative_name())
+            outfile_abs = os.path.join(state.environment.build_dir, qresource_file.relative_name())
+            os.makedirs(os.path.dirname(outfile_abs), exist_ok=True)
+            shutil.copy2(infile_abs, outfile_abs)
+            self.interpreter.add_build_def_file(infile_abs)
+
+            _, nodes = self._qrc_nodes(state, qresource_file)
+            for c in nodes:
+                if c.endswith('.qm'):
+                    ts_files.append(c[:-3] + '.ts')  # replace the '.qm' suffix
+                else:
+                    raise MesonException(f'qt.compile_translations: qresource can only contain qm files, found {c}')
+            results = self.preprocess(state, [], {'qresources': qresource_file, 'rcc_extra_arguments': kwargs['rcc_extra_arguments']})
+        self._detect_tools(state, kwargs['method'])
+        translations: T.List[build.CustomTarget] = []
+        for ts in ts_files:
+            if not self.tools['lrelease'].found():
+                raise MesonException('qt.compile_translations: ' +
+                                     self.tools['lrelease'].name + ' not found')
+            if qresource:
+                outdir = os.path.dirname(os.path.normpath(os.path.join(state.subdir, ts)))
+                ts = os.path.basename(ts)
+            else:
+                outdir = state.subdir
+            cmd = [self.tools['lrelease'], '@INPUT@', '-qm', '@OUTPUT@']
+            lrelease_kwargs = {'output': '@BASENAME@.qm',
+                               'input': ts,
+                               'install': kwargs.get('install', False),
+                               'build_by_default': kwargs.get('build_by_default', False),
+                               'command': cmd}
+            if install_dir is not None:
+                lrelease_kwargs['install_dir'] = install_dir
+            lrelease_target = build.CustomTarget(f'qt{self.qt_version}-compile-{ts}', outdir, state.subproject, lrelease_kwargs)
+            translations.append(lrelease_target)
+        if qresource:
+            return ModuleReturnValue(results.return_value[0], [results.new_objects, translations])
+        else:
+            return ModuleReturnValue(translations, [translations])
diff --git a/meson/mesonbuild/modules/qt4.py b/meson/mesonbuild/modules/qt4.py
new file mode 100644
index 000000000..e85a1506f
--- /dev/null
+++ b/meson/mesonbuild/modules/qt4.py
@@ -0,0 +1,25 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .qt import QtBaseModule
+
+
+class Qt4Module(QtBaseModule):
+
+    def __init__(self, interpreter):
+        QtBaseModule.__init__(self, interpreter, qt_version=4)
+
+
+def initialize(*args, **kwargs):
+    return Qt4Module(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/qt5.py b/meson/mesonbuild/modules/qt5.py
new file mode 100644
index 000000000..873c2dbeb
--- /dev/null
+++ b/meson/mesonbuild/modules/qt5.py
@@ -0,0 +1,25 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .qt import QtBaseModule
+
+
+class Qt5Module(QtBaseModule):
+
+    def __init__(self, interpreter):
+        QtBaseModule.__init__(self, interpreter, qt_version=5)
+
+
+def initialize(*args, **kwargs):
+    return Qt5Module(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/qt6.py b/meson/mesonbuild/modules/qt6.py
new file mode 100644
index 000000000..d9cd6519d
--- /dev/null
+++ b/meson/mesonbuild/modules/qt6.py
@@ -0,0 +1,25 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .qt import QtBaseModule
+
+
+class Qt6Module(QtBaseModule):
+
+    def __init__(self, interpreter):
+        QtBaseModule.__init__(self, interpreter, qt_version=6)
+
+
+def initialize(*args, **kwargs):
+    return Qt6Module(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/rpm.py b/meson/mesonbuild/modules/rpm.py
new file mode 100644
index 000000000..1fae14444
--- /dev/null
+++ b/meson/mesonbuild/modules/rpm.py
@@ -0,0 +1,186 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for RPM-related
+functionality, such as generating a template RPM spec file.'''
+
+from .. import build
+from .. import compilers
+import datetime
+from .. import mlog
+from . import GirTarget, TypelibTarget
+from . import ExtensionModule
+from ..interpreterbase import noKwargs
+
+import os
+
+class RPMModule(ExtensionModule):
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'generate_spec_template': self.generate_spec_template,
+        })
+
+    @noKwargs
+    def generate_spec_template(self, state, args, kwargs):
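+        # Typically invoked from meson.build (hypothetical sketch):
+        #   rpm = import('rpm')
+        #   rpm.generate_spec_template()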
+        required_compilers = self.__get_required_compilers(state)
+        proj = state.project_name.replace(' ', '_').replace('\t', '_')
+        so_installed = False
+        devel_subpkg = False
+        files = set()
+        files_devel = set()
+        to_delete = set()
+        for target in state.targets.values():
+            if isinstance(target, build.Executable) and target.need_install:
+                files.add('%%{_bindir}/%s' % target.get_filename())
+            elif isinstance(target, build.SharedLibrary) and target.need_install:
+                files.add('%%{_libdir}/%s' % target.get_filename())
+                for alias in target.get_aliases():
+                    if alias.endswith('.so'):
+                        files_devel.add('%%{_libdir}/%s' % alias)
+                    else:
+                        files.add('%%{_libdir}/%s' % alias)
+                so_installed = True
+            elif isinstance(target, build.StaticLibrary) and target.need_install:
+                to_delete.add('%%{buildroot}%%{_libdir}/%s' % target.get_filename())
+                mlog.warning('removing', mlog.bold(target.get_filename()),
+                             'from package because packaging static libraries is not recommended')
+            elif isinstance(target, GirTarget) and target.should_install():
+                files_devel.add('%%{_datadir}/gir-1.0/%s' % target.get_filename()[0])
+            elif isinstance(target, TypelibTarget) and target.should_install():
+                files.add('%%{_libdir}/girepository-1.0/%s' % target.get_filename()[0])
+        for header in state.headers:
+            if header.get_install_subdir():
+                files_devel.add('%%{_includedir}/%s/' % header.get_install_subdir())
+            else:
+                for hdr_src in header.get_sources():
+                    files_devel.add('%%{_includedir}/%s' % hdr_src)
+        for man in state.man:
+            for man_file in man.get_sources():
+                if man.locale:
+                    files.add('%%{_mandir}/%s/man%u/%s.*' % (man.locale, int(man_file.split('.')[-1]), man_file))
+                else:
+                    files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file))
+        if files_devel:
+            devel_subpkg = True
+
+        filename = os.path.join(state.environment.get_build_dir(),
+                                '%s.spec' % proj)
+        with open(filename, 'w+', encoding='utf-8') as fn:
+            fn.write('Name: %s\n' % proj)
+            fn.write('Version: # FIXME\n')
+            fn.write('Release: 1%{?dist}\n')
+            fn.write('Summary: # FIXME\n')
+            fn.write('License: # FIXME\n')
+            fn.write('\n')
+            fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
+            fn.write('\n')
+            fn.write('BuildRequires: meson\n')
+            for compiler in required_compilers:
+                fn.write('BuildRequires: %s\n' % compiler)
+            for dep in state.environment.coredata.deps.host:
+                fn.write('BuildRequires: pkgconfig(%s)\n' % dep[0])
+#   ext_libs and ext_progs have been removed from coredata so the following code
+#   no longer works. It is kept as a reminder of the idea should anyone wish
+#   to re-implement it.
+#
+#            for lib in state.environment.coredata.ext_libs.values():
+#                name = lib.get_name()
+#                fn.write('BuildRequires: {} # FIXME\n'.format(name))
+#                mlog.warning('replace', mlog.bold(name), 'with the real package.',
+#                             'You can use following command to find package which '
+#                             'contains this lib:',
+#                             mlog.bold("dnf provides '*/lib{}.so'".format(name)))
+#            for prog in state.environment.coredata.ext_progs.values():
+#                if not prog.found():
+#                    fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' %
+#                             prog.get_name())
+#                else:
+#                    fn.write('BuildRequires: {}\n'.format(prog.get_path()))
+            fn.write('\n')
+            fn.write('%description\n')
+            fn.write('\n')
+            if devel_subpkg:
+                fn.write('%package devel\n')
+                fn.write('Summary: Development files for %{name}\n')
+                fn.write('Requires: %{name}%{?_isa} = %{?epoch:%{epoch}:}%{version}-%{release}\n')
+                fn.write('\n')
+                fn.write('%description devel\n')
+                fn.write('Development files for %{name}.\n')
+                fn.write('\n')
+            fn.write('%prep\n')
+            fn.write('%autosetup\n')
+            fn.write('\n')
+            fn.write('%build\n')
+            fn.write('%meson\n')
+            fn.write('%meson_build\n')
+            fn.write('\n')
+            fn.write('%install\n')
+            fn.write('%meson_install\n')
+            if to_delete:
+                fn.write('rm -vf %s\n' % ' '.join(to_delete))
+            fn.write('\n')
+            fn.write('%check\n')
+            fn.write('%meson_test\n')
+            fn.write('\n')
+            fn.write('%files\n')
+            for f in files:
+                fn.write('%s\n' % f)
+            fn.write('\n')
+            if devel_subpkg:
+                fn.write('%files devel\n')
+                for f in files_devel:
+                    fn.write('%s\n' % f)
+                fn.write('\n')
+            if so_installed:
+                fn.write('%post -p /sbin/ldconfig\n')
+                fn.write('%postun -p /sbin/ldconfig\n')
+            fn.write('\n')
+            fn.write('%changelog\n')
+            fn.write('* %s meson  - \n' %
+                     datetime.date.today().strftime('%a %b %d %Y'))
+            fn.write('- \n')
+            fn.write('\n')
+        mlog.log('RPM spec template written to %s.spec.\n' % proj)
+
+    def __get_required_compilers(self, state):
+        required_compilers = set()
+        for compiler in state.environment.coredata.compilers.host.values():
+            # Elbrus has one 'lcc' package for every compiler
+            if isinstance(compiler, compilers.GnuCCompiler):
+                required_compilers.add('gcc')
+            elif isinstance(compiler, compilers.GnuCPPCompiler):
+                required_compilers.add('gcc-c++')
+            elif isinstance(compiler, compilers.ElbrusCCompiler):
+                required_compilers.add('lcc')
+            elif isinstance(compiler, compilers.ElbrusCPPCompiler):
+                required_compilers.add('lcc')
+            elif isinstance(compiler, compilers.ElbrusFortranCompiler):
+                required_compilers.add('lcc')
+            elif isinstance(compiler, compilers.ValaCompiler):
+                required_compilers.add('vala')
+            elif isinstance(compiler, compilers.GnuFortranCompiler):
+                required_compilers.add('gcc-gfortran')
+            elif isinstance(compiler, compilers.GnuObjCCompiler):
+                required_compilers.add('gcc-objc')
+            elif isinstance(compiler, compilers.GnuObjCPPCompiler):
+                required_compilers.add('gcc-objc++')
+            else:
+                mlog.log('RPM spec file not created, generation not allowed for:',
+                         mlog.bold(compiler.get_id()))
+        return required_compilers
+
+
+def initialize(*args, **kwargs):
+    return RPMModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/sourceset.py b/meson/mesonbuild/modules/sourceset.py
new file mode 100644
index 000000000..ba8b30032
--- /dev/null
+++ b/meson/mesonbuild/modules/sourceset.py
@@ -0,0 +1,198 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import namedtuple
+from .. import mesonlib
+from .. import build
+from ..mesonlib import listify, OrderedSet
+from . import ExtensionModule, ModuleObject, MutableModuleObject
+from ..interpreterbase import (
+    noPosargs, noKwargs, permittedKwargs,
+    InterpreterException, InvalidArguments, InvalidCode, FeatureNew,
+)
+
+SourceSetRule = namedtuple('SourceSetRule', 'keys sources if_false sourcesets dependencies extra_deps')
+SourceFiles = namedtuple('SourceFiles', 'sources dependencies')
+
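+# Hypothetical meson.build sketch:
+#   ss_mod = import('sourceset')
+#   ss = ss_mod.source_set()
+#   ss.add(when: 'CONFIG_FOO', if_true: files('foo.c'))
+#   ss_conf = ss.apply(config_data)
+#   executable('prog', ss_conf.sources(), dependencies: ss_conf.dependencies())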
+class SourceSet(MutableModuleObject):
+    def __init__(self, interpreter):
+        super().__init__()
+        self.rules = []
+        self.subproject = interpreter.subproject
+        self.environment = interpreter.environment
+        self.subdir = interpreter.subdir
+        self.frozen = False
+        self.methods.update({
+            'add': self.add_method,
+            'add_all': self.add_all_method,
+            'all_sources': self.all_sources_method,
+            'all_dependencies': self.all_dependencies_method,
+            'apply': self.apply_method,
+        })
+
+    def check_source_files(self, arg, allow_deps):
+        sources = []
+        deps = []
+        for x in arg:
+            if isinstance(x, (str, mesonlib.File,
+                              build.GeneratedList, build.CustomTarget,
+                              build.CustomTargetIndex)):
+                sources.append(x)
+            elif hasattr(x, 'found'):
+                if not allow_deps:
+                    msg = 'Dependencies are not allowed in the if_false argument.'
+                    raise InvalidArguments(msg)
+                deps.append(x)
+            else:
+                msg = 'Sources must be strings or file-like objects.'
+                raise InvalidArguments(msg)
+        mesonlib.check_direntry_issues(sources)
+        return sources, deps
+
+    def check_conditions(self, arg):
+        keys = []
+        deps = []
+        for x in listify(arg):
+            if isinstance(x, str):
+                keys.append(x)
+            elif hasattr(x, 'found'):
+                deps.append(x)
+            else:
+                raise InvalidArguments('Conditions must be strings or dependency objects')
+        return keys, deps
+
+    @permittedKwargs(['when', 'if_false', 'if_true'])
+    def add_method(self, state, args, kwargs):
+        if self.frozen:
+            raise InvalidCode('Tried to use \'add\' after querying the source set')
+        when = listify(kwargs.get('when', []))
+        if_true = listify(kwargs.get('if_true', []))
+        if_false = listify(kwargs.get('if_false', []))
+        if not when and not if_true and not if_false:
+            if_true = args
+        elif args:
+            raise InterpreterException('add called with both positional and keyword arguments')
+        keys, dependencies = self.check_conditions(when)
+        sources, extra_deps = self.check_source_files(if_true, True)
+        if_false, _ = self.check_source_files(if_false, False)
+        self.rules.append(SourceSetRule(keys, sources, if_false, [], dependencies, extra_deps))
+
+    @permittedKwargs(['when', 'if_true'])
+    def add_all_method(self, state, args, kwargs):
+        if self.frozen:
+            raise InvalidCode('Tried to use \'add_all\' after querying the source set')
+        when = listify(kwargs.get('when', []))
+        if_true = listify(kwargs.get('if_true', []))
+        if not when and not if_true:
+            if_true = args
+        elif args:
+            raise InterpreterException('add_all called with both positional and keyword arguments')
+        keys, dependencies = self.check_conditions(when)
+        for s in if_true:
+            if not isinstance(s, SourceSet):
+                raise InvalidCode('Arguments to \'add_all\' after the first must be source sets')
+            s.frozen = True
+        self.rules.append(SourceSetRule(keys, [], [], if_true, dependencies, []))
+
+    def collect(self, enabled_fn, all_sources, into=None):
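+        # A rule contributes its sources and dependencies when all of its
+        # dependencies are found and all of its keys are enabled; its if_false
+        # sources are added when the rule does not match (or unconditionally
+        # when all_sources is true).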
+        if not into:
+            into = SourceFiles(OrderedSet(), OrderedSet())
+        for entry in self.rules:
+            if all(x.found() for x in entry.dependencies) and \
+               all(enabled_fn(key) for key in entry.keys):
+                into.sources.update(entry.sources)
+                into.dependencies.update(entry.dependencies)
+                into.dependencies.update(entry.extra_deps)
+                for ss in entry.sourcesets:
+                    ss.collect(enabled_fn, all_sources, into)
+                if not all_sources:
+                    continue
+            into.sources.update(entry.if_false)
+        return into
+
+    @noKwargs
+    @noPosargs
+    def all_sources_method(self, state, args, kwargs):
+        self.frozen = True
+        files = self.collect(lambda x: True, True)
+        return list(files.sources)
+
+    @noKwargs
+    @noPosargs
+    @FeatureNew('source_set.all_dependencies() method', '0.52.0')
+    def all_dependencies_method(self, state, args, kwargs):
+        self.frozen = True
+        files = self.collect(lambda x: True, True)
+        return list(files.dependencies)
+
+    @permittedKwargs(['strict'])
+    def apply_method(self, state, args, kwargs):
+        if len(args) != 1:
+            raise InterpreterException('Apply takes exactly one argument')
+        config_data = args[0]
+        self.frozen = True
+        strict = kwargs.get('strict', True)
+        if isinstance(config_data, dict):
+            def _get_from_config_data(key):
+                if strict and key not in config_data:
+                    raise InterpreterException(f'Entry {key} not in configuration dictionary.')
+                return config_data.get(key, False)
+        else:
+            config_cache = dict()
+
+            def _get_from_config_data(key):
+                nonlocal config_cache
+                if key not in config_cache:
+                    args = [key] if strict else [key, False]
+                    config_cache[key] = config_data.get_method(args, {})
+                return config_cache[key]
+
+        files = self.collect(_get_from_config_data, False)
+        res = SourceFilesObject(files)
+        return res
+
+class SourceFilesObject(ModuleObject):
+    def __init__(self, files):
+        super().__init__()
+        self.files = files
+        self.methods.update({
+            'sources': self.sources_method,
+            'dependencies': self.dependencies_method,
+        })
+
+    @noPosargs
+    @noKwargs
+    def sources_method(self, state, args, kwargs):
+        return list(self.files.sources)
+
+    @noPosargs
+    @noKwargs
+    def dependencies_method(self, state, args, kwargs):
+        return list(self.files.dependencies)
+
+class SourceSetModule(ExtensionModule):
+    @FeatureNew('SourceSet module', '0.51.0')
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.methods.update({
+            'source_set': self.source_set,
+        })
+
+    @noKwargs
+    @noPosargs
+    def source_set(self, state, args, kwargs):
+        return SourceSet(self.interpreter)
+
+def initialize(*args, **kwargs):
+    return SourceSetModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_cuda.py b/meson/mesonbuild/modules/unstable_cuda.py
new file mode 100644
index 000000000..d542fdd54
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_cuda.py
@@ -0,0 +1,350 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+import re
+
+from ..mesonlib import version_compare
+from ..compilers import CudaCompiler, Compiler
+
+from . import NewExtensionModule
+
+from ..interpreterbase import (
+    flatten, permittedKwargs, noKwargs,
+    InvalidArguments, FeatureNew
+)
+
+class CudaModule(NewExtensionModule):
+
+    @FeatureNew('CUDA module', '0.50.0')
+    def __init__(self, *args, **kwargs):
+        super().__init__()
+        self.methods.update({
+            "min_driver_version": self.min_driver_version,
+            "nvcc_arch_flags":    self.nvcc_arch_flags,
+            "nvcc_arch_readable": self.nvcc_arch_readable,
+        })
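+        # Hypothetical meson.build sketch using this module:
+        #   cuda = import('unstable-cuda')
+        #   flags = cuda.nvcc_arch_flags(meson.get_compiler('cuda'), 'Auto')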
+
+    @noKwargs
+    def min_driver_version(self, state: 'ModuleState',
+                                 args: T.Tuple[str],
+                                 kwargs: T.Dict[str, T.Any]) -> str:
+        argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' +
+                                    'a CUDA Toolkit version string. Beware that, since CUDA 11.0, ' +
+                                    'the CUDA Toolkit\'s components (including NVCC) are versioned ' +
+                                    'independently from each other (and the CUDA Toolkit as a whole).')
+
+        if len(args) != 1 or not isinstance(args[0], str):
+            raise argerror
+
+        cuda_version = args[0]
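+        # Minimum driver versions as published in NVIDIA's CUDA Toolkit
+        # release notes, newest toolkit first (Windows / Linux values).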
+        driver_version_table = [
+            {'cuda_version': '>=11.5.0',   'windows': '496.04', 'linux': '495.29.05'},
+            {'cuda_version': '>=11.4.1',   'windows': '471.41', 'linux': '470.57.02'},
+            {'cuda_version': '>=11.4.0',   'windows': '471.11', 'linux': '470.42.01'},
+            {'cuda_version': '>=11.3.0',   'windows': '465.89', 'linux': '465.19.01'},
+            {'cuda_version': '>=11.2.2',   'windows': '461.33', 'linux': '460.32.03'},
+            {'cuda_version': '>=11.2.1',   'windows': '461.09', 'linux': '460.32.03'},
+            {'cuda_version': '>=11.2.0',   'windows': '460.82', 'linux': '460.27.03'},
+            {'cuda_version': '>=11.1.1',   'windows': '456.81', 'linux': '455.32'},
+            {'cuda_version': '>=11.1.0',   'windows': '456.38', 'linux': '455.23'},
+            {'cuda_version': '>=11.0.3',   'windows': '451.82', 'linux': '450.51.06'},
+            {'cuda_version': '>=11.0.2',   'windows': '451.48', 'linux': '450.51.05'},
+            {'cuda_version': '>=11.0.1',   'windows': '451.22', 'linux': '450.36.06'},
+            {'cuda_version': '>=10.2.89',  'windows': '441.22', 'linux': '440.33'},
+            {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'},
+            {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'},
+            {'cuda_version': '>=9.2.148',  'windows': '398.26', 'linux': '396.37'},
+            {'cuda_version': '>=9.2.88',   'windows': '397.44', 'linux': '396.26'},
+            {'cuda_version': '>=9.1.85',   'windows': '391.29', 'linux': '390.46'},
+            {'cuda_version': '>=9.0.76',   'windows': '385.54', 'linux': '384.81'},
+            {'cuda_version': '>=8.0.61',   'windows': '376.51', 'linux': '375.26'},
+            {'cuda_version': '>=8.0.44',   'windows': '369.30', 'linux': '367.48'},
+            {'cuda_version': '>=7.5.16',   'windows': '353.66', 'linux': '352.31'},
+            {'cuda_version': '>=7.0.28',   'windows': '347.62', 'linux': '346.46'},
+        ]
+
+        driver_version = 'unknown'
+        for d in driver_version_table:
+            if version_compare(cuda_version, d['cuda_version']):
+                driver_version = d.get(state.host_machine.system, d['linux'])
+                break
+
+        return driver_version
+
+    @permittedKwargs(['detected'])
+    def nvcc_arch_flags(self, state: 'ModuleState',
+                              args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
+                              kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
+        ret = self._nvcc_arch_flags(*nvcc_arch_args)[0]
+        return ret
+
+    @permittedKwargs(['detected'])
+    def nvcc_arch_readable(self, state: 'ModuleState',
+                                 args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
+                                 kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
+        ret = self._nvcc_arch_flags(*nvcc_arch_args)[1]
+        return ret
+
+    @staticmethod
+    def _break_arch_string(s):
+        s = re.sub('[ \t\r\n,;]+', ';', s)
+        s = s.strip(';').split(';')
+        return s
+
+    @staticmethod
+    def _detected_cc_from_compiler(c):
+        if isinstance(c, CudaCompiler):
+            return c.detected_cc
+        return ''
+
+    @staticmethod
+    def _version_from_compiler(c):
+        if isinstance(c, CudaCompiler):
+            return c.version
+        if isinstance(c, str):
+            return c
+        return 'unknown'
+
+    def _validate_nvcc_arch_args(self, args, kwargs):
+        argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!')
+
+        if len(args) < 1:
+            raise argerror
+        else:
+            compiler = args[0]
+            cuda_version = self._version_from_compiler(compiler)
+            if cuda_version == 'unknown':
+                raise argerror
+
+        arch_list = [] if len(args) <= 1 else flatten(args[1:])
+        arch_list = [self._break_arch_string(a) for a in arch_list]
+        arch_list = flatten(arch_list)
+        if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}):
+            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+        arch_list = arch_list[0] if len(arch_list) == 1 else arch_list
+
+        detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler))
+        detected = flatten([detected])
+        detected = [self._break_arch_string(a) for a in detected]
+        detected = flatten(detected)
+        if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
+            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+
+        return cuda_version, arch_list, detected
+
+    def _filter_cuda_arch_list(self, cuda_arch_list, lo=None, hi=None, saturate=None):
+        """
+        Filter CUDA arch list (no codenames) for >= low and < hi architecture
+        bounds, and deduplicate.
+        If saturate is provided, architectures >= hi are replaced with saturate.
+        """
+
+        filtered_cuda_arch_list = []
+        for arch in cuda_arch_list:
+            if arch:
+                if lo and version_compare(arch, '<' + lo):
+                    continue
+                if hi and version_compare(arch, '>=' + hi):
+                    if not saturate:
+                        continue
+                    arch = saturate
+                if arch not in filtered_cuda_arch_list:
+                    filtered_cuda_arch_list.append(arch)
+        return filtered_cuda_arch_list
+
+    def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''):
+        """
+        Using the CUDA Toolkit version and the target architectures, compute
+        the NVCC architecture flags.
+        """
+
+        # Replicates much of the logic of
+        #     https://github.com/Kitware/CMake/blob/master/Modules/FindCUDA/select_compute_arch.cmake
+        # except that a bug with cuda_arch_list="All" is worked around by
+        # tracking both lower and upper limits on GPU architectures.
+
+        cuda_known_gpu_architectures   = ['Fermi', 'Kepler', 'Maxwell']  # noqa: E221
+        cuda_common_gpu_architectures  = ['3.0', '3.5', '5.0']           # noqa: E221
+        cuda_hi_limit_gpu_architecture = None                            # noqa: E221
+        cuda_lo_limit_gpu_architecture = '2.0'                           # noqa: E221
+        cuda_all_gpu_architectures     = ['3.0', '3.2', '3.5', '5.0']    # noqa: E221
+
+        if version_compare(cuda_version, '<7.0'):
+            cuda_hi_limit_gpu_architecture = '5.2'
+
+        if version_compare(cuda_version, '>=7.0'):
+            cuda_known_gpu_architectures  += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra']  # noqa: E221
+            cuda_common_gpu_architectures += ['5.2']                                            # noqa: E221
+
+            if version_compare(cuda_version, '<8.0'):
+                cuda_common_gpu_architectures += ['5.2+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '6.0'        # noqa: E221
+
+        if version_compare(cuda_version, '>=8.0'):
+            cuda_known_gpu_architectures  += ['Pascal', 'Pascal+Tegra']  # noqa: E221
+            cuda_common_gpu_architectures += ['6.0', '6.1']              # noqa: E221
+            cuda_all_gpu_architectures    += ['6.0', '6.1', '6.2']       # noqa: E221
+
+            if version_compare(cuda_version, '<9.0'):
+                cuda_common_gpu_architectures += ['6.1+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '7.0'        # noqa: E221
+
+        if version_compare(cuda_version, '>=9.0'):
+            cuda_known_gpu_architectures  += ['Volta', 'Xavier'] # noqa: E221
+            cuda_common_gpu_architectures += ['7.0']             # noqa: E221
+            cuda_all_gpu_architectures    += ['7.0', '7.2']      # noqa: E221
+            # https://docs.nvidia.com/cuda/archive/9.0/cuda-toolkit-release-notes/index.html#unsupported-features
+            cuda_lo_limit_gpu_architecture = '3.0'               # noqa: E221
+
+            if version_compare(cuda_version, '<10.0'):
+                cuda_common_gpu_architectures += ['7.2+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221
+
+        if version_compare(cuda_version, '>=10.0'):
+            cuda_known_gpu_architectures  += ['Turing'] # noqa: E221
+            cuda_common_gpu_architectures += ['7.5']    # noqa: E221
+            cuda_all_gpu_architectures    += ['7.5']    # noqa: E221
+
+            if version_compare(cuda_version, '<11.0'):
+                cuda_common_gpu_architectures += ['7.5+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221
+
+        if version_compare(cuda_version, '>=11.0'):
+            cuda_known_gpu_architectures  += ['Ampere'] # noqa: E221
+            cuda_common_gpu_architectures += ['8.0']    # noqa: E221
+            cuda_all_gpu_architectures    += ['8.0']    # noqa: E221
+            # https://docs.nvidia.com/cuda/archive/11.0/cuda-toolkit-release-notes/index.html#deprecated-features
+            cuda_lo_limit_gpu_architecture = '3.5'      # noqa: E221
+
+            if version_compare(cuda_version, '<11.1'):
+                cuda_common_gpu_architectures += ['8.0+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '8.6'        # noqa: E221
+
+        if version_compare(cuda_version, '>=11.1'):
+            cuda_common_gpu_architectures += ['8.6', '8.6+PTX']  # noqa: E221
+            cuda_all_gpu_architectures    += ['8.6']             # noqa: E221
+
+            if version_compare(cuda_version, '<12.0'):
+                cuda_hi_limit_gpu_architecture = '9.0'        # noqa: E221
+
+        if not cuda_arch_list:
+            cuda_arch_list = 'Auto'
+
+        if   cuda_arch_list == 'All':     # noqa: E271
+            cuda_arch_list = cuda_known_gpu_architectures
+        elif cuda_arch_list == 'Common':  # noqa: E271
+            cuda_arch_list = cuda_common_gpu_architectures
+        elif cuda_arch_list == 'Auto':    # noqa: E271
+            if detected:
+                if isinstance(detected, list):
+                    cuda_arch_list = detected
+                else:
+                    cuda_arch_list = self._break_arch_string(detected)
+                cuda_arch_list = self._filter_cuda_arch_list(cuda_arch_list,
+                                                             cuda_lo_limit_gpu_architecture,
+                                                             cuda_hi_limit_gpu_architecture,
+                                                             cuda_common_gpu_architectures[-1])
+            else:
+                cuda_arch_list = cuda_common_gpu_architectures
+        elif isinstance(cuda_arch_list, str):
+            cuda_arch_list = self._break_arch_string(cuda_arch_list)
+
+        cuda_arch_list = sorted([x for x in set(cuda_arch_list) if x])
+
+        cuda_arch_bin = []
+        cuda_arch_ptx = []
+        for arch_name in cuda_arch_list:
+            arch_bin = []
+            arch_ptx = []
+            add_ptx = arch_name.endswith('+PTX')
+            if add_ptx:
+                arch_name = arch_name[:-len('+PTX')]
+
+            if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name):
+                arch_bin, arch_ptx = [arch_name], [arch_name]
+            else:
+                arch_bin, arch_ptx = {
+                    'Fermi':         (['2.0', '2.1(2.0)'], []),
+                    'Kepler+Tegra':  (['3.2'],             []),
+                    'Kepler+Tesla':  (['3.7'],             []),
+                    'Kepler':        (['3.0', '3.5'],      ['3.5']),
+                    'Maxwell+Tegra': (['5.3'],             []),
+                    'Maxwell':       (['5.0', '5.2'],      ['5.2']),
+                    'Pascal':        (['6.0', '6.1'],      ['6.1']),
+                    'Pascal+Tegra':  (['6.2'],             []),
+                    'Volta':         (['7.0'],             ['7.0']),
+                    'Xavier':        (['7.2'],             []),
+                    'Turing':        (['7.5'],             ['7.5']),
+                    'Ampere':        (['8.0'],             ['8.0']),
+                }.get(arch_name, (None, None))
+
+            if arch_bin is None:
+                raise InvalidArguments('Unknown CUDA Architecture Name {}!'
+                                       .format(arch_name))
+
+            cuda_arch_bin += arch_bin
+
+            if add_ptx:
+                if not arch_ptx:
+                    arch_ptx = arch_bin
+                cuda_arch_ptx += arch_ptx
+
+        cuda_arch_bin = sorted(list(set(cuda_arch_bin)))
+        cuda_arch_ptx = sorted(list(set(cuda_arch_ptx)))
+
+        nvcc_flags = []
+        nvcc_archs_readable = []
+
+        for arch in cuda_arch_bin:
+            arch, codev = re.fullmatch(
+                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()
+
+            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
+                continue
+            if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+                continue
+
+            if codev:
+                arch = arch.replace('.', '')
+                codev = codev.replace('.', '')
+                nvcc_flags += ['-gencode', 'arch=compute_' + codev + ',code=sm_' + arch]
+                nvcc_archs_readable += ['sm_' + arch]
+            else:
+                arch = arch.replace('.', '')
+                nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch]
+                nvcc_archs_readable += ['sm_' + arch]
+
+        for arch in cuda_arch_ptx:
+            arch, codev = re.fullmatch(
+                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()
+
+            if codev:
+                arch = codev
+
+            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
+                continue
+            if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+                continue
+
+            arch = arch.replace('.', '')
+            nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch]
+            nvcc_archs_readable += ['compute_' + arch]
+
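+        # Illustrative result: with cuda_arch_bin = ['7.0', '7.5'] and
+        # cuda_arch_ptx = ['7.5'] (and both within the version limits), the
+        # loops above produce
+        #   nvcc_flags = ['-gencode', 'arch=compute_70,code=sm_70',
+        #                 '-gencode', 'arch=compute_75,code=sm_75',
+        #                 '-gencode', 'arch=compute_75,code=compute_75']
+        #   nvcc_archs_readable = ['sm_70', 'sm_75', 'compute_75']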
+        return nvcc_flags, nvcc_archs_readable
+
+def initialize(*args, **kwargs):
+    return CudaModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_external_project.py b/meson/mesonbuild/modules/unstable_external_project.py
new file mode 100644
index 000000000..573622696
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_external_project.py
@@ -0,0 +1,268 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, subprocess, shlex
+from pathlib import Path
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleState, NewExtensionModule
+from .. import mlog, build
+from ..mesonlib import (EnvironmentException, MesonException, Popen_safe,
+                        MachineChoice, get_variable_regex, do_replacement,
+                        extract_as_list)
+from ..interpreterbase import InterpreterException, FeatureNew
+from ..interpreterbase import permittedKwargs, typed_pos_args
+from ..compilers.compilers import CFLAGS_MAPPING, CEXE_MAPPING
+from ..dependencies import InternalDependency, PkgConfigDependency
+from ..mesonlib import OptionKey
+
+class ExternalProject(NewExtensionModule):
+    def __init__(self,
+                 state: ModuleState,
+                 configure_command: str,
+                 configure_options: T.List[str],
+                 cross_configure_options: T.List[str],
+                 env: build.EnvironmentVariables,
+                 verbose: bool):
+        super().__init__()
+        self.methods.update({'dependency': self.dependency_method,
+                             })
+
+        self.subdir = Path(state.subdir)
+        self.project_version = state.project_version
+        self.subproject = state.subproject
+        self.env = state.environment
+        self.build_machine = state.build_machine
+        self.host_machine = state.host_machine
+        self.configure_command = configure_command
+        self.configure_options = configure_options
+        self.cross_configure_options = cross_configure_options
+        self.verbose = verbose
+        self.user_env = env
+
+        self.src_dir = Path(self.env.get_source_dir(), self.subdir)
+        self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build')
+        self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist')
+        self.prefix = Path(self.env.coredata.get_option(OptionKey('prefix')))
+        self.libdir = Path(self.env.coredata.get_option(OptionKey('libdir')))
+        self.includedir = Path(self.env.coredata.get_option(OptionKey('includedir')))
+        self.name = self.src_dir.name
+
+        # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make`
+        # will install files into "c:/bar/c:/foo" which is an invalid path.
+        # Work around that issue by removing the drive from prefix.
+        if self.prefix.drive:
+            self.prefix = self.prefix.relative_to(self.prefix.drive)
+
+        # self.prefix is an absolute path, so we cannot append it to another path.
+        self.rel_prefix = self.prefix.relative_to(self.prefix.root)
+
+        self.make = state.find_program('make')
+        self.make = self.make.get_command()[0]
+
+        self._configure(state)
+
+        self.targets = self._create_targets()
+
+    def _configure(self, state: ModuleState):
+        # Assume it's the name of a script in source dir, like 'configure',
+        # 'autogen.sh', etc).
+        configure_path = Path(self.src_dir, self.configure_command)
+        configure_prog = state.find_program(configure_path.as_posix())
+        configure_cmd = configure_prog.get_command()
+
+        d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()),
+             ('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()),
+             ('INCLUDEDIR', None, self.includedir.as_posix()),
+             ]
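+        # For example (illustrative values): with prefix '/usr/local' and libdir
+        # 'lib', the '--libdir=@PREFIX@/@LIBDIR@' default below is expanded by
+        # _format_options() to '--libdir=/usr/local/lib'.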
+        self._validate_configure_options(d)
+
+        configure_cmd += self._format_options(self.configure_options, d)
+
+        if self.env.is_cross_build():
+            host = '{}-{}-{}'.format(self.host_machine.cpu_family,
+                                     self.build_machine.system,
+                                     self.host_machine.system)
+            d = [('HOST', None, host)]
+            configure_cmd += self._format_options(self.cross_configure_options, d)
+
+        # Set common env variables like CFLAGS, CC, etc.
+        link_exelist = []
+        link_args = []
+        self.run_env = os.environ.copy()
+        for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items():
+            if any(lang not in i for i in (CEXE_MAPPING, CFLAGS_MAPPING)):
+                continue
+            cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang)
+            self.run_env[CEXE_MAPPING[lang]] = self._quote_and_join(compiler.get_exelist())
+            self.run_env[CFLAGS_MAPPING[lang]] = self._quote_and_join(cargs)
+            if not link_exelist:
+                link_exelist = compiler.get_linker_exelist()
+                link_args = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang)
+        if link_exelist:
+            # FIXME: Do not pass linker because Meson uses CC as linker wrapper,
+            # but autotools often expects the real linker (e.g. GNU ld).
+            # self.run_env['LD'] = self._quote_and_join(link_exelist)
+            pass
+        self.run_env['LDFLAGS'] = self._quote_and_join(link_args)
+
+        self.run_env = self.user_env.get_env(self.run_env)
+
+        PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST,
+                                      Path(self.env.get_build_dir(), 'meson-uninstalled').as_posix())
+
+        self.build_dir.mkdir(parents=True, exist_ok=True)
+        self._run('configure', configure_cmd)
+
+    def _quote_and_join(self, array: T.List[str]) -> str:
+        return ' '.join([shlex.quote(i) for i in array])
+
+    def _validate_configure_options(self, variables: T.List[T.Tuple[str, str, str]]):
+        # Ensure the user at least tries to pass basic info to the build system,
+        # like the prefix, libdir, etc.
+        for key, default, val in variables:
+            if default is None:
+                continue
+            key_format = f'@{key}@'
+            for option in self.configure_options:
+                if key_format in option:
+                    break
+            else:
+                FeatureNew('Default configure_option', '0.57.0').use(self.subproject)
+                self.configure_options.append(default)
+
+    def _format_options(self, options: T.List[str], variables: T.List[T.Tuple[str, str, str]]) -> T.List[str]:
+        out = []
+        missing = set()
+        regex = get_variable_regex('meson')
+        confdata = {k: (v, None) for k, d, v in variables}
+        for o in options:
+            arg, missing_vars = do_replacement(regex, o, 'meson', confdata)
+            missing.update(missing_vars)
+            out.append(arg)
+        if missing:
+            var_list = ", ".join(map(repr, sorted(missing)))
+            raise EnvironmentException(
+                f"Variables {var_list} in configure options are missing.")
+        return out
+
+    def _run(self, step: str, command: T.List[str]):
+        mlog.log(f'External project {self.name}:', mlog.bold(step))
+        m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
+        log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log')
+        output = None
+        if not self.verbose:
+            output = open(log_filename, 'w', encoding='utf-8')
+            output.write(m + '\n')
+            output.flush()
+        else:
+            mlog.log(m)
+        p, o, e = Popen_safe(command, cwd=str(self.build_dir), env=self.run_env,
+                                      stderr=subprocess.STDOUT,
+                                      stdout=output)
+        if p.returncode != 0:
+            m = f'{step} step returned error code {p.returncode}.'
+            if not self.verbose:
+                m += '\nSee logs: ' + str(log_filename)
+            raise MesonException(m)
+
+    def _create_targets(self):
+        cmd = self.env.get_build_command()
+        cmd += ['--internal', 'externalproject',
+                '--name', self.name,
+                '--srcdir', self.src_dir.as_posix(),
+                '--builddir', self.build_dir.as_posix(),
+                '--installdir', self.install_dir.as_posix(),
+                '--logdir', mlog.log_dir,
+                '--make', self.make,
+                ]
+        if self.verbose:
+            cmd.append('--verbose')
+
+        target_kwargs = {'output': f'{self.name}.stamp',
+                         'depfile': f'{self.name}.d',
+                         'command': cmd + ['@OUTPUT@', '@DEPFILE@'],
+                         'console': True,
+                         }
+        self.target = build.CustomTarget(self.name,
+                                         self.subdir.as_posix(),
+                                         self.subproject,
+                                         target_kwargs)
+
+        idir = build.InstallDir(self.subdir.as_posix(),
+                                Path('dist', self.rel_prefix).as_posix(),
+                                install_dir='.',
+                                install_mode=None,
+                                exclude=None,
+                                strip_directory=True,
+                                from_source_dir=False,
+                                subproject=self.subproject)
+
+        return [self.target, idir]
+
+    @permittedKwargs({'subdir'})
+    @typed_pos_args('external_project.dependency', str)
+    def dependency_method(self, state, args: T.Tuple[str], kwargs):
+        libname = args[0]
+
+        subdir = kwargs.get('subdir', '')
+        if not isinstance(subdir, str):
+            m = 'ExternalProject.dependency subdir keyword argument must be a string.'
+            raise InterpreterException(m)
+
+        abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir)
+        if subdir:
+            abs_includedir = Path(abs_includedir, subdir)
+        abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir)
+
+        version = self.project_version['version']
+        incdir = []
+        compile_args = [f'-I{abs_includedir}']
+        link_args = [f'-L{abs_libdir}', f'-l{libname}']
+        libs = []
+        libs_whole = []
+        sources = self.target
+        final_deps = []
+        variables = {}
+        dep = InternalDependency(version, incdir, compile_args, link_args, libs,
+                                 libs_whole, sources, final_deps, variables)
+        return dep
+
+
+class ExternalProjectModule(ExtensionModule):
+    @FeatureNew('External build system Module', '0.56.0')
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({'add_project': self.add_project,
+                             })
+
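+    # Illustrative meson.build usage of this module; the 'configure' script and
+    # the 'somelib' dependency name below are hypothetical examples, not taken
+    # from this file:
+    #
+    #   mod = import('unstable-external_project')
+    #   p = mod.add_project('configure',
+    #                       configure_options : ['--enable-static'])
+    #   somelib_dep = p.dependency('somelib')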
+    @permittedKwargs({'configure_options', 'cross_configure_options', 'verbose', 'env'})
+    @typed_pos_args('external_project_mod.add_project', str)
+    def add_project(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]):
+        configure_command = args[0]
+        configure_options = extract_as_list(kwargs, 'configure_options')
+        cross_configure_options = extract_as_list(kwargs, 'cross_configure_options')
+        if not cross_configure_options:
+            cross_configure_options = ['--host=@HOST@']
+        verbose = kwargs.get('verbose', False)
+        env = self.interpreter.unpack_env_kwarg(kwargs)
+        project = ExternalProject(state,
+                                  configure_command,
+                                  configure_options,
+                                  cross_configure_options,
+                                  env, verbose)
+        return ModuleReturnValue(project, project.targets)
+
+
+def initialize(*args, **kwargs):
+    return ExternalProjectModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_icestorm.py b/meson/mesonbuild/modules/unstable_icestorm.py
new file mode 100644
index 000000000..841e647e4
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_icestorm.py
@@ -0,0 +1,89 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib
+from ..interpreterbase import flatten
+from ..interpreterbase import FeatureNew
+
+from . import ExtensionModule
+
+class IceStormModule(ExtensionModule):
+
+    @FeatureNew('FPGA/Icestorm Module', '0.45.0')
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.yosys_bin = None
+        self.methods.update({
+            'project': self.project,
+        })
+
+    def detect_binaries(self, state):
+        self.yosys_bin = state.find_program('yosys')
+        self.arachne_bin = state.find_program('arachne-pnr')
+        self.icepack_bin = state.find_program('icepack')
+        self.iceprog_bin = state.find_program('iceprog')
+        self.icetime_bin = state.find_program('icetime')
+
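+    # Illustrative meson.build usage; the project name, source file and
+    # constraint file below are hypothetical:
+    #
+    #   ice = import('unstable-icestorm')
+    #   ice.project('blinky', 'top.v', constraint_file : 'pins.pcf')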
+    def project(self, state, args, kwargs):
+        if not self.yosys_bin:
+            self.detect_binaries(state)
+        if not args:
+            raise mesonlib.MesonException('Project requires at least one argument, which is the project name.')
+        proj_name = args[0]
+        arg_sources = args[1:]
+        if not isinstance(proj_name, str):
+            raise mesonlib.MesonException('Argument must be a string.')
+        kwarg_sources = kwargs.get('sources', [])
+        if not isinstance(kwarg_sources, list):
+            kwarg_sources = [kwarg_sources]
+        all_sources = self.interpreter.source_strings_to_files(flatten(arg_sources + kwarg_sources))
+        if 'constraint_file' not in kwargs:
+            raise mesonlib.MesonException('Constraint file not specified.')
+
+        constraint_file = self.interpreter.source_strings_to_files(kwargs['constraint_file'])
+        if len(constraint_file) != 1:
+            raise mesonlib.MesonException('Constraint file must contain one and only one entry.')
+        blif_name = proj_name + '_blif'
+        blif_fname = proj_name + '.blif'
+        asc_name = proj_name + '_asc'
+        asc_fname = proj_name + '.asc'
+        bin_name = proj_name + '_bin'
+        bin_fname = proj_name + '.bin'
+        time_name = proj_name + '-time'
+        upload_name = proj_name + '-upload'
+
+        blif_target = self.interpreter.func_custom_target(None, [blif_name], {
+            'input': all_sources,
+            'output': blif_fname,
+            'command': [self.yosys_bin, '-q', '-p', 'synth_ice40 -blif @OUTPUT@', '@INPUT@']})
+
+        asc_target = self.interpreter.func_custom_target(None, [asc_name], {
+            'input': blif_target,
+            'output': asc_fname,
+            'command': [self.arachne_bin, '-q', '-d', '1k', '-p', constraint_file, '@INPUT@', '-o', '@OUTPUT@']})
+
+        bin_target = self.interpreter.func_custom_target(None, [bin_name], {
+            'input': asc_target,
+            'output': bin_fname,
+            'command': [self.icepack_bin, '@INPUT@', '@OUTPUT@'],
+            'build_by_default': True})
+
+        self.interpreter.func_run_target(None, [upload_name], {
+            'command': [self.iceprog_bin, bin_target]})
+
+        self.interpreter.func_run_target(None, [time_name], {
+            'command': [self.icetime_bin, bin_target]})
+
+def initialize(*args, **kwargs):
+    return IceStormModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_rust.py b/meson/mesonbuild/modules/unstable_rust.py
new file mode 100644
index 000000000..995370a7d
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_rust.py
@@ -0,0 +1,227 @@
+# Copyright © 2020 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue
+from .. import mlog
+from ..build import BuildTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments, IncludeDirs, CustomTarget
+from ..interpreter.interpreter import TEST_KWARGS
+from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, permittedKwargs, FeatureNew, typed_kwargs, typed_pos_args, noPosargs
+from ..mesonlib import stringlistify, listify, typeslistify, File
+from ..dependencies import Dependency, ExternalLibrary
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..interpreter import Interpreter
+    from ..interpreter import kwargs as _kwargs
+    from ..interpreter.interpreter import SourceOutputs
+    from ..programs import ExternalProgram
+
+    class FuncTest(_kwargs.BaseTest):
+
+        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+        is_parallel: bool
+
+
+class RustModule(ExtensionModule):
+
+    """A module that holds helper functions for rust."""
+
+    @FeatureNew('rust module', '0.57.0')
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__(interpreter)
+        self._bindgen_bin: T.Optional['ExternalProgram'] = None
+        self.methods.update({
+            'test': self.test,
+            'bindgen': self.bindgen,
+        })
+
+    @typed_pos_args('rust.test', str, BuildTarget)
+    @typed_kwargs(
+        'rust.test',
+        *TEST_KWARGS,
+        KwargInfo('is_parallel', bool, default=False),
+        KwargInfo(
+            'dependencies',
+            ContainerTypeInfo(list, (Dependency, ExternalLibrary)),
+            listify=True,
+            default=[]),
+    )
+    def test(self, state: 'ModuleState', args: T.Tuple[str, BuildTarget], kwargs: 'FuncTest') -> ModuleReturnValue:
+        """Generate a rust test target from a given rust target.
+
+        Rust puts its unit tests inside its main source files, unlike most
+        languages, which put them in external files. This means that normally
+        you have to define two separate targets with basically the same
+        arguments to get tests:
+
+        ```meson
+        rust_lib_sources = [...]
+        rust_lib = static_library(
+            'rust_lib',
+            rust_lib_sources,
+        )
+
+        rust_lib_test = executable(
+            'rust_lib_test',
+            rust_lib_sources,
+            rust_args : ['--test'],
+        )
+
+        test(
+            'rust_lib_test',
+            rust_lib_test,
+            protocol : 'rust',
+        )
+        ```
+
+        This is all fine, but not very DRY. This method makes it much easier
+        to define rust tests:
+
+        ```meson
+        rust = import('unstable-rust')
+
+        rust_lib = static_library(
+            'rust_lib',
+            [sources],
+        )
+
+        rust.test('rust_lib_test', rust_lib)
+        ```
+        """
+        name = args[0]
+        base_target: BuildTarget = args[1]
+        if not base_target.uses_rust():
+            raise InterpreterException('Second positional argument to rustmod.test() must be a rust based target')
+        extra_args = kwargs['args']
+
+        # Delete any arguments we don't want passed
+        if '--test' in extra_args:
+            mlog.warning('Do not add --test to rustmod.test arguments')
+            extra_args.remove('--test')
+        if '--format' in extra_args:
+            mlog.warning('Do not add --format to rustmod.test arguments')
+            i = extra_args.index('--format')
+            # Also delete the argument to --format
+            del extra_args[i + 1]
+            del extra_args[i]
+        for i, a in enumerate(extra_args):
+            if isinstance(a, str) and a.startswith('--format='):
+                del extra_args[i]
+                break
+
+        dependencies = list(kwargs['dependencies'])
+
+        # We need to cast here, as currently these don't have protocol in them, but test itself does.
+        tkwargs = T.cast('_kwargs.FuncTest', kwargs.copy())
+
+        tkwargs['args'] = extra_args + ['--test', '--format', 'pretty']
+        tkwargs['protocol'] = 'rust'
+
+        new_target_kwargs = base_target.kwargs.copy()
+        # Don't mutate the shallow copied list, instead replace it with a new
+        # one
+        new_target_kwargs['rust_args'] = new_target_kwargs.get('rust_args', []) + ['--test']
+        new_target_kwargs['install'] = False
+        new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + dependencies
+
+        new_target = Executable(
+            name, base_target.subdir, state.subproject,
+            base_target.for_machine, base_target.sources,
+            base_target.objects, base_target.environment,
+            new_target_kwargs
+        )
+
+        test = self.interpreter.make_test(
+            self.interpreter.current_node, (name, new_target), tkwargs)
+
+        return ModuleReturnValue(None, [new_target, test])
+
+    @noPosargs
+    @permittedKwargs({'input', 'output', 'include_directories', 'c_args', 'args'})
+    def bindgen(self, state: 'ModuleState', args: T.List, kwargs: T.Dict[str, T.Any]) -> ModuleReturnValue:
+        """Wrapper around bindgen to simplify it's use.
+
+        The main thing this simplifies is the use of `include_directory`
+        objects, instead of having to pass a plethora of `-I` arguments.
+        """
+        header: 'SourceOutputs'
+        _deps: T.Sequence['SourceOutputs']
+        try:
+            header, *_deps = self.interpreter.source_strings_to_files(listify(kwargs['input']))
+        except KeyError:
+            raise InvalidArguments('rustmod.bindgen() `input` argument must have at least one element.')
+
+        try:
+            output: str = kwargs['output']
+        except KeyError:
+            raise InvalidArguments('rustmod.bindgen() `output` must be provided')
+        if not isinstance(output, str):
+            raise InvalidArguments('rustmod.bindgen() `output` argument must be a string.')
+
+        include_dirs: T.List[IncludeDirs] = typeslistify(listify(kwargs.get('include_directories', [])), IncludeDirs)
+        c_args: T.List[str] = stringlistify(listify(kwargs.get('c_args', [])))
+        bind_args: T.List[str] = stringlistify(listify(kwargs.get('args', [])))
+
+        # Split File and Target dependencies to pass to CustomTarget
+        depends: T.List[T.Union[GeneratedList, BuildTarget, CustomTargetIndex, CustomTarget]] = []
+        depend_files: T.List[File] = []
+        for d in _deps:
+            if isinstance(d, File):
+                depend_files.append(d)
+            else:
+                depends.append(d)
+
+        inc_strs: T.List[str] = []
+        for i in include_dirs:
+            # bindgen always uses clang, so it's safe to hardcode -I here
+            inc_strs.extend([f'-I{x}' for x in i.to_string_list(state.environment.get_source_dir())])
+
+        if self._bindgen_bin is None:
+            self._bindgen_bin = state.find_program('bindgen')
+
+        name: str
+        if isinstance(header, File):
+            name = header.fname
+        else:
+            name = header.get_outputs()[0]
+
+        target = CustomTarget(
+            f'rustmod-bindgen-{name}'.replace('/', '_'),
+            state.subdir,
+            state.subproject,
+            {
+                'input': header,
+                'output': output,
+                'command': self._bindgen_bin.get_command() + [
+                    '@INPUT@', '--output',
+                    os.path.join(state.environment.build_dir, '@OUTPUT@')] +
+                    bind_args + ['--'] + c_args + inc_strs +
+                    ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'],
+                'depfile': '@PLAINNAME@.d',
+                'depends': depends,
+                'depend_files': depend_files,
+            },
+            backend=state.backend,
+        )
+
+        return ModuleReturnValue([target], [target])
+
+
+def initialize(*args: T.List, **kwargs: T.Dict) -> RustModule:
+    return RustModule(*args, **kwargs)  # type: ignore
diff --git a/meson/mesonbuild/modules/unstable_simd.py b/meson/mesonbuild/modules/unstable_simd.py
new file mode 100644
index 000000000..3339cea5a
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_simd.py
@@ -0,0 +1,88 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib, compilers, mlog
+
+from . import ExtensionModule
+
+from ..interpreterbase import FeatureNew
+
+class SimdModule(ExtensionModule):
+
+    @FeatureNew('SIMD module', '0.42.0')
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        # FIXME add Altivec and AVX512.
+        self.isets = ('mmx',
+                      'sse',
+                      'sse2',
+                      'sse3',
+                      'ssse3',
+                      'sse41',
+                      'sse42',
+                      'avx',
+                      'avx2',
+                      'neon',
+                      )
+        self.methods.update({
+            'check': self.check,
+        })
+
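+    # Illustrative meson.build usage; the prefix and source file names below
+    # are hypothetical:
+    #
+    #   simd = import('unstable-simd')
+    #   rval = simd.check('mysimd',
+    #                     sse42 : 'simd_sse42.c',
+    #                     avx2 : 'simd_avx2.c',
+    #                     compiler : meson.get_compiler('c'))
+    #   simd_libs = rval[0]   # static libraries, one per supported instruction set
+    #   simd_cdata = rval[1]  # configuration_data with HAVE_SSE42, HAVE_AVX2, ...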
+    def check(self, state, args, kwargs):
+        result = []
+        if len(args) != 1:
+            raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.')
+        prefix = args[0]
+        if not isinstance(prefix, str):
+            raise mesonlib.MesonException('Argument must be a string.')
+        if 'compiler' not in kwargs:
+            raise mesonlib.MesonException('Must specify compiler keyword')
+        if 'sources' in kwargs:
+            raise mesonlib.MesonException('SIMD module does not support the "sources" keyword')
+        basic_kwargs = {}
+        for key, value in kwargs.items():
+            if key not in self.isets and key != 'compiler':
+                basic_kwargs[key] = value
+        compiler = kwargs['compiler']
+        if not isinstance(compiler, compilers.compilers.Compiler):
+            raise mesonlib.MesonException('Compiler argument must be a compiler object.')
+        cdata = self.interpreter.func_configuration_data(None, [], {})
+        conf = cdata.conf_data
+        for iset in self.isets:
+            if iset not in kwargs:
+                continue
+            iset_fname = kwargs[iset] # Might also be an array or Files. static_library will validate.
+            args = compiler.get_instruction_set_args(iset)
+            if args is None:
+                mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
+                continue
+            if args:
+                if not compiler.has_multi_arguments(args, state.environment)[0]:
+                    mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
+                    continue
+            mlog.log('Compiler supports %s:' % iset, mlog.green('YES'))
+            conf.values['HAVE_' + iset.upper()] = ('1', 'Compiler supports %s.' % iset)
+            libname = prefix + '_' + iset
+            lib_kwargs = {'sources': iset_fname,
+                          }
+            lib_kwargs.update(basic_kwargs)
+            langarg_key = compiler.get_language() + '_args'
+            old_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key)
+            all_lang_args = old_lang_args + args
+            lib_kwargs[langarg_key] = all_lang_args
+            result.append(self.interpreter.func_static_lib(None, [libname], lib_kwargs))
+        return [result, cdata]
+
+def initialize(*args, **kwargs):
+    return SimdModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/windows.py b/meson/mesonbuild/modules/windows.py
new file mode 100644
index 000000000..7f627cff1
--- /dev/null
+++ b/meson/mesonbuild/modules/windows.py
@@ -0,0 +1,171 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import os
+import re
+
+from .. import mlog
+from .. import mesonlib, build
+from ..mesonlib import MachineChoice, MesonException, extract_as_list
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs, FeatureNewKwargs, flatten
+from ..programs import ExternalProgram
+
+class ResourceCompilerType(enum.Enum):
+    windres = 1
+    rc = 2
+    wrc = 3
+
+class WindowsModule(ExtensionModule):
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'compile_resources': self.compile_resources,
+        })
+
+    def detect_compiler(self, compilers):
+        for l in ('c', 'cpp'):
+            if l in compilers:
+                return compilers[l]
+        raise MesonException('Resource compilation requires a C or C++ compiler.')
+
+    def _find_resource_compiler(self, state):
+        # FIXME: Does not handle `native: true` executables, see
+        # https://github.com/mesonbuild/meson/issues/1531
+        # Take a parameter instead of the hardcoded definition below
+        for_machine = MachineChoice.HOST
+
+        if hasattr(self, '_rescomp'):
+            return self._rescomp
+
+        # Will try cross / native file and then env var
+        rescomp = ExternalProgram.from_bin_list(state.environment, for_machine, 'windres')
+
+        if not rescomp or not rescomp.found():
+            comp = self.detect_compiler(state.environment.coredata.compilers[for_machine])
+            if comp.id in {'msvc', 'clang-cl', 'intel-cl'}:
+                rescomp = ExternalProgram('rc', silent=True)
+            else:
+                rescomp = ExternalProgram('windres', silent=True)
+
+        if not rescomp.found():
+            raise MesonException('Could not find Windows resource compiler')
+
+        for (arg, match, rc_type) in [
+                ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc),
+                ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres),
+                ('--version', '^.*Wine Resource Compiler.*$', ResourceCompilerType.wrc),
+        ]:
+            p, o, e = mesonlib.Popen_safe(rescomp.get_command() + [arg])
+            m = re.search(match, o, re.MULTILINE)
+            if m:
+                mlog.log('Windows resource compiler: %s' % m.group())
+                self._rescomp = (rescomp, rc_type)
+                break
+        else:
+            raise MesonException('Could not determine type of Windows resource compiler')
+
+        return self._rescomp
+
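+    # Illustrative meson.build usage; the resource script and target names are
+    # hypothetical:
+    #
+    #   win = import('windows')
+    #   res = win.compile_resources('myapp.rc',
+    #                               include_directories : include_directories('res'))
+    #   executable('myapp', 'main.c', res)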
+    @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends'])
+    @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'})
+    def compile_resources(self, state, args, kwargs):
+        extra_args = mesonlib.stringlistify(flatten(kwargs.get('args', [])))
+        wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True)
+        wrc_depends = extract_as_list(kwargs, 'depends', pop = True)
+        for d in wrc_depends:
+            if isinstance(d, build.CustomTarget):
+                extra_args += state.get_include_args([
+                    build.IncludeDirs('', [], False, [os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(d))])
+                ])
+        inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True)
+        for incd in inc_dirs:
+            if not isinstance(incd, (str, build.IncludeDirs)):
+                raise MesonException('Resource include dirs should be include_directories().')
+        extra_args += state.get_include_args(inc_dirs)
+
+        rescomp, rescomp_type = self._find_resource_compiler(state)
+        if rescomp_type == ResourceCompilerType.rc:
+            # RC is used to generate .res files, a special binary resource
+            # format, which can be passed directly to LINK (apparently LINK uses
+            # CVTRES internally to convert this to a COFF object)
+            suffix = 'res'
+            res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@']
+        elif rescomp_type == ResourceCompilerType.windres:
+            # ld only supports object files, so windres is used to generate a
+            # COFF object
+            suffix = 'o'
+            res_args = extra_args + ['@INPUT@', '@OUTPUT@']
+
+            m = 'Argument {!r} has a space which may not work with windres due to ' \
+                'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
+            for arg in extra_args:
+                if ' ' in arg:
+                    mlog.warning(m.format(arg), fatal=False)
+        else:
+            suffix = 'o'
+            res_args = extra_args + ['@INPUT@', '-o', '@OUTPUT@']
+
+        res_targets = []
+
+        def add_target(src):
+            if isinstance(src, list):
+                for subsrc in src:
+                    add_target(subsrc)
+                return
+
+            if isinstance(src, str):
+                name_formatted = src
+                name = os.path.join(state.subdir, src)
+            elif isinstance(src, mesonlib.File):
+                name_formatted = src.fname
+                name = src.relative_name()
+            elif isinstance(src, build.CustomTarget):
+                if len(src.get_outputs()) > 1:
+                    raise MesonException('windows.compile_resources does not accept custom targets with more than 1 output.')
+
+                # Chances are that src.get_filename() is already the name of that
+                # target; add a prefix to avoid a name clash.
+                name_formatted = 'windows_compile_resources_' + src.get_filename()
+                name = src.get_id()
+            else:
+                raise MesonException(f'Unexpected source type {src!r}. windows.compile_resources accepts only strings, files, custom targets, and lists thereof.')
+
+            # Path separators are not allowed in target names
+            name = name.replace('/', '_').replace('\\', '_')
+            name_formatted = name_formatted.replace('/', '_').replace('\\', '_')
+
+            res_kwargs = {
+                'output': name + '_@BASENAME@.' + suffix,
+                'input': [src],
+                'command': [rescomp] + res_args,
+                'depend_files': wrc_depend_files,
+                'depends': wrc_depends,
+            }
+
+            # instruct binutils windres to generate a preprocessor depfile
+            if rescomp_type == ResourceCompilerType.windres:
+                res_kwargs['depfile'] = res_kwargs['output'] + '.d'
+                res_kwargs['command'] += ['--preprocessor-arg=-MD', '--preprocessor-arg=-MQ@OUTPUT@', '--preprocessor-arg=-MF@DEPFILE@']
+
+            res_targets.append(build.CustomTarget(name_formatted, state.subdir, state.subproject, res_kwargs))
+
+        add_target(args)
+
+        return ModuleReturnValue(res_targets, [res_targets])
+
+def initialize(*args, **kwargs):
+    return WindowsModule(*args, **kwargs)
diff --git a/meson/mesonbuild/mparser.py b/meson/mesonbuild/mparser.py
new file mode 100644
index 000000000..10796827c
--- /dev/null
+++ b/meson/mesonbuild/mparser.py
@@ -0,0 +1,814 @@
+# Copyright 2014-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import codecs
+import textwrap
+import types
+import typing as T
+from .mesonlib import MesonException
+from . import mlog
+
+if T.TYPE_CHECKING:
+    from .ast import AstVisitor
+
+# This is the regex for the supported escape sequences of a regular string
+# literal, like 'abc\x00'
+ESCAPE_SEQUENCE_SINGLE_RE = re.compile(r'''
+    ( \\U[A-Fa-f0-9]{8}   # 8-digit hex escapes
+    | \\u[A-Fa-f0-9]{4}   # 4-digit hex escapes
+    | \\x[A-Fa-f0-9]{2}   # 2-digit hex escapes
+    | \\[0-7]{1,3}        # Octal escapes
+    | \\N\{[^}]+\}        # Unicode characters by name
+    | \\[\\'abfnrtv]      # Single-character escapes
+    )''', re.UNICODE | re.VERBOSE)
+
+class MesonUnicodeDecodeError(MesonException):
+    def __init__(self, match: str) -> None:
+        super().__init__(match)
+        self.match = match
+
+def decode_match(match: T.Match[str]) -> str:
+    try:
+        return codecs.decode(match.group(0).encode(), 'unicode_escape')
+    except UnicodeDecodeError:
+        raise MesonUnicodeDecodeError(match.group(0))
+
+class ParseException(MesonException):
+    def __init__(self, text: str, line: str, lineno: int, colno: int) -> None:
+        # Format as error message, followed by the line with the error, followed by a caret to show the error column.
+        super().__init__("{}\n{}\n{}".format(text, line, '{}^'.format(' ' * colno)))
+        self.lineno = lineno
+        self.colno = colno
+
+class BlockParseException(MesonException):
+    def __init__(
+                self,
+                text: str,
+                line: str,
+                lineno: int,
+                colno: int,
+                start_line: str,
+                start_lineno: int,
+                start_colno: int,
+            ) -> None:
+        # This can be formatted in two ways - one if the block start and end are on the same line, and a different way if they are on different lines.
+
+        if lineno == start_lineno:
+            # If block start and end are on the same line, it is formatted as:
+            # Error message
+            # Followed by the line with the error
+            # Followed by a caret to show the block start
+            # Followed by underscores
+            # Followed by a caret to show the block end.
+            super().__init__("{}\n{}\n{}".format(text, line, '{}^{}^'.format(' ' * start_colno, '_' * (colno - start_colno - 1))))
+        else:
+            # If block start and end are on different lines, it is formatted as:
+            # Error message
+            # Followed by the line with the error
+            # Followed by a caret to show the error column.
+            # Followed by a message saying where the block started.
+            # Followed by the line of the block start.
+            # Followed by a caret for the block start.
+            super().__init__("%s\n%s\n%s\nFor a block that started at %d,%d\n%s\n%s" % (text, line, '%s^' % (' ' * colno), start_lineno, start_colno, start_line, "%s^" % (' ' * start_colno)))
+        self.lineno = lineno
+        self.colno = colno
+
+TV_TokenTypes = T.TypeVar('TV_TokenTypes', int, str, bool)
+
+class Token(T.Generic[TV_TokenTypes]):
+    def __init__(self, tid: str, filename: str, line_start: int, lineno: int, colno: int, bytespan: T.Tuple[int, int], value: TV_TokenTypes):
+        self.tid = tid                # type: str
+        self.filename = filename      # type: str
+        self.line_start = line_start  # type: int
+        self.lineno = lineno          # type: int
+        self.colno = colno            # type: int
+        self.bytespan = bytespan      # type: T.Tuple[int, int]
+        self.value = value            # type: TV_TokenTypes
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, str):
+            return self.tid == other
+        elif isinstance(other, Token):
+            return self.tid == other.tid
+        return NotImplemented
+
+class Lexer:
+    def __init__(self, code: str):
+        self.code = code
+        self.keywords = {'true', 'false', 'if', 'else', 'elif',
+                         'endif', 'and', 'or', 'not', 'foreach', 'endforeach',
+                         'in', 'continue', 'break'}
+        self.future_keywords = {'return'}
+        self.token_specification = [
+            # Need to be sorted longest to shortest.
+            ('ignore', re.compile(r'[ \t]')),
+            ('fstring', re.compile(r"f'([^'\\]|(\\.))*'")),
+            ('id', re.compile('[_a-zA-Z][_0-9a-zA-Z]*')),
+            ('number', re.compile(r'0[bB][01]+|0[oO][0-7]+|0[xX][0-9a-fA-F]+|0|[1-9]\d*')),
+            ('eol_cont', re.compile(r'\\\n')),
+            ('eol', re.compile(r'\n')),
+            ('multiline_string', re.compile(r"'''(.|\n)*?'''", re.M)),
+            ('comment', re.compile(r'#.*')),
+            ('lparen', re.compile(r'\(')),
+            ('rparen', re.compile(r'\)')),
+            ('lbracket', re.compile(r'\[')),
+            ('rbracket', re.compile(r'\]')),
+            ('lcurl', re.compile(r'\{')),
+            ('rcurl', re.compile(r'\}')),
+            ('dblquote', re.compile(r'"')),
+            ('string', re.compile(r"'([^'\\]|(\\.))*'")),
+            ('comma', re.compile(r',')),
+            ('plusassign', re.compile(r'\+=')),
+            ('dot', re.compile(r'\.')),
+            ('plus', re.compile(r'\+')),
+            ('dash', re.compile(r'-')),
+            ('star', re.compile(r'\*')),
+            ('percent', re.compile(r'%')),
+            ('fslash', re.compile(r'/')),
+            ('colon', re.compile(r':')),
+            ('equal', re.compile(r'==')),
+            ('nequal', re.compile(r'!=')),
+            ('assign', re.compile(r'=')),
+            ('le', re.compile(r'<=')),
+            ('lt', re.compile(r'<')),
+            ('ge', re.compile(r'>=')),
+            ('gt', re.compile(r'>')),
+            ('questionmark', re.compile(r'\?')),
+        ]
+
+    def getline(self, line_start: int) -> str:
+        return self.code[line_start:self.code.find('\n', line_start)]
+
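+    # Illustrative example: lexing the line "x = f(1)" yields the token stream
+    # id('x'), assign, id('f'), lparen, number(1), rparen; 'ignore' and
+    # 'comment' matches are consumed without being yielded.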
+    def lex(self, filename: str) -> T.Generator[Token, None, None]:
+        line_start = 0
+        lineno = 1
+        loc = 0
+        par_count = 0
+        bracket_count = 0
+        curl_count = 0
+        col = 0
+        while loc < len(self.code):
+            matched = False
+            value = None  # type: T.Union[str, bool, int]
+            for (tid, reg) in self.token_specification:
+                mo = reg.match(self.code, loc)
+                if mo:
+                    curline = lineno
+                    curline_start = line_start
+                    col = mo.start() - line_start
+                    matched = True
+                    span_start = loc
+                    loc = mo.end()
+                    span_end = loc
+                    bytespan = (span_start, span_end)
+                    match_text = mo.group()
+                    if tid == 'ignore' or tid == 'comment':
+                        break
+                    elif tid == 'lparen':
+                        par_count += 1
+                    elif tid == 'rparen':
+                        par_count -= 1
+                    elif tid == 'lbracket':
+                        bracket_count += 1
+                    elif tid == 'rbracket':
+                        bracket_count -= 1
+                    elif tid == 'lcurl':
+                        curl_count += 1
+                    elif tid == 'rcurl':
+                        curl_count -= 1
+                    elif tid == 'dblquote':
+                        raise ParseException('Double quotes are not supported. Use single quotes.', self.getline(line_start), lineno, col)
+                    elif tid in {'string', 'fstring'}:
+                        # Handle here and not on the regexp to give a better error message.
+                        if match_text.find("\n") != -1:
+                            mlog.warning(textwrap.dedent("""\
+                                    Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
+                                    This will become a hard error in a future Meson release.\
+                                """),
+                                self.getline(line_start),
+                                str(lineno),
+                                str(col)
+                            )
+                        value = match_text[2 if tid == 'fstring' else 1:-1]
+                        try:
+                            value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
+                        except MesonUnicodeDecodeError as err:
+                            raise MesonException(f"Failed to parse escape sequence: '{err.match}' in string:\n  {match_text}")
+                    elif tid == 'multiline_string':
+                        tid = 'string'
+                        value = match_text[3:-3]
+                        lines = match_text.split('\n')
+                        if len(lines) > 1:
+                            lineno += len(lines) - 1
+                            line_start = mo.end() - len(lines[-1])
+                    elif tid == 'number':
+                        value = int(match_text, base=0)
+                    elif tid == 'eol_cont':
+                        lineno += 1
+                        line_start = loc
+                        break
+                    elif tid == 'eol':
+                        lineno += 1
+                        line_start = loc
+                        if par_count > 0 or bracket_count > 0 or curl_count > 0:
+                            break
+                    elif tid == 'id':
+                        if match_text in self.keywords:
+                            tid = match_text
+                        else:
+                            if match_text in self.future_keywords:
+                                mlog.warning(f"Identifier '{match_text}' will become a reserved keyword in a future release. Please rename it.",
+                                             location=types.SimpleNamespace(filename=filename, lineno=lineno))
+                            value = match_text
+                    yield Token(tid, filename, curline_start, curline, col, bytespan, value)
+                    break
+            if not matched:
+                raise ParseException('lexer', self.getline(line_start), lineno, col)
+
+class BaseNode:
+    def __init__(self, lineno: int, colno: int, filename: str, end_lineno: T.Optional[int] = None, end_colno: T.Optional[int] = None):
+        self.lineno = lineno      # type: int
+        self.colno = colno        # type: int
+        self.filename = filename  # type: str
+        self.end_lineno = end_lineno if end_lineno is not None else self.lineno
+        self.end_colno = end_colno if end_colno is not None else self.colno
+
+        # Attributes for the visitors
+        self.level = 0            # type: int
+        self.ast_id = ''          # type: str
+        self.condition_level = 0  # type: int
+
+    def accept(self, visitor: 'AstVisitor') -> None:
+        fname = 'visit_{}'.format(type(self).__name__)
+        if hasattr(visitor, fname):
+            func = getattr(visitor, fname)
+            if callable(func):
+                func(self)
+
+class ElementaryNode(T.Generic[TV_TokenTypes], BaseNode):
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.value = token.value        # type: TV_TokenTypes
+        self.bytespan = token.bytespan  # type: T.Tuple[int, int]
+
+class BooleanNode(ElementaryNode[bool]):
+    def __init__(self, token: Token[bool]):
+        super().__init__(token)
+        assert isinstance(self.value, bool)
+
+class IdNode(ElementaryNode[str]):
+    def __init__(self, token: Token[str]):
+        super().__init__(token)
+        assert isinstance(self.value, str)
+
+    def __str__(self) -> str:
+        return "Id node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
+class NumberNode(ElementaryNode[int]):
+    def __init__(self, token: Token[int]):
+        super().__init__(token)
+        assert isinstance(self.value, int)
+
+class StringNode(ElementaryNode[str]):
+    def __init__(self, token: Token[str]):
+        super().__init__(token)
+        assert isinstance(self.value, str)
+
+    def __str__(self) -> str:
+        return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
+class FormatStringNode(ElementaryNode[str]):
+    def __init__(self, token: Token[str]):
+        super().__init__(token)
+        assert isinstance(self.value, str)
+
+    def __str__(self) -> str:
+        return "Format string node: '{self.value}' ({self.lineno}, {self.colno})."
+
+class ContinueNode(ElementaryNode):
+    pass
+
+class BreakNode(ElementaryNode):
+    pass
+
+class ArgumentNode(BaseNode):
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.arguments = []  # type: T.List[BaseNode]
+        self.commas = []     # type: T.List[Token[TV_TokenTypes]]
+        self.kwargs = {}     # type: T.Dict[BaseNode, BaseNode]
+        self.order_error = False
+
+    def prepend(self, statement: BaseNode) -> None:
+        if self.num_kwargs() > 0:
+            self.order_error = True
+        if not isinstance(statement, EmptyNode):
+            self.arguments = [statement] + self.arguments
+
+    def append(self, statement: BaseNode) -> None:
+        if self.num_kwargs() > 0:
+            self.order_error = True
+        if not isinstance(statement, EmptyNode):
+            self.arguments += [statement]
+
+    def set_kwarg(self, name: IdNode, value: BaseNode) -> None:
+        if name.value in [x.value for x in self.kwargs.keys() if isinstance(x, IdNode)]:
+            mlog.warning(f'Keyword argument "{name.value}" defined multiple times.', location=self)
+            mlog.warning('This will be an error in future Meson releases.')
+        self.kwargs[name] = value
+
+    def set_kwarg_no_check(self, name: BaseNode, value: BaseNode) -> None:
+        self.kwargs[name] = value
+
+    def num_args(self) -> int:
+        return len(self.arguments)
+
+    def num_kwargs(self) -> int:
+        return len(self.kwargs)
+
+    def incorrect_order(self) -> bool:
+        return self.order_error
+
+    def __len__(self) -> int:
+        return self.num_args() # Fixme
+
+class ArrayNode(BaseNode):
+    def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+        super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.args = args              # type: ArgumentNode
+
+class DictNode(BaseNode):
+    def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+        super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.args = args
+
+class EmptyNode(BaseNode):
+    def __init__(self, lineno: int, colno: int, filename: str):
+        super().__init__(lineno, colno, filename)
+        self.value = None
+
+class OrNode(BaseNode):
+    def __init__(self, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left    # type: BaseNode
+        self.right = right  # type: BaseNode
+
+class AndNode(BaseNode):
+    def __init__(self, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left    # type: BaseNode
+        self.right = right  # type: BaseNode
+
+class ComparisonNode(BaseNode):
+    def __init__(self, ctype: str, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left    # type: BaseNode
+        self.right = right  # type: BaseNode
+        self.ctype = ctype  # type: str
+
+class ArithmeticNode(BaseNode):
+    def __init__(self, operation: str, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left            # type: BaseNode
+        self.right = right          # type: BaseNode
+        self.operation = operation  # type: str
+
+class NotNode(BaseNode):
+    def __init__(self, token: Token[TV_TokenTypes], value: BaseNode):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.value = value  # type: BaseNode
+
+class CodeBlockNode(BaseNode):
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.lines = []  # type: T.List[BaseNode]
+
+class IndexNode(BaseNode):
+    def __init__(self, iobject: BaseNode, index: BaseNode):
+        super().__init__(iobject.lineno, iobject.colno, iobject.filename)
+        self.iobject = iobject  # type: BaseNode
+        self.index = index      # type: BaseNode
+
+class MethodNode(BaseNode):
+    def __init__(self, filename: str, lineno: int, colno: int, source_object: BaseNode, name: str, args: ArgumentNode):
+        super().__init__(lineno, colno, filename)
+        self.source_object = source_object  # type: BaseNode
+        self.name = name                    # type: str
+        assert(isinstance(self.name, str))
+        self.args = args                    # type: ArgumentNode
+
+class FunctionNode(BaseNode):
+    def __init__(self, filename: str, lineno: int, colno: int, end_lineno: int, end_colno: int, func_name: str, args: ArgumentNode):
+        super().__init__(lineno, colno, filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.func_name = func_name  # type: str
+        assert(isinstance(func_name, str))
+        self.args = args  # type: ArgumentNode
+
+class AssignmentNode(BaseNode):
+    def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+        super().__init__(lineno, colno, filename)
+        self.var_name = var_name  # type: str
+        assert(isinstance(var_name, str))
+        self.value = value  # type: BaseNode
+
+class PlusAssignmentNode(BaseNode):
+    def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+        super().__init__(lineno, colno, filename)
+        self.var_name = var_name  # type: str
+        assert(isinstance(var_name, str))
+        self.value = value  # type: BaseNode
+
+class ForeachClauseNode(BaseNode):
+    def __init__(self, token: Token, varnames: T.List[str], items: BaseNode, block: CodeBlockNode):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.varnames = varnames  # type: T.List[str]
+        self.items = items        # type: BaseNode
+        self.block = block        # type: CodeBlockNode
+
+class IfNode(BaseNode):
+    def __init__(self, linenode: BaseNode, condition: BaseNode, block: CodeBlockNode):
+        super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+        self.condition = condition  # type: BaseNode
+        self.block = block          # type: CodeBlockNode
+
+class IfClauseNode(BaseNode):
+    def __init__(self, linenode: BaseNode):
+        super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+        self.ifs = []          # type: T.List[IfNode]
+        self.elseblock = None  # type: T.Union[EmptyNode, CodeBlockNode]
+
+class UMinusNode(BaseNode):
+    def __init__(self, current_location: Token, value: BaseNode):
+        super().__init__(current_location.lineno, current_location.colno, current_location.filename)
+        self.value = value  # type: BaseNode
+
+class TernaryNode(BaseNode):
+    def __init__(self, condition: BaseNode, trueblock: BaseNode, falseblock: BaseNode):
+        super().__init__(condition.lineno, condition.colno, condition.filename)
+        self.condition = condition    # type: BaseNode
+        self.trueblock = trueblock    # type: BaseNode
+        self.falseblock = falseblock  # type: BaseNode
+
+comparison_map = {'equal': '==',
+                  'nequal': '!=',
+                  'lt': '<',
+                  'le': '<=',
+                  'gt': '>',
+                  'ge': '>=',
+                  'in': 'in',
+                  'notin': 'not in',
+                  }
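+# Maps lexer token ids for the comparison operators to their Meson source
+# spellings; Parser.e4() below consults this table when it builds
+# ComparisonNode instances.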
+
+# Recursive descent parser for Meson's definition language.
+# Very basic apart from the fact that we have many precedence
+# levels so there are not enough words to describe them all.
+# Enter numbering:
+#
+# 1 assignment
+# 2 or
+# 3 and
+# 4 comparison
+# 5 arithmetic
+# 6 negation
+# 7 funcall, method call
+# 8 parentheses
+# 9 plain token
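+#
+# As a rough illustration of these levels, 'x = a or b + 1' parses as an
+# AssignmentNode for 'x' whose value is
+# OrNode(IdNode('a'), ArithmeticNode('add', IdNode('b'), NumberNode(1)))
+# (filename/position constructor arguments omitted here for brevity).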
+
+class Parser:
+    def __init__(self, code: str, filename: str):
+        self.lexer = Lexer(code)
+        self.stream = self.lexer.lex(filename)
+        self.current = Token('eof', '', 0, 0, 0, (0, 0), None)  # type: Token
+        self.getsym()
+        self.in_ternary = False
+
+    def getsym(self) -> None:
+        try:
+            self.current = next(self.stream)
+        except StopIteration:
+            self.current = Token('eof', '', self.current.line_start, self.current.lineno, self.current.colno + self.current.bytespan[1] - self.current.bytespan[0], (0, 0), None)
+
+    def getline(self) -> str:
+        return self.lexer.getline(self.current.line_start)
+
+    def accept(self, s: str) -> bool:
+        if self.current.tid == s:
+            self.getsym()
+            return True
+        return False
+
+    def accept_any(self, tids: T.Sequence[str]) -> str:
+        tid = self.current.tid
+        if tid in tids:
+            self.getsym()
+            return tid
+        return ''
+
+    def expect(self, s: str) -> bool:
+        if self.accept(s):
+            return True
+        raise ParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno)
+
+    def block_expect(self, s: str, block_start: Token) -> bool:
+        if self.accept(s):
+            return True
+        raise BlockParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno, self.lexer.getline(block_start.line_start), block_start.lineno, block_start.colno)
+
+    def parse(self) -> CodeBlockNode:
+        block = self.codeblock()
+        self.expect('eof')
+        return block
+
+    def statement(self) -> BaseNode:
+        return self.e1()
+
+    def e1(self) -> BaseNode:
+        left = self.e2()
+        if self.accept('plusassign'):
+            value = self.e1()
+            if not isinstance(left, IdNode):
+                raise ParseException('Plusassignment target must be an id.', self.getline(), left.lineno, left.colno)
+            assert isinstance(left.value, str)
+            return PlusAssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
+        elif self.accept('assign'):
+            value = self.e1()
+            if not isinstance(left, IdNode):
+                raise ParseException('Assignment target must be an id.',
+                                     self.getline(), left.lineno, left.colno)
+            assert isinstance(left.value, str)
+            return AssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
+        elif self.accept('questionmark'):
+            if self.in_ternary:
+                raise ParseException('Nested ternary operators are not allowed.',
+                                     self.getline(), left.lineno, left.colno)
+            self.in_ternary = True
+            trueblock = self.e1()
+            self.expect('colon')
+            falseblock = self.e1()
+            self.in_ternary = False
+            return TernaryNode(left, trueblock, falseblock)
+        return left
+
+    def e2(self) -> BaseNode:
+        left = self.e3()
+        while self.accept('or'):
+            if isinstance(left, EmptyNode):
+                raise ParseException('Invalid or clause.',
+                                     self.getline(), left.lineno, left.colno)
+            left = OrNode(left, self.e3())
+        return left
+
+    def e3(self) -> BaseNode:
+        left = self.e4()
+        while self.accept('and'):
+            if isinstance(left, EmptyNode):
+                raise ParseException('Invalid and clause.',
+                                     self.getline(), left.lineno, left.colno)
+            left = AndNode(left, self.e4())
+        return left
+
+    def e4(self) -> BaseNode:
+        left = self.e5()
+        for nodename, operator_type in comparison_map.items():
+            if self.accept(nodename):
+                return ComparisonNode(operator_type, left, self.e5())
+        if self.accept('not') and self.accept('in'):
+            return ComparisonNode('notin', left, self.e5())
+        return left
+
+    def e5(self) -> BaseNode:
+        return self.e5addsub()
+
+    def e5addsub(self) -> BaseNode:
+        op_map = {
+            'plus': 'add',
+            'dash': 'sub',
+        }
+        left = self.e5muldiv()
+        while True:
+            op = self.accept_any(tuple(op_map.keys()))
+            if op:
+                left = ArithmeticNode(op_map[op], left, self.e5muldiv())
+            else:
+                break
+        return left
+
+    def e5muldiv(self) -> BaseNode:
+        op_map = {
+            'percent': 'mod',
+            'star': 'mul',
+            'fslash': 'div',
+        }
+        left = self.e6()
+        while True:
+            op = self.accept_any(tuple(op_map.keys()))
+            if op:
+                left = ArithmeticNode(op_map[op], left, self.e6())
+            else:
+                break
+        return left
+
+    def e6(self) -> BaseNode:
+        if self.accept('not'):
+            return NotNode(self.current, self.e7())
+        if self.accept('dash'):
+            return UMinusNode(self.current, self.e7())
+        return self.e7()
+
+    def e7(self) -> BaseNode:
+        left = self.e8()
+        block_start = self.current
+        if self.accept('lparen'):
+            args = self.args()
+            self.block_expect('rparen', block_start)
+            if not isinstance(left, IdNode):
+                raise ParseException('Function call must be applied to plain id',
+                                     self.getline(), left.lineno, left.colno)
+            assert isinstance(left.value, str)
+            left = FunctionNode(left.filename, left.lineno, left.colno, self.current.lineno, self.current.colno, left.value, args)
+        go_again = True
+        while go_again:
+            go_again = False
+            if self.accept('dot'):
+                go_again = True
+                left = self.method_call(left)
+            if self.accept('lbracket'):
+                go_again = True
+                left = self.index_call(left)
+        return left
+
+    def e8(self) -> BaseNode:
+        block_start = self.current
+        if self.accept('lparen'):
+            e = self.statement()
+            self.block_expect('rparen', block_start)
+            return e
+        elif self.accept('lbracket'):
+            args = self.args()
+            self.block_expect('rbracket', block_start)
+            return ArrayNode(args, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
+        elif self.accept('lcurl'):
+            key_values = self.key_values()
+            self.block_expect('rcurl', block_start)
+            return DictNode(key_values, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
+        else:
+            return self.e9()
+
+    def e9(self) -> BaseNode:
+        t = self.current
+        if self.accept('true'):
+            t.value = True
+            return BooleanNode(t)
+        if self.accept('false'):
+            t.value = False
+            return BooleanNode(t)
+        if self.accept('id'):
+            return IdNode(t)
+        if self.accept('number'):
+            return NumberNode(t)
+        if self.accept('string'):
+            return StringNode(t)
+        if self.accept('fstring'):
+            return FormatStringNode(t)
+        return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+
+    def key_values(self) -> ArgumentNode:
+        s = self.statement()  # type: BaseNode
+        a = ArgumentNode(self.current)
+
+        while not isinstance(s, EmptyNode):
+            if self.accept('colon'):
+                a.set_kwarg_no_check(s, self.statement())
+                potential = self.current
+                if not self.accept('comma'):
+                    return a
+                a.commas.append(potential)
+            else:
+                raise ParseException('Only key:value pairs are valid in dict construction.',
+                                     self.getline(), s.lineno, s.colno)
+            s = self.statement()
+        return a
+
+    def args(self) -> ArgumentNode:
+        s = self.statement()  # type: BaseNode
+        a = ArgumentNode(self.current)
+
+        while not isinstance(s, EmptyNode):
+            potential = self.current
+            if self.accept('comma'):
+                a.commas.append(potential)
+                a.append(s)
+            elif self.accept('colon'):
+                if not isinstance(s, IdNode):
+                    raise ParseException('Dictionary key must be a plain identifier.',
+                                         self.getline(), s.lineno, s.colno)
+                a.set_kwarg(s, self.statement())
+                potential = self.current
+                if not self.accept('comma'):
+                    return a
+                a.commas.append(potential)
+            else:
+                a.append(s)
+                return a
+            s = self.statement()
+        return a
+
+    def method_call(self, source_object: BaseNode) -> MethodNode:
+        methodname = self.e9()
+        if not(isinstance(methodname, IdNode)):
+            raise ParseException('Method name must be plain id',
+                                 self.getline(), self.current.lineno, self.current.colno)
+        assert isinstance(methodname.value, str)
+        self.expect('lparen')
+        args = self.args()
+        self.expect('rparen')
+        method = MethodNode(methodname.filename, methodname.lineno, methodname.colno, source_object, methodname.value, args)
+        if self.accept('dot'):
+            return self.method_call(method)
+        return method
+
+    def index_call(self, source_object: BaseNode) -> IndexNode:
+        index_statement = self.statement()
+        self.expect('rbracket')
+        return IndexNode(source_object, index_statement)
+
+    def foreachblock(self) -> ForeachClauseNode:
+        t = self.current
+        self.expect('id')
+        assert isinstance(t.value, str)
+        varname = t
+        varnames = [t.value]  # type: T.List[str]
+
+        if self.accept('comma'):
+            t = self.current
+            self.expect('id')
+            assert isinstance(t.value, str)
+            varnames.append(t.value)
+
+        self.expect('colon')
+        items = self.statement()
+        block = self.codeblock()
+        return ForeachClauseNode(varname, varnames, items, block)
+
+    def ifblock(self) -> IfClauseNode:
+        condition = self.statement()
+        clause = IfClauseNode(condition)
+        self.expect('eol')
+        block = self.codeblock()
+        clause.ifs.append(IfNode(clause, condition, block))
+        self.elseifblock(clause)
+        clause.elseblock = self.elseblock()
+        return clause
+
+    def elseifblock(self, clause: IfClauseNode) -> None:
+        while self.accept('elif'):
+            s = self.statement()
+            self.expect('eol')
+            b = self.codeblock()
+            clause.ifs.append(IfNode(s, s, b))
+
+    def elseblock(self) -> T.Union[CodeBlockNode, EmptyNode]:
+        if self.accept('else'):
+            self.expect('eol')
+            return self.codeblock()
+        return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+
+    def line(self) -> BaseNode:
+        block_start = self.current
+        if self.current == 'eol':
+            return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+        if self.accept('if'):
+            ifblock = self.ifblock()
+            self.block_expect('endif', block_start)
+            return ifblock
+        if self.accept('foreach'):
+            forblock = self.foreachblock()
+            self.block_expect('endforeach', block_start)
+            return forblock
+        if self.accept('continue'):
+            return ContinueNode(self.current)
+        if self.accept('break'):
+            return BreakNode(self.current)
+        return self.statement()
+
+    def codeblock(self) -> CodeBlockNode:
+        block = CodeBlockNode(self.current)
+        cond = True
+        while cond:
+            curline = self.line()
+            if not isinstance(curline, EmptyNode):
+                block.lines.append(curline)
+            cond = self.accept('eol')
+        return block
diff --git a/meson/mesonbuild/msetup.py b/meson/mesonbuild/msetup.py
new file mode 100644
index 000000000..9ed298160
--- /dev/null
+++ b/meson/mesonbuild/msetup.py
@@ -0,0 +1,282 @@
+# Copyright 2016-2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+import time
+import sys, stat
+import datetime
+import os.path
+import platform
+import cProfile as profile
+import argparse
+import tempfile
+import shutil
+import glob
+
+from . import environment, interpreter, mesonlib
+from . import build
+from . import mlog, coredata
+from . import mintro
+from .mconf import make_lower_case
+from .mesonlib import MesonException
+
+git_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
+*
+'''
+
+hg_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
+syntax: glob
+**/*
+'''
+
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    coredata.register_builtin_arguments(parser)
+    parser.add_argument('--native-file',
+                        default=[],
+                        action='append',
+                        help='File containing overrides for native compilation environment.')
+    parser.add_argument('--cross-file',
+                        default=[],
+                        action='append',
+                        help='File describing cross compilation environment.')
+    parser.add_argument('-v', '--version', action='version',
+                        version=coredata.version)
+    parser.add_argument('--profile-self', action='store_true', dest='profile',
+                        help=argparse.SUPPRESS)
+    parser.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings',
+                        help='Make all Meson warnings fatal')
+    parser.add_argument('--reconfigure', action='store_true',
+                        help='Set options and reconfigure the project. Useful when new ' +
+                             'options have been added to the project and the default value ' +
+                             'is not working.')
+    parser.add_argument('--wipe', action='store_true',
+                        help='Wipe build directory and reconfigure using previous command line options. ' +
+                             'Useful when build directory got corrupted, or when rebuilding with a ' +
+                             'newer version of meson.')
+    parser.add_argument('builddir', nargs='?', default=None)
+    parser.add_argument('sourcedir', nargs='?', default=None)
+
+class MesonApp:
+    def __init__(self, options: argparse.Namespace) -> None:
+        (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir,
+                                                               options.sourcedir,
+                                                               options.reconfigure,
+                                                               options.wipe)
+        if options.wipe:
+            # Make a copy of the cmd line file to make sure we can always
+            # restore that file if anything bad happens. For example if
+            # configuration fails we need to be able to wipe again.
+            restore = []
+            with tempfile.TemporaryDirectory() as d:
+                for filename in [coredata.get_cmd_line_file(self.build_dir)] + glob.glob(os.path.join(self.build_dir, environment.Environment.private_dir, '*.ini')):
+                    try:
+                        restore.append((shutil.copy(filename, d), filename))
+                    except FileNotFoundError:
+                        raise MesonException(
+                            'Cannot find cmd_line.txt. This is probably because this '
+                            'build directory was configured with a meson version < 0.49.0.')
+
+                coredata.read_cmd_line_file(self.build_dir, options)
+
+                try:
+                    # Don't delete the whole tree, just all of the files and
+                    # folders in the tree. Otherwise calling wipe from the builddir
+                    # will cause a crash
+                    for l in os.listdir(self.build_dir):
+                        l = os.path.join(self.build_dir, l)
+                        if os.path.isdir(l) and not os.path.islink(l):
+                            mesonlib.windows_proof_rmtree(l)
+                        else:
+                            mesonlib.windows_proof_rm(l)
+                finally:
+                    self.add_vcs_ignore_files(self.build_dir)
+                    for b, f in restore:
+                        os.makedirs(os.path.dirname(f), exist_ok=True)
+                        shutil.move(b, f)
+
+        self.options = options
+
+    def has_build_file(self, dirname: str) -> bool:
+        fname = os.path.join(dirname, environment.build_filename)
+        return os.path.exists(fname)
+
+    def validate_core_dirs(self, dir1: str, dir2: str) -> T.Tuple[str, str]:
+        if dir1 is None:
+            if dir2 is None:
+                if not os.path.exists('meson.build') and os.path.exists('../meson.build'):
+                    dir2 = '..'
+                else:
+                    raise MesonException('Must specify at least one directory name.')
+            dir1 = os.getcwd()
+        if dir2 is None:
+            dir2 = os.getcwd()
+        ndir1 = os.path.abspath(os.path.realpath(dir1))
+        ndir2 = os.path.abspath(os.path.realpath(dir2))
+        if not os.path.exists(ndir1):
+            os.makedirs(ndir1)
+        if not os.path.exists(ndir2):
+            os.makedirs(ndir2)
+        if not stat.S_ISDIR(os.stat(ndir1).st_mode):
+            raise MesonException(f'{dir1} is not a directory')
+        if not stat.S_ISDIR(os.stat(ndir2).st_mode):
+            raise MesonException(f'{dir2} is not a directory')
+        if os.path.samefile(ndir1, ndir2):
+            # Fall back to a textual comparison if stat returns undefined entries
+            has_undefined = any((s.st_ino == 0 and s.st_dev == 0) for s in (os.stat(ndir1), os.stat(ndir2)))
+            if not has_undefined or ndir1 == ndir2:
+                raise MesonException('Source and build directories must not be the same. Create a pristine build directory.')
+        if self.has_build_file(ndir1):
+            if self.has_build_file(ndir2):
+                raise MesonException(f'Both directories contain a build file {environment.build_filename}.')
+            return ndir1, ndir2
+        if self.has_build_file(ndir2):
+            return ndir2, ndir1
+        raise MesonException(f'Neither directory contains a build file {environment.build_filename}.')
+
+    def add_vcs_ignore_files(self, build_dir: str) -> None:
+        if os.listdir(build_dir):
+            return
+        with open(os.path.join(build_dir, '.gitignore'), 'w', encoding='utf-8') as ofile:
+            ofile.write(git_ignore_file)
+        with open(os.path.join(build_dir, '.hgignore'), 'w', encoding='utf-8') as ofile:
+            ofile.write(hg_ignore_file)
+
+    def validate_dirs(self, dir1: str, dir2: str, reconfigure: bool, wipe: bool) -> T.Tuple[str, str]:
+        (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2)
+        self.add_vcs_ignore_files(build_dir)
+        priv_dir = os.path.join(build_dir, 'meson-private/coredata.dat')
+        if os.path.exists(priv_dir):
+            if not reconfigure and not wipe:
+                print('Directory already configured.\n'
+                      '\nJust run your build command (e.g. ninja) and Meson will regenerate as necessary.\n'
+                      'If ninja fails, run "ninja reconfigure" or "meson --reconfigure"\n'
+                      'to force Meson to regenerate.\n'
+                      '\nIf build failures persist, run "meson setup --wipe" to rebuild from scratch\n'
+                      'using the same options as passed when configuring the build.'
+                      '\nTo change option values, run "meson configure" instead.')
+                raise SystemExit
+        else:
+            has_cmd_line_file = os.path.exists(coredata.get_cmd_line_file(build_dir))
+            if (wipe and not has_cmd_line_file) or (not wipe and reconfigure):
+                raise SystemExit(f'Directory does not contain a valid build tree:\n{build_dir}')
+        return src_dir, build_dir
+
+    def generate(self) -> None:
+        env = environment.Environment(self.source_dir, self.build_dir, self.options)
+        mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
+        if self.options.profile:
+            mlog.set_timestamp_start(time.monotonic())
+        if env.coredata.options[mesonlib.OptionKey('backend')].value == 'xcode':
+            mlog.warning('xcode backend is currently unmaintained, patches welcome')
+        with mesonlib.BuildDirLock(self.build_dir):
+            self._generate(env)
+
+    def _generate(self, env: environment.Environment) -> None:
+        mlog.debug('Build started at', datetime.datetime.now().isoformat())
+        mlog.debug('Main binary:', sys.executable)
+        mlog.debug('Build Options:', coredata.get_cmd_line_options(self.build_dir, self.options))
+        mlog.debug('Python system:', platform.system())
+        mlog.log(mlog.bold('The Meson build system'))
+        mlog.log('Version:', coredata.version)
+        mlog.log('Source dir:', mlog.bold(self.source_dir))
+        mlog.log('Build dir:', mlog.bold(self.build_dir))
+        if env.is_cross_build():
+            mlog.log('Build type:', mlog.bold('cross build'))
+        else:
+            mlog.log('Build type:', mlog.bold('native build'))
+        b = build.Build(env)
+
+        intr = interpreter.Interpreter(b)
+        if env.is_cross_build():
+            logger_fun = mlog.log
+        else:
+            logger_fun = mlog.debug
+        build_machine = intr.builtin['build_machine']
+        host_machine = intr.builtin['host_machine']
+        target_machine = intr.builtin['target_machine']
+        assert isinstance(build_machine, interpreter.MachineHolder)
+        assert isinstance(host_machine, interpreter.MachineHolder)
+        assert isinstance(target_machine, interpreter.MachineHolder)
+        logger_fun('Build machine cpu family:', mlog.bold(build_machine.cpu_family_method([], {})))
+        logger_fun('Build machine cpu:', mlog.bold(build_machine.cpu_method([], {})))
+        mlog.log('Host machine cpu family:', mlog.bold(host_machine.cpu_family_method([], {})))
+        mlog.log('Host machine cpu:', mlog.bold(host_machine.cpu_method([], {})))
+        logger_fun('Target machine cpu family:', mlog.bold(target_machine.cpu_family_method([], {})))
+        logger_fun('Target machine cpu:', mlog.bold(target_machine.cpu_method([], {})))
+        try:
+            if self.options.profile:
+                fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log')
+                profile.runctx('intr.run()', globals(), locals(), filename=fname)
+            else:
+                intr.run()
+        except Exception as e:
+            mintro.write_meson_info_file(b, [e])
+            raise
+        # Print all default option values that don't match the current value
+        for def_opt_name, def_opt_value, cur_opt_value in intr.get_non_matching_default_options():
+            mlog.log('Option', mlog.bold(def_opt_name), 'is:',
+                     mlog.bold('{}'.format(make_lower_case(cur_opt_value.printable_value()))),
+                     '[default: {}]'.format(make_lower_case(def_opt_value)))
+        try:
+            dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
+            # We would like to write coredata as late as possible since we use the existence of
+            # this file to check if we generated the build file successfully. Since coredata
+            # includes settings, the build files must depend on it and appear newer. However, due
+            # to various kernel caches, we cannot guarantee that any time in Python is exactly in
+            # sync with the time that gets applied to any files. Thus, we dump this file as late as
+            # possible, but before build files, and if any error occurs, delete it.
+            cdf = env.dump_coredata()
+            if self.options.profile:
+                fname = f'profile-{intr.backend.name}-backend.log'
+                fname = os.path.join(self.build_dir, 'meson-private', fname)
+                profile.runctx('intr.backend.generate()', globals(), locals(), filename=fname)
+            else:
+                intr.backend.generate()
+            b.devenv.append(intr.backend.get_devenv())
+            build.save(b, dumpfile)
+            if env.first_invocation:
+                # Use the paths resolved by coredata because they could have been
+                # read from a pipe and written into a private file.
+                self.options.cross_file = env.coredata.cross_files
+                self.options.native_file = env.coredata.config_files
+                coredata.write_cmd_line_file(self.build_dir, self.options)
+            else:
+                coredata.update_cmd_line_file(self.build_dir, self.options)
+
+            # Generate an IDE introspection file with the same syntax as the already existing API
+            if self.options.profile:
+                fname = os.path.join(self.build_dir, 'meson-private', 'profile-introspector.log')
+                profile.runctx('mintro.generate_introspection_file(b, intr.backend)', globals(), locals(), filename=fname)
+            else:
+                mintro.generate_introspection_file(b, intr.backend)
+            mintro.write_meson_info_file(b, [], True)
+
+            # Post-conf scripts must be run after writing coredata or else introspection fails.
+            intr.backend.run_postconf_scripts()
+        except Exception as e:
+            mintro.write_meson_info_file(b, [e])
+            if 'cdf' in locals():
+                old_cdf = cdf + '.prev'
+                if os.path.exists(old_cdf):
+                    os.replace(old_cdf, cdf)
+                else:
+                    os.unlink(cdf)
+            raise
+
+def run(options: argparse.Namespace) -> int:
+    coredata.parse_cmd_line_options(options)
+    app = MesonApp(options)
+    app.generate()
+    return 0
diff --git a/meson/mesonbuild/msubprojects.py b/meson/mesonbuild/msubprojects.py
new file mode 100755
index 000000000..269f0feba
--- /dev/null
+++ b/meson/mesonbuild/msubprojects.py
@@ -0,0 +1,561 @@
+import os, subprocess
+import argparse
+import asyncio
+import threading
+import copy
+import shutil
+from concurrent.futures.thread import ThreadPoolExecutor
+from pathlib import Path
+import typing as T
+
+from . import mlog
+from .mesonlib import quiet_git, GitException, Popen_safe, MesonException, windows_proof_rmtree
+from .wrap.wrap import PackageDefinition, Resolver, WrapException, ALL_TYPES
+from .wrap import wraptool
+
+ALL_TYPES_STRING = ', '.join(ALL_TYPES)
+
+class Logger:
+    def __init__(self, total_tasks: int) -> None:
+        self.lock = threading.Lock()
+        self.total_tasks = total_tasks
+        self.completed_tasks = 0
+        self.running_tasks = set()
+        self.should_erase_line = ''
+
+    def flush(self) -> None:
+        if self.should_erase_line:
+            print(self.should_erase_line, end='\r')
+            self.should_erase_line = ''
+
+    def print_progress(self) -> None:
+        line = f'Progress: {self.completed_tasks} / {self.total_tasks}'
+        max_len = shutil.get_terminal_size().columns - len(line)
+        running = ', '.join(self.running_tasks)
+        if len(running) + 3 > max_len:
+            running = running[:max_len - 6] + '...'
+        line = line + f' ({running})'
+        print(self.should_erase_line, line, sep='', end='\r')
+        self.should_erase_line = '\x1b[K'
+
+    def start(self, wrap_name: str) -> None:
+        with self.lock:
+            self.running_tasks.add(wrap_name)
+            self.print_progress()
+
+    def done(self, wrap_name: str, log_queue: T.List[T.Tuple[mlog.TV_LoggableList, T.Any]]) -> None:
+        with self.lock:
+            self.flush()
+            for args, kwargs in log_queue:
+                mlog.log(*args, **kwargs)
+            self.running_tasks.remove(wrap_name)
+            self.completed_tasks += 1
+            self.print_progress()
+
+
+class Runner:
+    def __init__(self, logger: Logger, r: Resolver, wrap: PackageDefinition, repo_dir: str, options: argparse.Namespace) -> None:
+        # FIXME: Make a copy because Resolver.resolve() is a stateful method that
+        # cannot be called from multiple threads.
+        self.wrap_resolver = copy.copy(r)
+        self.wrap = wrap
+        self.repo_dir = repo_dir
+        self.options = options
+        self.run_method = options.subprojects_func.__get__(self)
+        self.log_queue = []
+        self.logger = logger
+
+    def log(self, *args, **kwargs):
+        self.log_queue.append((args, kwargs))
+
+    def run(self):
+        self.logger.start(self.wrap.name)
+        try:
+            result = self.run_method()
+        except MesonException as e:
+            self.log(mlog.red('Error:'), str(e))
+            result = False
+        self.logger.done(self.wrap.name, self.log_queue)
+        return result
+
+    def update_wrapdb_file(self):
+        try:
+            patch_url = self.wrap.get('patch_url')
+            branch, revision = wraptool.parse_patch_url(patch_url)
+        except WrapException:
+            return
+        new_branch, new_revision = wraptool.get_latest_version(self.wrap.name)
+        if new_branch != branch or new_revision != revision:
+            wraptool.update_wrap_file(self.wrap.filename, self.wrap.name, new_branch, new_revision)
+            self.log('  -> New wrap file downloaded.')
+
+    def update_file(self):
+        self.update_wrapdb_file()
+        if not os.path.isdir(self.repo_dir):
+            # The subproject is not needed, or it is a tarball extracted into a
+            # 'libfoo-1.0' directory and the version has been bumped so that the
+            # new directory is 'libfoo-2.0'. In that case, forcing a meson
+            # reconfigure will download and use the new tarball.
+            self.log('  -> Not used.')
+            return True
+        elif self.options.reset:
+            # Delete the existing directory and redownload. It is possible that
+            # nothing changed, but we have no way to know. Hopefully the tarballs
+            # are still cached.
+            windows_proof_rmtree(self.repo_dir)
+            try:
+                self.wrap_resolver.resolve(self.wrap.name, 'meson')
+                self.log('  -> New version extracted')
+                return True
+            except WrapException as e:
+                self.log('  ->', mlog.red(str(e)))
+                return False
+        else:
+            # The subproject has not changed, or the new source and/or patch
+            # tarballs should be extracted into the same directory as the
+            # previous version.
+            self.log('  -> Subproject has not changed, or the new source/patch needs to be extracted in the same location.')
+            self.log('     Pass --reset option to delete directory and redownload.')
+            return False
+
+    def git_output(self, cmd):
+        return quiet_git(cmd, self.repo_dir, check=True)[1]
+
+    def git_verbose(self, cmd):
+        self.log(self.git_output(cmd))
+
+    def git_stash(self):
+        # That git command returns 1 (failure) when there is something to stash.
+        # We don't want to stash when there is nothing to stash because that would
+        # print a spurious "No local changes to save".
+        if not quiet_git(['diff', '--quiet', 'HEAD'], self.repo_dir)[0]:
+            # Don't pipe stdout here because we want the user to see their changes have
+            # been saved.
+            self.git_verbose(['stash'])
+
+    def git_show(self):
+        commit_message = self.git_output(['show', '--quiet', '--pretty=format:%h%n%d%n%s%n[%an]'])
+        parts = [s.strip() for s in commit_message.split('\n')]
+        self.log('  ->', mlog.yellow(parts[0]), mlog.red(parts[1]), parts[2], mlog.blue(parts[3]))
+
+    def git_rebase(self, revision):
+        try:
+            self.git_output(['-c', 'rebase.autoStash=true', 'rebase', 'FETCH_HEAD'])
+        except GitException as e:
+            self.log('  -> Could not rebase', mlog.bold(self.repo_dir), 'onto', mlog.bold(revision))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        return True
+
+    def git_reset(self, revision):
+        try:
+            # Stash local changes; commits can always be recovered from the reflog,
+            # so no data is lost by mistake.
+            self.git_stash()
+            self.git_output(['reset', '--hard', 'FETCH_HEAD'])
+        except GitException as e:
+            self.log('  -> Could not reset', mlog.bold(self.repo_dir), 'to', mlog.bold(revision))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        return True
+
+    def git_checkout(self, revision, create=False):
+        cmd = ['checkout', '--ignore-other-worktrees', revision, '--']
+        if create:
+            cmd.insert(1, '-b')
+        try:
+            # Stash local changes; commits can always be recovered from the reflog,
+            # so no data is lost by mistake.
+            self.git_stash()
+            self.git_output(cmd)
+        except GitException as e:
+            self.log('  -> Could not checkout', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        return True
+
+    def git_checkout_and_reset(self, revision):
+        # revision could be a branch that already exists but is outdated, so we still
+        # have to reset after the checkout.
+        success = self.git_checkout(revision)
+        if success:
+            success = self.git_reset(revision)
+        return success
+
+    def git_checkout_and_rebase(self, revision):
+        # revision could be a branch that already exists but is outdated, so we still
+        # have to rebase after the checkout.
+        success = self.git_checkout(revision)
+        if success:
+            success = self.git_rebase(revision)
+        return success
+
+    def update_git(self):
+        if not os.path.isdir(self.repo_dir):
+            self.log('  -> Not used.')
+            return True
+        if not os.path.exists(os.path.join(self.repo_dir, '.git')):
+            if self.options.reset:
+                # Delete existing directory and redownload
+                windows_proof_rmtree(self.repo_dir)
+                try:
+                    self.wrap_resolver.resolve(self.wrap.name, 'meson')
+                    self.update_git_done()
+                    return True
+                except WrapException as e:
+                    self.log('  ->', mlog.red(str(e)))
+                    return False
+            else:
+                self.log('  -> Not a git repository.')
+                self.log('Pass --reset option to delete directory and redownload.')
+                return False
+        revision = self.wrap.values.get('revision')
+        url = self.wrap.values.get('url')
+        push_url = self.wrap.values.get('push-url')
+        if not revision or not url:
+            # It could be a detached git submodule for example.
+            self.log('  -> No revision or URL specified.')
+            return True
+        try:
+            origin_url = self.git_output(['remote', 'get-url', 'origin']).strip()
+        except GitException as e:
+            self.log('  -> Failed to determine current origin URL in', mlog.bold(self.repo_dir))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        if self.options.reset:
+            try:
+                self.git_output(['remote', 'set-url', 'origin', url])
+                if push_url:
+                    self.git_output(['remote', 'set-url', '--push', 'origin', push_url])
+            except GitException as e:
+                self.log('  -> Failed to reset origin URL in', mlog.bold(self.repo_dir))
+                self.log(mlog.red(e.output))
+                self.log(mlog.red(str(e)))
+                return False
+        elif url != origin_url:
+            self.log(f'  -> URL changed from {origin_url!r} to {url!r}')
+            return False
+        try:
+            # Same as `git branch --show-current` but compatible with older git versions
+            branch = self.git_output(['rev-parse', '--abbrev-ref', 'HEAD']).strip()
+            branch = branch if branch != 'HEAD' else ''
+        except GitException as e:
+            self.log('  -> Failed to determine current branch in', mlog.bold(self.repo_dir))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        try:
+            # Fetch only the revision we need; this avoids fetching useless branches.
+            # revision can be either a branch, a tag or a commit id. In all cases we
+            # want FETCH_HEAD to be set to the desired commit and "git checkout <revision>"
+            # to either switch to an existing/new branch, or detach to a tag/commit.
+            # It is more complicated than it first appears; see the discussion at:
+            # https://github.com/mesonbuild/meson/pull/7723#discussion_r488816189.
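+            # Concretely, for a wrap pinned to a hypothetical revision '1.2.3' this
+            # runs roughly:
+            #   git fetch --refmap '+refs/heads/*:refs/remotes/origin/*' \
+            #             --refmap '+refs/tags/*:refs/tags/*' origin 1.2.3
+            # inside the subproject checkout.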
+            heads_refmap = '+refs/heads/*:refs/remotes/origin/*'
+            tags_refmap = '+refs/tags/*:refs/tags/*'
+            self.git_output(['fetch', '--refmap', heads_refmap, '--refmap', tags_refmap, 'origin', revision])
+        except GitException as e:
+            self.log('  -> Could not fetch revision', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+
+        if branch == '':
+            # We are currently in detached mode
+            if self.options.reset:
+                success = self.git_checkout_and_reset(revision)
+            else:
+                success = self.git_checkout_and_rebase(revision)
+        elif branch == revision:
+            # We are on the same branch. A reset could still be needed in case
+            # a force push happened on the remote repository.
+            if self.options.reset:
+                success = self.git_reset(revision)
+            else:
+                success = self.git_rebase(revision)
+        else:
+            # We are on another branch: either the user created their own branch and
+            # we should rebase it, or the revision changed in the wrap file and we
+            # need to check out the new branch.
+            if self.options.reset:
+                success = self.git_checkout_and_reset(revision)
+            else:
+                success = self.git_rebase(revision)
+        if success:
+            self.update_git_done()
+        return success
+
+    def update_git_done(self):
+        self.git_output(['submodule', 'update', '--checkout', '--recursive'])
+        self.git_show()
+
+    def update_hg(self):
+        if not os.path.isdir(self.repo_dir):
+            self.log('  -> Not used.')
+            return True
+        revno = self.wrap.get('revision')
+        if revno.lower() == 'tip':
+            # Failure to pull is not a fatal error,
+            # because otherwise you could not develop without
+            # a working network connection.
+            subprocess.call(['hg', 'pull'], cwd=self.repo_dir)
+        else:
+            if subprocess.call(['hg', 'checkout', revno], cwd=self.repo_dir) != 0:
+                subprocess.check_call(['hg', 'pull'], cwd=self.repo_dir)
+                subprocess.check_call(['hg', 'checkout', revno], cwd=self.repo_dir)
+        return True
+
+    def update_svn(self):
+        if not os.path.isdir(self.repo_dir):
+            self.log('  -> Not used.')
+            return True
+        revno = self.wrap.get('revision')
+        p, out, _ = Popen_safe(['svn', 'info', '--show-item', 'revision', self.repo_dir])
+        current_revno = out
+        if current_revno == revno:
+            return True
+        if revno.lower() == 'head':
+            # Failure to pull is not a fatal error,
+            # because otherwise you could not develop without
+            # a working network connection.
+            subprocess.call(['svn', 'update'], cwd=self.repo_dir)
+        else:
+            subprocess.check_call(['svn', 'update', '-r', revno], cwd=self.repo_dir)
+        return True
+
+    def update(self):
+        self.log(f'Updating {self.wrap.name}...')
+        if self.wrap.type == 'file':
+            return self.update_file()
+        elif self.wrap.type == 'git':
+            return self.update_git()
+        elif self.wrap.type == 'hg':
+            return self.update_hg()
+        elif self.wrap.type == 'svn':
+            return self.update_svn()
+        elif self.wrap.type is None:
+            self.log('  -> Cannot update subproject with no wrap file')
+        else:
+            self.log('  -> Cannot update', self.wrap.type, 'subproject')
+        return True
+
+    def checkout(self):
+        if self.wrap.type != 'git' or not os.path.isdir(self.repo_dir):
+            return True
+        branch_name = self.options.branch_name if self.options.branch_name else self.wrap.get('revision')
+        if not branch_name:
+            # It could be a detached git submodule for example.
+            return True
+        self.log(f'Checkout {branch_name} in {self.wrap.name}...')
+        if self.git_checkout(branch_name, create=self.options.b):
+            self.git_show()
+            return True
+        return False
+
+    def download(self):
+        self.log(f'Download {self.wrap.name}...')
+        if os.path.isdir(self.repo_dir):
+            self.log('  -> Already downloaded')
+            return True
+        try:
+            self.wrap_resolver.resolve(self.wrap.name, 'meson')
+            self.log('  -> done')
+        except WrapException as e:
+            self.log('  ->', mlog.red(str(e)))
+            return False
+        return True
+
+    def foreach(self):
+        self.log(f'Executing command in {self.repo_dir}')
+        if not os.path.isdir(self.repo_dir):
+            self.log('  -> Not downloaded yet')
+            return True
+        cmd = [self.options.command] + self.options.args
+        p, out, _ = Popen_safe(cmd, stderr=subprocess.STDOUT, cwd=self.repo_dir)
+        if p.returncode != 0:
+            err_message = "Command '{}' returned non-zero exit status {}.".format(" ".join(cmd), p.returncode)
+            self.log('  -> ', mlog.red(err_message))
+            self.log(out, end='')
+            return False
+
+        self.log(out, end='')
+        return True
+
+    def purge(self) -> bool:
+        # If the subproject is not wrap-based, don't remove it.
+        if not self.wrap.type:
+            return True
+
+        if self.wrap.redirected:
+            redirect_file = Path(self.wrap.original_filename).resolve()
+            if self.options.confirm:
+                redirect_file.unlink()
+            mlog.log(f'Deleting {redirect_file}')
+
+        if self.wrap.type == 'redirect':
+            redirect_file = Path(self.wrap.filename).resolve()
+            if self.options.confirm:
+                redirect_file.unlink()
+            self.log(f'Deleting {redirect_file}')
+
+        if self.options.include_cache:
+            packagecache = Path(self.wrap_resolver.cachedir).resolve()
+            try:
+                subproject_cache_file = packagecache / self.wrap.get("source_filename")
+                if subproject_cache_file.is_file():
+                    if self.options.confirm:
+                        subproject_cache_file.unlink()
+                    self.log(f'Deleting {subproject_cache_file}')
+            except WrapException:
+                pass
+
+            try:
+                subproject_patch_file = packagecache / self.wrap.get("patch_filename")
+                if subproject_patch_file.is_file():
+                    if self.options.confirm:
+                        subproject_patch_file.unlink()
+                    self.log(f'Deleting {subproject_patch_file}')
+            except WrapException:
+                pass
+
+            # Don't log that we will remove an empty directory. Since purge is
+            # parallelized, another thread could have deleted it already.
+            try:
+                if not any(packagecache.iterdir()):
+                    windows_proof_rmtree(str(packagecache))
+            except FileNotFoundError:
+                pass
+
+        subproject_source_dir = Path(self.repo_dir).resolve()
+
+        # Don't follow symlinks. This is covered by the next if statement, but why
+        # not be doubly sure.
+        if subproject_source_dir.is_symlink():
+            if self.options.confirm:
+                subproject_source_dir.unlink()
+            self.log(f'Deleting {subproject_source_dir}')
+            return True
+        if not subproject_source_dir.is_dir():
+            return True
+
+        try:
+            if self.options.confirm:
+                windows_proof_rmtree(str(subproject_source_dir))
+            self.log(f'Deleting {subproject_source_dir}')
+        except OSError as e:
+            mlog.error(f'Unable to remove: {subproject_source_dir}: {e}')
+            return False
+
+        return True
+
+    @staticmethod
+    def post_purge(options):
+        if not options.confirm:
+            mlog.log('')
+            mlog.log('Nothing has been deleted, run again with --confirm to apply.')
+
+def add_common_arguments(p):
+    p.add_argument('--sourcedir', default='.',
+                   help='Path to source directory')
+    p.add_argument('--types', default='',
+                   help=f'Comma-separated list of subproject types. Supported types are: {ALL_TYPES_STRING} (default: all)')
+    p.add_argument('--num-processes', default=None, type=int,
+                   help='How many parallel processes to use (Since 0.59.0).')
+
+def add_subprojects_argument(p):
+    p.add_argument('subprojects', nargs='*',
+                   help='List of subprojects (default: all)')
+
+def add_arguments(parser):
+    subparsers = parser.add_subparsers(title='Commands', dest='command')
+    subparsers.required = True
+
+    p = subparsers.add_parser('update', help='Update all subprojects from wrap files')
+    p.add_argument('--rebase', default=True, action='store_true',
+                   help='Rebase your branch on top of wrap\'s revision. ' + \
+                        'Deprecated, it is now the default behaviour. (git only)')
+    p.add_argument('--reset', default=False, action='store_true',
+                   help='Checkout wrap\'s revision and hard reset to that commit. (git only)')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.set_defaults(subprojects_func=Runner.update)
+
+    p = subparsers.add_parser('checkout', help='Checkout a branch (git only)')
+    p.add_argument('-b', default=False, action='store_true',
+                   help='Create a new branch')
+    p.add_argument('branch_name', nargs='?',
+                   help='Name of the branch to checkout or create (default: revision set in wrap file)')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.set_defaults(subprojects_func=Runner.checkout)
+
+    p = subparsers.add_parser('download', help='Ensure subprojects are fetched, even if not in use. ' +
+                                               'Already downloaded subprojects are not modified. ' +
+                                               'This can be used to pre-fetch all subprojects and avoid downloads during configure.')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.set_defaults(subprojects_func=Runner.download)
+
+    p = subparsers.add_parser('foreach', help='Execute a command in each subproject directory.')
+    p.add_argument('command', metavar='command ...',
+                   help='Command to execute in each subproject directory')
+    p.add_argument('args', nargs=argparse.REMAINDER,
+                   help=argparse.SUPPRESS)
+    add_common_arguments(p)
+    p.set_defaults(subprojects=[])
+    p.set_defaults(subprojects_func=Runner.foreach)
+
+    p = subparsers.add_parser('purge', help='Remove all wrap-based subproject artifacts')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.add_argument('--include-cache', action='store_true', default=False, help='Remove the package cache as well')
+    p.add_argument('--confirm', action='store_true', default=False, help='Confirm the removal of subproject artifacts')
+    p.set_defaults(subprojects_func=Runner.purge)
+    p.set_defaults(post_func=Runner.post_purge)
+
+def run(options):
+    src_dir = os.path.relpath(os.path.realpath(options.sourcedir))
+    if not os.path.isfile(os.path.join(src_dir, 'meson.build')):
+        mlog.error('Directory', mlog.bold(src_dir), 'does not seem to be a Meson source directory.')
+        return 1
+    subprojects_dir = os.path.join(src_dir, 'subprojects')
+    if not os.path.isdir(subprojects_dir):
+        mlog.log('Directory', mlog.bold(src_dir), 'does not seem to have subprojects.')
+        return 0
+    r = Resolver(src_dir, 'subprojects')
+    if options.subprojects:
+        wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects]
+    else:
+        wraps = r.wraps.values()
+    types = [t.strip() for t in options.types.split(',')] if options.types else []
+    for t in types:
+        if t not in ALL_TYPES:
+            raise MesonException(f'Unknown subproject type {t!r}, supported types are: {ALL_TYPES_STRING}')
+    tasks = []
+    task_names = []
+    loop = asyncio.get_event_loop()
+    executor = ThreadPoolExecutor(options.num_processes)
+    if types:
+        wraps = [wrap for wrap in wraps if wrap.type in types]
+    logger = Logger(len(wraps))
+    for wrap in wraps:
+        dirname = Path(subprojects_dir, wrap.directory).as_posix()
+        runner = Runner(logger, r, wrap, dirname, options)
+        task = loop.run_in_executor(executor, runner.run)
+        tasks.append(task)
+        task_names.append(wrap.name)
+    results = loop.run_until_complete(asyncio.gather(*tasks))
+    logger.flush()
+    post_func = getattr(options, 'post_func', None)
+    if post_func:
+        post_func(options)
+    failures = [name for name, success in zip(task_names, results) if not success]
+    if failures:
+        m = 'Please check logs above as command failed in some subprojects which could have been left in a conflicted state: '
+        m += ', '.join(failures)
+        mlog.warning(m)
+    return len(failures)
diff --git a/meson/mesonbuild/mtest.py b/meson/mesonbuild/mtest.py
new file mode 100644
index 000000000..a44f7f291
--- /dev/null
+++ b/meson/mesonbuild/mtest.py
@@ -0,0 +1,2011 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A tool to run tests in many different ways.
+
+from pathlib import Path
+from collections import deque
+from copy import deepcopy
+import argparse
+import asyncio
+import datetime
+import enum
+import json
+import multiprocessing
+import os
+import pickle
+import platform
+import random
+import re
+import signal
+import subprocess
+import shlex
+import sys
+import textwrap
+import time
+import typing as T
+import unicodedata
+import xml.etree.ElementTree as et
+
+from . import build
+from . import environment
+from . import mlog
+from .coredata import major_versions_differ, MesonVersionMismatchException
+from .coredata import version as coredata_version
+from .mesonlib import (MesonException, OrderedSet, RealPathAction,
+                       get_wine_shortpath, join_args, split_args)
+from .mintro import get_infodir, load_info_file
+from .programs import ExternalProgram
+from .backend.backends import TestProtocol, TestSerialisation
+
+# GNU autotools interprets a return code of 77 from tests it executes to
+# mean that the test should be skipped.
+GNU_SKIP_RETURNCODE = 77
+
+# GNU autotools interprets a return code of 99 from tests it executes to
+# mean that the test failed even before testing what it is supposed to test.
+GNU_ERROR_RETURNCODE = 99
+
+# Exit if 3 Ctrl-C's are received within one second
+MAX_CTRLC = 3
+
+def is_windows() -> bool:
+    platname = platform.system().lower()
+    return platname == 'windows'
+
+def is_cygwin() -> bool:
+    return sys.platform == 'cygwin'
+
+UNIWIDTH_MAPPING = {'F': 2, 'H': 1, 'W': 2, 'Na': 1, 'N': 1, 'A': 1}
+def uniwidth(s: str) -> int:
+    result = 0
+    for c in s:
+        w = unicodedata.east_asian_width(c)
+        result += UNIWIDTH_MAPPING[w]
+    return result
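+# Illustrative note: uniwidth('abc') == 3, while two fullwidth characters such
+# as '日本' occupy four terminal columns.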
+
+def determine_worker_count() -> int:
+    varname = 'MESON_TESTTHREADS'
+    if varname in os.environ:
+        try:
+            num_workers = int(os.environ[varname])
+        except ValueError:
+            print(f'Invalid value in {varname}, using 1 thread.')
+            num_workers = 1
+    else:
+        try:
+            # Fails in some weird environments such as Debian
+            # reproducible build.
+            num_workers = multiprocessing.cpu_count()
+        except Exception:
+            num_workers = 1
+    return num_workers
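+# Illustrative note: with MESON_TESTTHREADS=4 in the environment,
+# determine_worker_count() returns 4 regardless of the detected CPU count.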
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    parser.add_argument('--repeat', default=1, dest='repeat', type=int,
+                        help='Number of times to run the tests.')
+    parser.add_argument('--no-rebuild', default=False, action='store_true',
+                        help='Do not rebuild before running tests.')
+    parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
+                        help='Run test under gdb.')
+    parser.add_argument('--gdb-path', default='gdb', dest='gdb_path',
+                        help='Path to the gdb binary (default: gdb).')
+    parser.add_argument('--list', default=False, dest='list', action='store_true',
+                        help='List available tests.')
+    parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args,
+                        help='Wrapper to run tests with (e.g. Valgrind)')
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        # https://github.com/python/typeshed/issues/3107
+                        # https://github.com/python/mypy/issues/7177
+                        type=os.path.abspath,  # type: ignore
+                        help='directory to cd into before running')
+    parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
+                        help='Only run tests belonging to the given suite.')
+    parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
+                        help='Do not run tests belonging to the given suite.')
+    parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
+                        help='Do not split stderr and stdout in test logs.')
+    parser.add_argument('--print-errorlogs', default=False, action='store_true',
+                        help="Whether to print failing tests' logs.")
+    parser.add_argument('--benchmark', default=False, action='store_true',
+                        help="Run benchmarks instead of tests.")
+    parser.add_argument('--logbase', default='testlog',
+                        help="Base name for log file.")
+    parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
+                        help='How many parallel processes to use.')
+    parser.add_argument('-v', '--verbose', default=False, action='store_true',
+                        help='Do not redirect stdout and stderr')
+    parser.add_argument('-q', '--quiet', default=False, action='store_true',
+                        help='Produce less output to the terminal.')
+    parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
+                        help='Define a multiplier for test timeout; for example, '
+                        'when running tests in particular conditions they might '
+                        'take more time to execute. (<= 0 to disable timeout)')
+    parser.add_argument('--setup', default=None, dest='setup',
+                        help='Which test setup to use.')
+    parser.add_argument('--test-args', default=[], type=split_args,
+                        help='Arguments to pass to the specified test(s) or all tests')
+    parser.add_argument('args', nargs='*',
+                        help='Optional list of test names to run. "testname" to run all tests with that name, '
+                        '"subprojname:testname" to specifically run "testname" from "subprojname", '
+                        '"subprojname:" to run all tests defined by "subprojname".')
+
+
+def print_safe(s: str) -> None:
+    end = '' if s[-1] == '\n' else '\n'
+    try:
+        print(s, end=end)
+    except UnicodeEncodeError:
+        s = s.encode('ascii', errors='backslashreplace').decode('ascii')
+        print(s, end=end)
+
+def join_lines(a: str, b: str) -> str:
+    if not a:
+        return b
+    if not b:
+        return a
+    return a + '\n' + b
+
+def dashes(s: str, dash: str, cols: int) -> str:
+    if not s:
+        return dash * cols
+    s = ' ' + s + ' '
+    width = uniwidth(s)
+    first = (cols - width) // 2
+    s = dash * first + s
+    return s + dash * (cols - first - width)
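+# Illustrative note: dashes('ok', '-', 10) == '--- ok ---' and
+# dashes('', '-', 10) == '-' * 10.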
+
+def returncode_to_status(retcode: int) -> str:
+    # Note: We can't use `os.WIFSIGNALED(result.returncode)` and the related
+    # functions here because the status returned by subprocess is munged. It
+    # returns a negative value if the process was killed by a signal rather than
+    # the raw status returned by `wait()`. Also, if a shell sits between Meson
+    # and the actual unit test, that shell is likely to convert a termination due
+    # to a signal into an exit status of 128 plus the signal number.
+    if retcode < 0:
+        signum = -retcode
+        try:
+            signame = signal.Signals(signum).name
+        except ValueError:
+            signame = 'SIGinvalid'
+        return f'killed by signal {signum} {signame}'
+
+    if retcode <= 128:
+        return f'exit status {retcode}'
+
+    signum = retcode - 128
+    try:
+        signame = signal.Signals(signum).name
+    except ValueError:
+        signame = 'SIGinvalid'
+    return f'(exit status {retcode} or signal {signum} {signame})'
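+# Illustrative note (values assume a Linux signal table):
+#   returncode_to_status(-11) == 'killed by signal 11 SIGSEGV'
+#   returncode_to_status(139) == '(exit status 139 or signal 11 SIGSEGV)'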
+
+# TODO for Windows
+sh_quote: T.Callable[[str], str] = lambda x: x
+if not is_windows():
+    sh_quote = shlex.quote
+
+def env_tuple_to_str(env: T.Iterable[T.Tuple[str, str]]) -> str:
+    return ''.join(["{}={} ".format(k, sh_quote(v)) for k, v in env])
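+# Illustrative note: on POSIX, where sh_quote is shlex.quote,
+# env_tuple_to_str([('FOO', 'bar baz')]) == "FOO='bar baz' ".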
+
+
+class TestException(MesonException):
+    pass
+
+
+@enum.unique
+class ConsoleUser(enum.Enum):
+
+    # the logger can use the console
+    LOGGER = 0
+
+    # the console is used by gdb
+    GDB = 1
+
+    # the console is used to write stdout/stderr
+    STDOUT = 2
+
+
+@enum.unique
+class TestResult(enum.Enum):
+
+    PENDING = 'PENDING'
+    RUNNING = 'RUNNING'
+    OK = 'OK'
+    TIMEOUT = 'TIMEOUT'
+    INTERRUPT = 'INTERRUPT'
+    SKIP = 'SKIP'
+    FAIL = 'FAIL'
+    EXPECTEDFAIL = 'EXPECTEDFAIL'
+    UNEXPECTEDPASS = 'UNEXPECTEDPASS'
+    ERROR = 'ERROR'
+
+    @staticmethod
+    def maxlen() -> int:
+        return 14 # len(UNEXPECTEDPASS)
+
+    def is_ok(self) -> bool:
+        return self in {TestResult.OK, TestResult.EXPECTEDFAIL}
+
+    def is_bad(self) -> bool:
+        return self in {TestResult.FAIL, TestResult.TIMEOUT, TestResult.INTERRUPT,
+                        TestResult.UNEXPECTEDPASS, TestResult.ERROR}
+
+    def is_finished(self) -> bool:
+        return self not in {TestResult.PENDING, TestResult.RUNNING}
+
+    def was_killed(self) -> bool:
+        return self in (TestResult.TIMEOUT, TestResult.INTERRUPT)
+
+    def colorize(self, s: str) -> mlog.AnsiDecorator:
+        if self.is_bad():
+            decorator = mlog.red
+        elif self in (TestResult.SKIP, TestResult.EXPECTEDFAIL):
+            decorator = mlog.yellow
+        elif self.is_finished():
+            decorator = mlog.green
+        else:
+            decorator = mlog.blue
+        return decorator(s)
+
+    def get_text(self, colorize: bool) -> str:
+        result_str = '{res:{reslen}}'.format(res=self.value, reslen=self.maxlen())
+        return self.colorize(result_str).get_text(colorize)
+
+    def get_command_marker(self) -> str:
+        return str(self.colorize('>>> '))
+
+
+TYPE_TAPResult = T.Union['TAPParser.Test', 'TAPParser.Error', 'TAPParser.Version', 'TAPParser.Plan', 'TAPParser.Bailout']
+
+class TAPParser:
+    class Plan(T.NamedTuple):
+        num_tests: int
+        late: bool
+        skipped: bool
+        explanation: T.Optional[str]
+
+    class Bailout(T.NamedTuple):
+        message: str
+
+    class Test(T.NamedTuple):
+        number: int
+        name: str
+        result: TestResult
+        explanation: T.Optional[str]
+
+        def __str__(self) -> str:
+            return f'{self.number} {self.name}'.strip()
+
+    class Error(T.NamedTuple):
+        message: str
+
+    class Version(T.NamedTuple):
+        version: int
+
+    _MAIN = 1
+    _AFTER_TEST = 2
+    _YAML = 3
+
+    _RE_BAILOUT = re.compile(r'Bail out!\s*(.*)')
+    _RE_DIRECTIVE = re.compile(r'(?:\s*\#\s*([Ss][Kk][Ii][Pp]\S*|[Tt][Oo][Dd][Oo])\b\s*(.*))?')
+    _RE_PLAN = re.compile(r'1\.\.([0-9]+)' + _RE_DIRECTIVE.pattern)
+    _RE_TEST = re.compile(r'((?:not )?ok)\s*(?:([0-9]+)\s*)?([^#]*)' + _RE_DIRECTIVE.pattern)
+    _RE_VERSION = re.compile(r'TAP version ([0-9]+)')
+    _RE_YAML_START = re.compile(r'(\s+)---.*')
+    _RE_YAML_END = re.compile(r'\s+\.\.\.\s*')
+
+    found_late_test = False
+    bailed_out = False
+    plan: T.Optional[Plan] = None
+    lineno = 0
+    num_tests = 0
+    yaml_lineno: T.Optional[int] = None
+    yaml_indent = ''
+    state = _MAIN
+    version = 12
+
+    def parse_test(self, ok: bool, num: int, name: str, directive: T.Optional[str], explanation: T.Optional[str]) -> \
+            T.Generator[T.Union['TAPParser.Test', 'TAPParser.Error'], None, None]:
+        name = name.strip()
+        explanation = explanation.strip() if explanation else None
+        if directive is not None:
+            directive = directive.upper()
+            if directive.startswith('SKIP'):
+                if ok:
+                    yield self.Test(num, name, TestResult.SKIP, explanation)
+                    return
+            elif directive == 'TODO':
+                yield self.Test(num, name, TestResult.UNEXPECTEDPASS if ok else TestResult.EXPECTEDFAIL, explanation)
+                return
+            else:
+                yield self.Error(f'invalid directive "{directive}"')
+
+        yield self.Test(num, name, TestResult.OK if ok else TestResult.FAIL, explanation)
+
+    async def parse_async(self, lines: T.AsyncIterator[str]) -> T.AsyncIterator[TYPE_TAPResult]:
+        async for line in lines:
+            for event in self.parse_line(line):
+                yield event
+        for event in self.parse_line(None):
+            yield event
+
+    def parse(self, io: T.Iterator[str]) -> T.Iterator[TYPE_TAPResult]:
+        for line in io:
+            yield from self.parse_line(line)
+        yield from self.parse_line(None)
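+    # Usage sketch (illustrative): iterating over
+    #   TAPParser().parse(iter(['1..2', 'ok 1 - first', 'not ok 2 - second']))
+    # yields a Plan(num_tests=2, ...) followed by two Test events; the second
+    # Test carries TestResult.FAIL.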
+
+    def parse_line(self, line: T.Optional[str]) -> T.Iterator[TYPE_TAPResult]:
+        if line is not None:
+            self.lineno += 1
+            line = line.rstrip()
+
+            # YAML blocks are only accepted after a test
+            if self.state == self._AFTER_TEST:
+                if self.version >= 13:
+                    m = self._RE_YAML_START.match(line)
+                    if m:
+                        self.state = self._YAML
+                        self.yaml_lineno = self.lineno
+                        self.yaml_indent = m.group(1)
+                        return
+                self.state = self._MAIN
+
+            elif self.state == self._YAML:
+                if self._RE_YAML_END.match(line):
+                    self.state = self._MAIN
+                    return
+                if line.startswith(self.yaml_indent):
+                    return
+                yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})')
+                self.state = self._MAIN
+
+            assert self.state == self._MAIN
+            if line.startswith('#'):
+                return
+
+            m = self._RE_TEST.match(line)
+            if m:
+                if self.plan and self.plan.late and not self.found_late_test:
+                    yield self.Error('unexpected test after late plan')
+                    self.found_late_test = True
+                self.num_tests += 1
+                num = self.num_tests if m.group(2) is None else int(m.group(2))
+                if num != self.num_tests:
+                    yield self.Error('out of order test numbers')
+                yield from self.parse_test(m.group(1) == 'ok', num,
+                                           m.group(3), m.group(4), m.group(5))
+                self.state = self._AFTER_TEST
+                return
+
+            m = self._RE_PLAN.match(line)
+            if m:
+                if self.plan:
+                    yield self.Error('more than one plan found')
+                else:
+                    num_tests = int(m.group(1))
+                    skipped = (num_tests == 0)
+                    if m.group(2):
+                        if m.group(2).upper().startswith('SKIP'):
+                            if num_tests > 0:
+                                yield self.Error('invalid SKIP directive for plan')
+                            skipped = True
+                        else:
+                            yield self.Error('invalid directive for plan')
+                    self.plan = self.Plan(num_tests=num_tests, late=(self.num_tests > 0),
+                                          skipped=skipped, explanation=m.group(3))
+                    yield self.plan
+                return
+
+            m = self._RE_BAILOUT.match(line)
+            if m:
+                yield self.Bailout(m.group(1))
+                self.bailed_out = True
+                return
+
+            m = self._RE_VERSION.match(line)
+            if m:
+                # The TAP version is only accepted as the first line
+                if self.lineno != 1:
+                    yield self.Error('version number must be on the first line')
+                    return
+                self.version = int(m.group(1))
+                if self.version < 13:
+                    yield self.Error('version number should be at least 13')
+                else:
+                    yield self.Version(version=self.version)
+                return
+
+            if not line:
+                return
+
+            yield self.Error(f'unexpected input at line {self.lineno}')
+        else:
+            # end of file
+            if self.state == self._YAML:
+                yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})')
+
+            if not self.bailed_out and self.plan and self.num_tests != self.plan.num_tests:
+                if self.num_tests < self.plan.num_tests:
+                    yield self.Error(f'Too few tests run (expected {self.plan.num_tests}, got {self.num_tests})')
+                else:
+                    yield self.Error(f'Too many tests run (expected {self.plan.num_tests}, got {self.num_tests})')
+
+class TestLogger:
+    def flush(self) -> None:
+        pass
+
+    def start(self, harness: 'TestHarness') -> None:
+        pass
+
+    def start_test(self, harness: 'TestHarness', test: 'TestRun') -> None:
+        pass
+
+    def log_subtest(self, harness: 'TestHarness', test: 'TestRun', s: str, res: TestResult) -> None:
+        pass
+
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        pass
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        pass
+
+    def close(self) -> None:
+        pass
+
+
+class TestFileLogger(TestLogger):
+    def __init__(self, filename: str, errors: str = 'replace') -> None:
+        self.filename = filename
+        self.file = open(filename, 'w', encoding='utf-8', errors=errors)
+
+    def close(self) -> None:
+        if self.file:
+            self.file.close()
+            self.file = None
+
+
+class ConsoleLogger(TestLogger):
+    SPINNER = "\U0001f311\U0001f312\U0001f313\U0001f314" + \
+              "\U0001f315\U0001f316\U0001f317\U0001f318"
+
+    SCISSORS = "\u2700 "
+    HLINE = "\u2015"
+    RTRI = "\u25B6 "
+
+    def __init__(self) -> None:
+        self.update = asyncio.Event()
+        self.running_tests = OrderedSet()  # type: OrderedSet['TestRun']
+        self.progress_test = None          # type: T.Optional['TestRun']
+        self.progress_task = None          # type: T.Optional[asyncio.Future]
+        self.max_left_width = 0            # type: int
+        self.stop = False
+        self.update = asyncio.Event()
+        self.should_erase_line = ''
+        self.test_count = 0
+        self.started_tests = 0
+        self.spinner_index = 0
+        try:
+            self.cols, _ = os.get_terminal_size(1)
+            self.is_tty = True
+        except OSError:
+            self.cols = 80
+            self.is_tty = False
+
+        self.output_start = dashes(self.SCISSORS, self.HLINE, self.cols - 2)
+        self.output_end = dashes('', self.HLINE, self.cols - 2)
+        self.sub = self.RTRI
+        try:
+            self.output_start.encode(sys.stdout.encoding or 'ascii')
+        except UnicodeEncodeError:
+            self.output_start = dashes('8<', '-', self.cols - 2)
+            self.output_end = dashes('', '-', self.cols - 2)
+            self.sub = '| '
+
+    def flush(self) -> None:
+        if self.should_erase_line:
+            print(self.should_erase_line, end='')
+            self.should_erase_line = ''
+
+    def print_progress(self, line: str) -> None:
+        print(self.should_erase_line, line, sep='', end='\r')
+        self.should_erase_line = '\x1b[K'
+
+    def request_update(self) -> None:
+        self.update.set()
+
+    def emit_progress(self, harness: 'TestHarness') -> None:
+        if self.progress_test is None:
+            self.flush()
+            return
+
+        if len(self.running_tests) == 1:
+            count = f'{self.started_tests}/{self.test_count}'
+        else:
+            count = '{}-{}/{}'.format(self.started_tests - len(self.running_tests) + 1,
+                                      self.started_tests, self.test_count)
+
+        left = '[{}] {} '.format(count, self.SPINNER[self.spinner_index])
+        self.spinner_index = (self.spinner_index + 1) % len(self.SPINNER)
+
+        right = '{spaces} {dur:{durlen}}'.format(
+            spaces=' ' * TestResult.maxlen(),
+            dur=int(time.time() - self.progress_test.starttime),
+            durlen=harness.duration_max_len)
+        if self.progress_test.timeout:
+            right += '/{timeout:{durlen}}'.format(
+                timeout=self.progress_test.timeout,
+                durlen=harness.duration_max_len)
+        right += 's'
+        detail = self.progress_test.detail
+        if detail:
+            right += '   ' + detail
+
+        line = harness.format(self.progress_test, colorize=True,
+                              max_left_width=self.max_left_width,
+                              left=left, right=right)
+        self.print_progress(line)
+
+    def start(self, harness: 'TestHarness') -> None:
+        async def report_progress() -> None:
+            loop = asyncio.get_event_loop()
+            next_update = 0.0
+            self.request_update()
+            while not self.stop:
+                await self.update.wait()
+                self.update.clear()
+
+                # We may get here simply because the progress line has been
+                # overwritten, so do not always switch.  Only do so every
+                # second, or if the printed test has finished
+                if loop.time() >= next_update:
+                    self.progress_test = None
+                    next_update = loop.time() + 1
+                    loop.call_at(next_update, self.request_update)
+
+                if (self.progress_test and
+                        self.progress_test.res is not TestResult.RUNNING):
+                    self.progress_test = None
+
+                if not self.progress_test:
+                    if not self.running_tests:
+                        continue
+                    # Pick a test in round robin order
+                    self.progress_test = self.running_tests.pop(last=False)
+                    self.running_tests.add(self.progress_test)
+
+                self.emit_progress(harness)
+            self.flush()
+
+        self.test_count = harness.test_count
+        self.cols = max(self.cols, harness.max_left_width + 30)
+
+        if self.is_tty and not harness.need_console:
+            # Account for "[aa-bb/cc] OO " in the progress report
+            self.max_left_width = 3 * len(str(self.test_count)) + 8
+            self.progress_task = asyncio.ensure_future(report_progress())
+
+    def start_test(self, harness: 'TestHarness', test: 'TestRun') -> None:
+        if harness.options.verbose and test.cmdline:
+            self.flush()
+            print(harness.format(test, mlog.colorize_console(),
+                                 max_left_width=self.max_left_width,
+                                 right=test.res.get_text(mlog.colorize_console())))
+            print(test.res.get_command_marker() + test.cmdline)
+            if test.needs_parsing:
+                pass
+            elif not test.is_parallel:
+                print(self.output_start, flush=True)
+            else:
+                print(flush=True)
+
+        self.started_tests += 1
+        self.running_tests.add(test)
+        self.running_tests.move_to_end(test, last=False)
+        self.request_update()
+
+    def shorten_log(self, harness: 'TestHarness', result: 'TestRun') -> str:
+        if not harness.options.verbose and not harness.options.print_errorlogs:
+            return ''
+
+        log = result.get_log(mlog.colorize_console(),
+                             stderr_only=result.needs_parsing)
+        if harness.options.verbose:
+            return log
+
+        lines = log.splitlines()
+        if len(lines) < 100:
+            return log
+        else:
+            return str(mlog.bold('Listing only the last 100 lines from a long log.\n')) + '\n'.join(lines[-100:])
+
+    def print_log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        if not harness.options.verbose:
+            cmdline = result.cmdline
+            if not cmdline:
+                print(result.res.get_command_marker() + result.stdo)
+                return
+            print(result.res.get_command_marker() + cmdline)
+
+        log = self.shorten_log(harness, result)
+        if log:
+            print(self.output_start)
+            print_safe(log)
+            print(self.output_end)
+
+    def log_subtest(self, harness: 'TestHarness', test: 'TestRun', s: str, result: TestResult) -> None:
+        if harness.options.verbose or (harness.options.print_errorlogs and result.is_bad()):
+            self.flush()
+            print(harness.format(test, mlog.colorize_console(), max_left_width=self.max_left_width,
+                                 prefix=self.sub,
+                                 middle=s,
+                                 right=result.get_text(mlog.colorize_console())), flush=True)
+
+            self.request_update()
+
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        self.running_tests.remove(result)
+        if result.res is TestResult.TIMEOUT and harness.options.verbose:
+            self.flush()
+            print(f'{result.name} timed out (after {result.timeout} seconds)')
+
+        if not harness.options.quiet or not result.res.is_ok():
+            self.flush()
+            if harness.options.verbose and not result.is_parallel and result.cmdline:
+                if not result.needs_parsing:
+                    print(self.output_end)
+                print(harness.format(result, mlog.colorize_console(), max_left_width=self.max_left_width))
+            else:
+                print(harness.format(result, mlog.colorize_console(), max_left_width=self.max_left_width),
+                      flush=True)
+                if harness.options.verbose or result.res.is_bad():
+                    self.print_log(harness, result)
+            if harness.options.verbose or result.res.is_bad():
+                print(flush=True)
+
+        self.request_update()
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        self.stop = True
+        self.request_update()
+        if self.progress_task:
+            await self.progress_task
+
+        if harness.collected_failures and \
+                (harness.options.print_errorlogs or harness.options.verbose):
+            print("\nSummary of Failures:\n")
+            for i, result in enumerate(harness.collected_failures, 1):
+                print(harness.format(result, mlog.colorize_console()))
+
+        print(harness.summary())
+
+
+class TextLogfileBuilder(TestFileLogger):
+    def start(self, harness: 'TestHarness') -> None:
+        self.file.write(f'Log of Meson test suite run on {datetime.datetime.now().isoformat()}\n\n')
+        inherit_env = env_tuple_to_str(os.environ.items())
+        self.file.write(f'Inherited environment: {inherit_env}\n\n')
+
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        self.file.write(harness.format(result, False) + '\n')
+        cmdline = result.cmdline
+        if cmdline:
+            starttime_str = time.strftime("%H:%M:%S", time.gmtime(result.starttime))
+            self.file.write(starttime_str + ' ' + cmdline + '\n')
+            self.file.write(dashes('output', '-', 78) + '\n')
+            self.file.write(result.get_log())
+            self.file.write(dashes('', '-', 78) + '\n\n')
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        if harness.collected_failures:
+            self.file.write("\nSummary of Failures:\n\n")
+            for i, result in enumerate(harness.collected_failures, 1):
+                self.file.write(harness.format(result, False) + '\n')
+        self.file.write(harness.summary())
+
+        print(f'Full log written to {self.filename}')
+
+
+class JsonLogfileBuilder(TestFileLogger):
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        jresult = {'name': result.name,
+                   'stdout': result.stdo,
+                   'result': result.res.value,
+                   'starttime': result.starttime,
+                   'duration': result.duration,
+                   'returncode': result.returncode,
+                   'env': result.env,
+                   'command': result.cmd}  # type: T.Dict[str, T.Any]
+        if result.stde:
+            jresult['stderr'] = result.stde
+        self.file.write(json.dumps(jresult) + '\n')
+
+
+class JunitBuilder(TestLogger):
+
+    """Builder for Junit test results.
+
+    Junit is impossible to stream out; it requires attributes counting the
+    total number of tests, failures, skips, and errors in the root element
+    and in each test suite. As such, we use a builder class to track each
+    test case, and calculate all metadata before writing it out.
+
+    For tests with multiple results (like from a TAP test), we record the
+    test as a suite with the project_name.test_name. This allows us to track
+    each result separately. For tests with only one result (such as exit-code
+    tests) we record each one into a suite with the name project_name. The use
+    of the project_name allows us to sort subproject tests separately from
+    the root project.
+    """
+
+    def __init__(self, filename: str) -> None:
+        self.filename = filename
+        self.root = et.Element(
+            'testsuites', tests='0', errors='0', failures='0')
+        self.suites = {}  # type: T.Dict[str, et.Element]
+
+    def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
+        """Log a single test case."""
+        if test.junit is not None:
+            for suite in test.junit.findall('.//testsuite'):
+                # Assume that we don't need to merge anything here...
+                suite.attrib['name'] = '{}.{}.{}'.format(test.project, test.name, suite.attrib['name'])
+
+                # GTest can inject invalid attributes
+                for case in suite.findall('.//testcase[@result]'):
+                    del case.attrib['result']
+                for case in suite.findall('.//testcase[@timestamp]'):
+                    del case.attrib['timestamp']
+                self.root.append(suite)
+            return
+
+        # In this case we have a test binary with multiple results.
+        # We want to record this so that each result is recorded
+        # separately
+        if test.results:
+            suitename = f'{test.project}.{test.name}'
+            assert suitename not in self.suites or harness.options.repeat > 1, 'duplicate suite'
+
+            suite = self.suites[suitename] = et.Element(
+                'testsuite',
+                name=suitename,
+                tests=str(len(test.results)),
+                errors=str(sum(1 for r in test.results if r.result in
+                               {TestResult.INTERRUPT, TestResult.ERROR})),
+                failures=str(sum(1 for r in test.results if r.result in
+                                 {TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})),
+                skipped=str(sum(1 for r in test.results if r.result is TestResult.SKIP)),
+                time=str(test.duration),
+            )
+
+            for subtest in test.results:
+                # Both name and classname are required. Use the suite name as
+                # the class name, so that e.g. GitLab groups testcases correctly.
+                testcase = et.SubElement(suite, 'testcase', name=str(subtest), classname=suitename)
+                if subtest.result is TestResult.SKIP:
+                    et.SubElement(testcase, 'skipped')
+                elif subtest.result is TestResult.ERROR:
+                    et.SubElement(testcase, 'error')
+                elif subtest.result is TestResult.FAIL:
+                    et.SubElement(testcase, 'failure')
+                elif subtest.result is TestResult.UNEXPECTEDPASS:
+                    fail = et.SubElement(testcase, 'failure')
+                    fail.text = 'Test unexpectedly passed.'
+                elif subtest.result is TestResult.INTERRUPT:
+                    fail = et.SubElement(testcase, 'error')
+                    fail.text = 'Test was interrupted by user.'
+                elif subtest.result is TestResult.TIMEOUT:
+                    fail = et.SubElement(testcase, 'error')
+                    fail.text = 'Test did not finish before configured timeout.'
+                if subtest.explanation:
+                    et.SubElement(testcase, 'system-out').text = subtest.explanation
+            if test.stdo:
+                out = et.SubElement(suite, 'system-out')
+                out.text = test.stdo.rstrip()
+            if test.stde:
+                err = et.SubElement(suite, 'system-err')
+                err.text = test.stde.rstrip()
+        else:
+            if test.project not in self.suites:
+                suite = self.suites[test.project] = et.Element(
+                    'testsuite', name=test.project, tests='1', errors='0',
+                    failures='0', skipped='0', time=str(test.duration))
+            else:
+                suite = self.suites[test.project]
+                suite.attrib['tests'] = str(int(suite.attrib['tests']) + 1)
+
+            testcase = et.SubElement(suite, 'testcase', name=test.name,
+                                     classname=test.project, time=str(test.duration))
+            if test.res is TestResult.SKIP:
+                et.SubElement(testcase, 'skipped')
+                suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
+            elif test.res is TestResult.ERROR:
+                et.SubElement(testcase, 'error')
+                suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
+            elif test.res is TestResult.FAIL:
+                et.SubElement(testcase, 'failure')
+                suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+            if test.stdo:
+                out = et.SubElement(testcase, 'system-out')
+                out.text = test.stdo.rstrip()
+            if test.stde:
+                err = et.SubElement(testcase, 'system-err')
+                err.text = test.stde.rstrip()
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        """Calculate total test counts and write out the xml result."""
+        for suite in self.suites.values():
+            self.root.append(suite)
+            # Skipped is really not allowed in the "testsuites" element
+            for attr in ['tests', 'errors', 'failures']:
+                self.root.attrib[attr] = str(int(self.root.attrib[attr]) + int(suite.attrib[attr]))
+
+        tree = et.ElementTree(self.root)
+        with open(self.filename, 'wb') as f:
+            tree.write(f, encoding='utf-8', xml_declaration=True)
+
+
+class TestRun:
+    TEST_NUM = 0
+    PROTOCOL_TO_CLASS: T.Dict[TestProtocol, T.Type['TestRun']] = {}
+
+    def __new__(cls, test: TestSerialisation, *args: T.Any, **kwargs: T.Any) -> T.Any:
+        return super().__new__(TestRun.PROTOCOL_TO_CLASS[test.protocol])
+
+    def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
+                 name: str, timeout: T.Optional[int], is_parallel: bool):
+        self.res = TestResult.PENDING
+        self.test = test
+        self._num = None       # type: T.Optional[int]
+        self.name = name
+        self.timeout = timeout
+        self.results = list()  # type: T.List[TAPParser.Test]
+        self.returncode = 0
+        self.starttime = None  # type: T.Optional[float]
+        self.duration = None   # type: T.Optional[float]
+        self.stdo = None       # type: T.Optional[str]
+        self.stde = None       # type: T.Optional[str]
+        self.cmd = None        # type: T.Optional[T.List[str]]
+        self.env = test_env    # type: T.Dict[str, str]
+        self.should_fail = test.should_fail
+        self.project = test.project_name
+        self.junit = None      # type: T.Optional[et.ElementTree]
+        self.is_parallel = is_parallel
+
+    def start(self, cmd: T.List[str]) -> None:
+        self.res = TestResult.RUNNING
+        self.starttime = time.time()
+        self.cmd = cmd
+
+    @property
+    def num(self) -> int:
+        if self._num is None:
+            TestRun.TEST_NUM += 1
+            self._num = TestRun.TEST_NUM
+        return self._num
+
+    @property
+    def detail(self) -> str:
+        if self.res is TestResult.PENDING:
+            return ''
+        if self.returncode:
+            return returncode_to_status(self.returncode)
+        if self.results:
+            # running or succeeded
+            passed = sum(x.result.is_ok() for x in self.results)
+            ran = sum(x.result is not TestResult.SKIP for x in self.results)
+            if passed == ran:
+                return f'{passed} subtests passed'
+            else:
+                return f'{passed}/{ran} subtests passed'
+        return ''
+
+    def _complete(self, returncode: int, res: TestResult,
+                  stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+        assert isinstance(res, TestResult)
+        if self.should_fail and res in (TestResult.OK, TestResult.FAIL):
+            res = TestResult.UNEXPECTEDPASS if res.is_ok() else TestResult.EXPECTEDFAIL
+
+        self.res = res
+        self.returncode = returncode
+        self.duration = time.time() - self.starttime
+        self.stdo = stdo
+        self.stde = stde
+
+    @property
+    def cmdline(self) -> T.Optional[str]:
+        if not self.cmd:
+            return None
+        test_only_env = set(self.env.items()) - set(os.environ.items())
+        return env_tuple_to_str(test_only_env) + \
+            ' '.join(sh_quote(x) for x in self.cmd)
+
+    def complete_skip(self, message: str) -> None:
+        self.starttime = time.time()
+        self._complete(GNU_SKIP_RETURNCODE, TestResult.SKIP, message, None)
+
+    def complete(self, returncode: int, res: TestResult,
+                 stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+        self._complete(returncode, res, stdo, stde)
+
+    def get_log(self, colorize: bool = False, stderr_only: bool = False) -> str:
+        stdo = '' if stderr_only else self.stdo
+        if self.stde:
+            res = ''
+            if stdo:
+                res += mlog.cyan('stdout:').get_text(colorize) + '\n'
+                res += stdo
+                if res[-1:] != '\n':
+                    res += '\n'
+            res += mlog.cyan('stderr:').get_text(colorize) + '\n'
+            res += self.stde
+        else:
+            res = stdo
+        if res and res[-1:] != '\n':
+            res += '\n'
+        return res
+
+    @property
+    def needs_parsing(self) -> bool:
+        return False
+
+    async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> T.Tuple[TestResult, str]:
+        async for l in lines:
+            pass
+        return TestResult.OK, ''
+
+
+class TestRunExitCode(TestRun):
+
+    def complete(self, returncode: int, res: TestResult,
+                 stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+        if res:
+            pass
+        elif returncode == GNU_SKIP_RETURNCODE:
+            res = TestResult.SKIP
+        elif returncode == GNU_ERROR_RETURNCODE:
+            res = TestResult.ERROR
+        else:
+            res = TestResult.FAIL if bool(returncode) else TestResult.OK
+        super().complete(returncode, res, stdo, stde)
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.EXITCODE] = TestRunExitCode
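+# Illustrative mapping for exit-code protocol tests: 0 -> OK, 77
+# (GNU_SKIP_RETURNCODE) -> SKIP, 99 (GNU_ERROR_RETURNCODE) -> ERROR, any other
+# non-zero value -> FAIL, unless a result (e.g. TIMEOUT) was already set.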
+
+
+class TestRunGTest(TestRunExitCode):
+    def complete(self, returncode: int, res: TestResult,
+                 stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+        filename = f'{self.test.name}.xml'
+        if self.test.workdir:
+            filename = os.path.join(self.test.workdir, filename)
+
+        try:
+            self.junit = et.parse(filename)
+        except FileNotFoundError:
+            # This can happen if the test fails to run or complete for some
+            # reason, like the rpath for libgtest isn't properly set. ExitCode
+            # will handle the failure; don't generate a stacktrace.
+            pass
+
+        super().complete(returncode, res, stdo, stde)
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.GTEST] = TestRunGTest
+
+
+class TestRunTAP(TestRun):
+    @property
+    def needs_parsing(self) -> bool:
+        return True
+
+    def complete(self, returncode: int, res: TestResult,
+                 stdo: str, stde: str) -> None:
+        if returncode != 0 and not res.was_killed():
+            res = TestResult.ERROR
+            stde = stde or ''
+            stde += f'\n(test program exited with status code {returncode})'
+
+        super().complete(returncode, res, stdo, stde)
+
+    async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> T.Tuple[TestResult, str]:
+        res = TestResult.OK
+        error = ''
+
+        async for i in TAPParser().parse_async(lines):
+            if isinstance(i, TAPParser.Bailout):
+                res = TestResult.ERROR
+                harness.log_subtest(self, i.message, res)
+            elif isinstance(i, TAPParser.Test):
+                self.results.append(i)
+                if i.result.is_bad():
+                    res = TestResult.FAIL
+                harness.log_subtest(self, i.name or f'subtest {i.number}', i.result)
+            elif isinstance(i, TAPParser.Error):
+                error = '\nTAP parsing error: ' + i.message
+                res = TestResult.ERROR
+
+        if all(t.result is TestResult.SKIP for t in self.results):
+            # This includes the case where self.results is empty
+            res = TestResult.SKIP
+        return res, error
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.TAP] = TestRunTAP
+
+
+class TestRunRust(TestRun):
+    @property
+    def needs_parsing(self) -> bool:
+        return True
+
+    async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> T.Tuple[TestResult, str]:
+        def parse_res(n: int, name: str, result: str) -> TAPParser.Test:
+            if result == 'ok':
+                return TAPParser.Test(n, name, TestResult.OK, None)
+            elif result == 'ignored':
+                return TAPParser.Test(n, name, TestResult.SKIP, None)
+            elif result == 'FAILED':
+                return TAPParser.Test(n, name, TestResult.FAIL, None)
+            return TAPParser.Test(n, name, TestResult.ERROR,
+                                  f'Unsupported output from rust test: {result}')
+
+        n = 1
+        async for line in lines:
+            if line.startswith('test ') and not line.startswith('test result'):
+                _, name, _, result = line.rstrip().split(' ')
+                name = name.replace('::', '.')
+                t = parse_res(n, name, result)
+                self.results.append(t)
+                harness.log_subtest(self, name, t.result)
+                n += 1
+
+        if all(t.result is TestResult.SKIP for t in self.results):
+            # This includes the case where self.results is empty
+            return TestResult.SKIP, ''
+        elif any(t.result is TestResult.ERROR for t in self.results):
+            return TestResult.ERROR, ''
+        elif any(t.result is TestResult.FAIL for t in self.results):
+            return TestResult.FAIL, ''
+        return TestResult.OK, ''
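+    # Illustrative example: a line such as 'test tests::it_works ... ok' is
+    # recorded as a passing subtest named 'tests.it_works'.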
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.RUST] = TestRunRust
+
+
+def decode(stream: T.Union[None, bytes]) -> str:
+    if stream is None:
+        return ''
+    try:
+        return stream.decode('utf-8')
+    except UnicodeDecodeError:
+        return stream.decode('iso-8859-1', errors='ignore')
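+# Illustrative note: decode(b'\xe9') is not valid UTF-8, so it falls back to
+# ISO-8859-1 and returns 'é'.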
+
+async def read_decode(reader: asyncio.StreamReader, console_mode: ConsoleUser) -> str:
+    stdo_lines = []
+    try:
+        while not reader.at_eof():
+            line = decode(await reader.readline())
+            stdo_lines.append(line)
+            if console_mode is ConsoleUser.STDOUT:
+                print(line, end='', flush=True)
+        return ''.join(stdo_lines)
+    except asyncio.CancelledError:
+        return ''.join(stdo_lines)
+
+# Extract lines out of the StreamReader.  Print them
+# along the way if requested, and at the end collect
+# them all into a future.
+async def read_decode_lines(reader: asyncio.StreamReader, q: 'asyncio.Queue[T.Optional[str]]',
+                            console_mode: ConsoleUser) -> str:
+    stdo_lines = []
+    try:
+        while not reader.at_eof():
+            line = decode(await reader.readline())
+            stdo_lines.append(line)
+            if console_mode is ConsoleUser.STDOUT:
+                print(line, end='', flush=True)
+            await q.put(line)
+        return ''.join(stdo_lines)
+    except asyncio.CancelledError:
+        return ''.join(stdo_lines)
+    finally:
+        await q.put(None)
+
+def run_with_mono(fname: str) -> bool:
+    return fname.endswith('.exe') and not (is_windows() or is_cygwin())
+
+def check_testdata(objs: T.List[TestSerialisation]) -> T.List[TestSerialisation]:
+    if not isinstance(objs, list):
+        raise MesonVersionMismatchException('', coredata_version)
+    for obj in objs:
+        if not isinstance(obj, TestSerialisation):
+            raise MesonVersionMismatchException('', coredata_version)
+        if not hasattr(obj, 'version'):
+            raise MesonVersionMismatchException('', coredata_version)
+        if major_versions_differ(obj.version, coredata_version):
+            raise MesonVersionMismatchException(obj.version, coredata_version)
+    return objs
+
+# Custom waiting primitives for asyncio
+
+async def try_wait_one(*awaitables: T.Any, timeout: T.Optional[T.Union[int, float]]) -> None:
+    """Wait for completion of one of the given futures, ignoring timeouts."""
+    await asyncio.wait(awaitables,
+                       timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
+
+async def queue_iter(q: 'asyncio.Queue[T.Optional[str]]') -> T.AsyncIterator[str]:
+    while True:
+        item = await q.get()
+        q.task_done()
+        if item is None:
+            break
+        yield item
+
+async def complete(future: asyncio.Future) -> None:
+    """Wait for completion of the given future, ignoring cancellation."""
+    try:
+        await future
+    except asyncio.CancelledError:
+        pass
+
+async def complete_all(futures: T.Iterable[asyncio.Future],
+                       timeout: T.Optional[T.Union[int, float]] = None) -> None:
+    """Wait for completion of all the given futures, ignoring cancellation.
+       If timeout is not None, raise an asyncio.TimeoutError after the given
+       time has passed.  asyncio.TimeoutError is only raised if some futures
+       have not completed and none have raised exceptions, even if timeout
+       is zero."""
+
+    def check_futures(futures: T.Iterable[asyncio.Future]) -> None:
+        # Raise exceptions if needed
+        left = False
+        for f in futures:
+            if not f.done():
+                left = True
+            elif not f.cancelled():
+                f.result()
+        if left:
+            raise asyncio.TimeoutError
+
+    # Python is silly and does not have a variant of asyncio.wait with an
+    # absolute time as deadline.
+    deadline = None if timeout is None else asyncio.get_event_loop().time() + timeout
+    while futures and (timeout is None or timeout > 0):
+        done, futures = await asyncio.wait(futures, timeout=timeout,
+                                           return_when=asyncio.FIRST_EXCEPTION)
+        check_futures(done)
+        if deadline:
+            timeout = deadline - asyncio.get_event_loop().time()
+
+    check_futures(futures)
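+# Illustrative behaviour: complete_all([f1, f2], timeout=5) re-raises the first
+# exception raised by a completed future; if any future is still pending once
+# the five-second deadline has passed, it raises asyncio.TimeoutError instead.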
+
+
+class TestSubprocess:
+    def __init__(self, p: asyncio.subprocess.Process,
+                 stdout: T.Optional[int], stderr: T.Optional[int],
+                 postwait_fn: T.Callable[[], None] = None):
+        self._process = p
+        self.stdout = stdout
+        self.stderr = stderr
+        self.stdo_task = None            # type: T.Optional[asyncio.Future[str]]
+        self.stde_task = None            # type: T.Optional[asyncio.Future[str]]
+        self.postwait_fn = postwait_fn   # type: T.Callable[[], None]
+        self.all_futures = []            # type: T.List[asyncio.Future]
+
+    def stdout_lines(self, console_mode: ConsoleUser) -> T.AsyncIterator[str]:
+        q = asyncio.Queue()              # type: asyncio.Queue[T.Optional[str]]
+        decode_coro = read_decode_lines(self._process.stdout, q, console_mode)
+        self.stdo_task = asyncio.ensure_future(decode_coro)
+        return queue_iter(q)
+
+    def communicate(self, console_mode: ConsoleUser) -> T.Tuple[T.Optional[T.Awaitable[str]],
+                                                                T.Optional[T.Awaitable[str]]]:
+        # asyncio.ensure_future ensures that printing can
+        # run in the background, even before it is awaited
+        if self.stdo_task is None and self.stdout is not None:
+            decode_coro = read_decode(self._process.stdout, console_mode)
+            self.stdo_task = asyncio.ensure_future(decode_coro)
+            self.all_futures.append(self.stdo_task)
+        if self.stderr is not None and self.stderr != asyncio.subprocess.STDOUT:
+            decode_coro = read_decode(self._process.stderr, console_mode)
+            self.stde_task = asyncio.ensure_future(decode_coro)
+            self.all_futures.append(self.stde_task)
+
+        return self.stdo_task, self.stde_task
+
+    async def _kill(self) -> T.Optional[str]:
+        # Python does not provide multiplatform support for
+        # killing a process and all its children so we need
+        # to roll our own.
+        p = self._process
+        try:
+            if is_windows():
+                subprocess.run(['taskkill', '/F', '/T', '/PID', str(p.pid)])
+            else:
+                # Send a termination signal to the process group that setsid()
+                # created - giving it a chance to perform any cleanup.
+                os.killpg(p.pid, signal.SIGTERM)
+
+                # Make sure the termination signal actually kills the process
+                # group, otherwise retry with a SIGKILL.
+                await try_wait_one(p.wait(), timeout=0.5)
+                if p.returncode is not None:
+                    return None
+
+                os.killpg(p.pid, signal.SIGKILL)
+
+            await try_wait_one(p.wait(), timeout=1)
+            if p.returncode is not None:
+                return None
+
+            # An earlier kill attempt has not worked for whatever reason.
+            # Try to kill it one last time with a direct call.
+            # If the process has spawned children, they will remain around.
+            p.kill()
+            await try_wait_one(p.wait(), timeout=1)
+            if p.returncode is not None:
+                return None
+            return 'Test process could not be killed.'
+        except ProcessLookupError:
+            # Sometimes (e.g. with Wine) this happens.  There's nothing
+            # we can do, probably the process already died so just wait
+            # for the event loop to pick that up.
+            await p.wait()
+            return None
+        finally:
+            if self.stdo_task:
+                self.stdo_task.cancel()
+            if self.stde_task:
+                self.stde_task.cancel()
+
+    async def wait(self, timeout: T.Optional[int]) -> T.Tuple[int, TestResult, T.Optional[str]]:
+        p = self._process
+        result = None
+        additional_error = None
+
+        self.all_futures.append(asyncio.ensure_future(p.wait()))
+        try:
+            await complete_all(self.all_futures, timeout=timeout)
+        except asyncio.TimeoutError:
+            additional_error = await self._kill()
+            result = TestResult.TIMEOUT
+        except asyncio.CancelledError:
+            # The main loop must have seen Ctrl-C.
+            additional_error = await self._kill()
+            result = TestResult.INTERRUPT
+        finally:
+            if self.postwait_fn:
+                self.postwait_fn()
+
+        return p.returncode or 0, result, additional_error
+
+class SingleTestRunner:
+
+    def __init__(self, test: TestSerialisation, env: T.Dict[str, str], name: str,
+                 options: argparse.Namespace):
+        self.test = test
+        self.options = options
+        self.cmd = self._get_cmd()
+
+        if self.cmd and self.test.extra_paths:
+            env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + env['PATH']
+            winecmd = []
+            for c in self.cmd:
+                winecmd.append(c)
+                if os.path.basename(c).startswith('wine'):
+                    env['WINEPATH'] = get_wine_shortpath(
+                        winecmd,
+                        ['Z:' + p for p in self.test.extra_paths] + env.get('WINEPATH', '').split(';')
+                    )
+                    break
+
+        # If MALLOC_PERTURB_ is not set, or if it is set to an empty value
+        # (i.e., the test or the environment doesn't explicitly set it), set
+        # it ourselves. We do this unconditionally for regular tests
+        # because it is extremely useful to have.
+        # Setting MALLOC_PERTURB_="0" will completely disable this feature.
+        if ('MALLOC_PERTURB_' not in env or not env['MALLOC_PERTURB_']) and not options.benchmark:
+            env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
+
+        if self.options.gdb or self.test.timeout is None or self.test.timeout <= 0:
+            timeout = None
+        elif self.options.timeout_multiplier is None:
+            timeout = self.test.timeout
+        elif self.options.timeout_multiplier <= 0:
+            timeout = None
+        else:
+            timeout = self.test.timeout * self.options.timeout_multiplier
+
+        is_parallel = test.is_parallel and self.options.num_processes > 1 and not self.options.gdb
+        self.runobj = TestRun(test, env, name, timeout, is_parallel)
+
+        if self.options.gdb:
+            self.console_mode = ConsoleUser.GDB
+        elif self.options.verbose and not is_parallel and not self.runobj.needs_parsing:
+            self.console_mode = ConsoleUser.STDOUT
+        else:
+            self.console_mode = ConsoleUser.LOGGER
+
+    def _get_test_cmd(self) -> T.Optional[T.List[str]]:
+        if self.test.fname[0].endswith('.jar'):
+            return ['java', '-jar'] + self.test.fname
+        elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]):
+            return ['mono'] + self.test.fname
+        elif self.test.cmd_is_built and self.test.is_cross_built and self.test.needs_exe_wrapper:
+            if self.test.exe_runner is None:
+                # Cannot run the test on a cross-compiled executable
+                # because there is no exe wrapper.
+                return None
+            elif self.test.cmd_is_built:
+                # If the command is not built (i.e., it's a python script),
+                # then we don't check for the exe-wrapper
+                if not self.test.exe_runner.found():
+                    msg = ('The exe_wrapper defined in the cross file {!r} was not '
+                           'found. Please check the command and/or add it to PATH.')
+                    raise TestException(msg.format(self.test.exe_runner.name))
+                return self.test.exe_runner.get_command() + self.test.fname
+        return self.test.fname
+
+    def _get_cmd(self) -> T.Optional[T.List[str]]:
+        test_cmd = self._get_test_cmd()
+        if not test_cmd:
+            return None
+        return TestHarness.get_wrapper(self.options) + test_cmd
+
+    @property
+    def is_parallel(self) -> bool:
+        return self.runobj.is_parallel
+
+    @property
+    def visible_name(self) -> str:
+        return self.runobj.name
+
+    @property
+    def timeout(self) -> T.Optional[int]:
+        return self.runobj.timeout
+
+    async def run(self, harness: 'TestHarness') -> TestRun:
+        if self.cmd is None:
+            skip_stdout = 'Not run because can not execute cross compiled binaries.'
+            harness.log_start_test(self.runobj)
+            self.runobj.complete_skip(skip_stdout)
+        else:
+            cmd = self.cmd + self.test.cmd_args + self.options.test_args
+            self.runobj.start(cmd)
+            harness.log_start_test(self.runobj)
+            await self._run_cmd(harness, cmd)
+        return self.runobj
+
+    async def _run_subprocess(self, args: T.List[str], *,
+                              stdout: int, stderr: int,
+                              env: T.Dict[str, str], cwd: T.Optional[str]) -> TestSubprocess:
+        # Let gdb handle ^C instead of us
+        if self.options.gdb:
+            previous_sigint_handler = signal.getsignal(signal.SIGINT)
+            # Make the meson executable ignore SIGINT while gdb is running.
+            signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+        def preexec_fn() -> None:
+            if self.options.gdb:
+                # Restore the SIGINT handler for the child process to
+                # ensure it can handle it.
+                signal.signal(signal.SIGINT, signal.SIG_DFL)
+            else:
+                # We don't call setsid() when running under gdb because gdb
+                # needs the controlling terminal to handle ^C; without it,
+                # gdb would show tcsetpgrp() errors and be unable to use the
+                # terminal.
+                os.setsid()
+
+        def postwait_fn() -> None:
+            if self.options.gdb:
+                # Let us accept ^C again
+                signal.signal(signal.SIGINT, previous_sigint_handler)
+
+        p = await asyncio.create_subprocess_exec(*args,
+                                                 stdout=stdout,
+                                                 stderr=stderr,
+                                                 env=env,
+                                                 cwd=cwd,
+                                                 preexec_fn=preexec_fn if not is_windows() else None)
+        return TestSubprocess(p, stdout=stdout, stderr=stderr,
+                              postwait_fn=postwait_fn if not is_windows() else None)
+
+    async def _run_cmd(self, harness: 'TestHarness', cmd: T.List[str]) -> None:
+        if self.console_mode is ConsoleUser.GDB:
+            stdout = None
+            stderr = None
+        else:
+            stdout = asyncio.subprocess.PIPE
+            stderr = asyncio.subprocess.STDOUT \
+                if not self.options.split and not self.runobj.needs_parsing \
+                else asyncio.subprocess.PIPE
+
+        extra_cmd = []  # type: T.List[str]
+        if self.test.protocol is TestProtocol.GTEST:
+            gtestname = self.test.name
+            if self.test.workdir:
+                gtestname = os.path.join(self.test.workdir, self.test.name)
+            extra_cmd.append(f'--gtest_output=xml:{gtestname}.xml')
+
+        p = await self._run_subprocess(cmd + extra_cmd,
+                                       stdout=stdout,
+                                       stderr=stderr,
+                                       env=self.runobj.env,
+                                       cwd=self.test.workdir)
+
+        parse_task = None
+        if self.runobj.needs_parsing:
+            parse_coro = self.runobj.parse(harness, p.stdout_lines(self.console_mode))
+            parse_task = asyncio.ensure_future(parse_coro)
+
+        stdo_task, stde_task = p.communicate(self.console_mode)
+        returncode, result, additional_error = await p.wait(self.runobj.timeout)
+
+        if parse_task is not None:
+            res, error = await parse_task
+            if error:
+                additional_error = join_lines(additional_error, error)
+            result = result or res
+
+        stdo = await stdo_task if stdo_task else ''
+        stde = await stde_task if stde_task else ''
+        stde = join_lines(stde, additional_error)
+        self.runobj.complete(returncode, result, stdo, stde)
+
+
+class TestHarness:
+    def __init__(self, options: argparse.Namespace):
+        self.options = options
+        self.collected_failures = []  # type: T.List[TestRun]
+        self.fail_count = 0
+        self.expectedfail_count = 0
+        self.unexpectedpass_count = 0
+        self.success_count = 0
+        self.skip_count = 0
+        self.timeout_count = 0
+        self.test_count = 0
+        self.name_max_len = 0
+        self.is_run = False
+        self.loggers = []         # type: T.List[TestLogger]
+        self.loggers.append(ConsoleLogger())
+        self.need_console = False
+
+        self.logfile_base = None  # type: T.Optional[str]
+        if self.options.logbase and not self.options.gdb:
+            namebase = None
+            self.logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase)
+
+            if self.options.wrapper:
+                namebase = os.path.basename(self.get_wrapper(self.options)[0])
+            elif self.options.setup:
+                namebase = self.options.setup.replace(":", "_")
+
+            if namebase:
+                self.logfile_base += '-' + namebase.replace(' ', '_')
+
+        startdir = os.getcwd()
+        try:
+            os.chdir(self.options.wd)
+            self.build_data = build.load(os.getcwd())
+            if not self.options.setup:
+                self.options.setup = self.build_data.test_setup_default_name
+            if self.options.benchmark:
+                self.tests = self.load_tests('meson_benchmark_setup.dat')
+            else:
+                self.tests = self.load_tests('meson_test_setup.dat')
+        finally:
+            os.chdir(startdir)
+
+        ss = set()
+        for t in self.tests:
+            for s in t.suite:
+                ss.add(s)
+        self.suites = list(ss)
+
+    def load_tests(self, file_name: str) -> T.List[TestSerialisation]:
+        datafile = Path('meson-private') / file_name
+        if not datafile.is_file():
+            raise TestException(f'Directory {self.options.wd!r} does not seem to be a Meson build directory.')
+        with datafile.open('rb') as f:
+            objs = check_testdata(pickle.load(f))
+        return objs
+
+    def __enter__(self) -> 'TestHarness':
+        return self
+
+    def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
+        self.close_logfiles()
+
+    def close_logfiles(self) -> None:
+        for l in self.loggers:
+            l.close()
+
+    def get_test_setup(self, test: T.Optional[TestSerialisation]) -> build.TestSetup:
+        if ':' in self.options.setup:
+            if self.options.setup not in self.build_data.test_setups:
+                sys.exit(f"Unknown test setup '{self.options.setup}'.")
+            return self.build_data.test_setups[self.options.setup]
+        else:
+            full_name = test.project_name + ":" + self.options.setup
+            if full_name not in self.build_data.test_setups:
+                sys.exit(f"Test setup '{self.options.setup}' not found from project '{test.project_name}'.")
+            return self.build_data.test_setups[full_name]
+
+    def merge_setup_options(self, options: argparse.Namespace, test: TestSerialisation) -> T.Dict[str, str]:
+        current = self.get_test_setup(test)
+        if not options.gdb:
+            options.gdb = current.gdb
+        if options.gdb:
+            options.verbose = True
+        if options.timeout_multiplier is None:
+            options.timeout_multiplier = current.timeout_multiplier
+    #    if options.env is None:
+    #        options.env = current.env # FIXME, should probably merge options here.
+        if options.wrapper is None:
+            options.wrapper = current.exe_wrapper
+        elif current.exe_wrapper:
+            sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
+        return current.env.get_env(os.environ.copy())
+
+    def get_test_runner(self, test: TestSerialisation) -> SingleTestRunner:
+        name = self.get_pretty_suite(test)
+        options = deepcopy(self.options)
+        if self.options.setup:
+            env = self.merge_setup_options(options, test)
+        else:
+            env = os.environ.copy()
+        test_env = test.env.get_env(env)
+        env.update(test_env)
+        if (test.is_cross_built and test.needs_exe_wrapper and
+                test.exe_runner and test.exe_runner.found()):
+            env['MESON_EXE_WRAPPER'] = join_args(test.exe_runner.get_command())
+        return SingleTestRunner(test, env, name, options)
+
+    def process_test_result(self, result: TestRun) -> None:
+        if result.res is TestResult.TIMEOUT:
+            self.timeout_count += 1
+        elif result.res is TestResult.SKIP:
+            self.skip_count += 1
+        elif result.res is TestResult.OK:
+            self.success_count += 1
+        elif result.res in {TestResult.FAIL, TestResult.ERROR, TestResult.INTERRUPT}:
+            self.fail_count += 1
+        elif result.res is TestResult.EXPECTEDFAIL:
+            self.expectedfail_count += 1
+        elif result.res is TestResult.UNEXPECTEDPASS:
+            self.unexpectedpass_count += 1
+        else:
+            sys.exit(f'Unknown test result encountered: {result.res}')
+
+        if result.res.is_bad():
+            self.collected_failures.append(result)
+        for l in self.loggers:
+            l.log(self, result)
+
+    @property
+    def numlen(self) -> int:
+        return len(str(self.test_count))
+
+    @property
+    def max_left_width(self) -> int:
+        return 2 * self.numlen + 2
+
+    def format(self, result: TestRun, colorize: bool,
+               max_left_width: int = 0,
+               prefix: str = '',
+               left: T.Optional[str] = None,
+               middle: T.Optional[str] = None,
+               right: T.Optional[str] = None) -> str:
+
+        if left is None:
+            left = '{num:{numlen}}/{testcount} '.format(
+                numlen=self.numlen,
+                num=result.num,
+                testcount=self.test_count)
+
+        # A non-default max_left_width lets the logger print more stuff before the
+        # name, while ensuring that the rightmost columns remain aligned.
+        max_left_width = max(max_left_width, self.max_left_width)
+
+        if middle is None:
+            middle = result.name
+        extra_mid_width = max_left_width + self.name_max_len + 1 - uniwidth(middle) - uniwidth(left) - uniwidth(prefix)
+        middle += ' ' * max(1, extra_mid_width)
+
+        if right is None:
+            right = '{res} {dur:{durlen}.2f}s'.format(
+                res=result.res.get_text(colorize),
+                dur=result.duration,
+                durlen=self.duration_max_len + 3)
+            detail = result.detail
+            if detail:
+                right += '   ' + detail
+        return prefix + left + middle + right
+
+    def summary(self) -> str:
+        return textwrap.dedent('''
+
+            Ok:                 {:<4}
+            Expected Fail:      {:<4}
+            Fail:               {:<4}
+            Unexpected Pass:    {:<4}
+            Skipped:            {:<4}
+            Timeout:            {:<4}
+            ''').format(self.success_count, self.expectedfail_count, self.fail_count,
+                        self.unexpectedpass_count, self.skip_count, self.timeout_count)
+
+    def total_failure_count(self) -> int:
+        return self.fail_count + self.unexpectedpass_count + self.timeout_count
+
+    def doit(self) -> int:
+        if self.is_run:
+            raise RuntimeError('Test harness object can only be used once.')
+        self.is_run = True
+        tests = self.get_tests()
+        if not tests:
+            return 0
+        if not self.options.no_rebuild and not rebuild_deps(self.options.wd, tests):
+            # We return 125 here in case the build failed.
+            # The reason is that exit code 125 tells `git bisect run` that the current
+            # commit should be skipped.  Thus users can directly use `meson test` to
+            # bisect without needing to handle the does-not-build case separately in a
+            # wrapper script.
+            sys.exit(125)
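+            # Illustrative usage of the behaviour described above: something
+            # like `git bisect run meson test -C builddir` can then skip
+            # commits that fail to build instead of marking them bad.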
+
+        self.name_max_len = max([uniwidth(self.get_pretty_suite(test)) for test in tests])
+        startdir = os.getcwd()
+        try:
+            os.chdir(self.options.wd)
+            runners = []             # type: T.List[SingleTestRunner]
+            for i in range(self.options.repeat):
+                runners.extend(self.get_test_runner(test) for test in tests)
+                if i == 0:
+                    self.duration_max_len = max([len(str(int(runner.timeout or 99)))
+                                                 for runner in runners])
+                    # Disable the progress report if it gets in the way
+                    self.need_console = any(runner.console_mode is not ConsoleUser.LOGGER
+                                             for runner in runners)
+
+            self.test_count = len(runners)
+            self.run_tests(runners)
+        finally:
+            os.chdir(startdir)
+        return self.total_failure_count()
+
+    @staticmethod
+    def split_suite_string(suite: str) -> T.Tuple[str, str]:
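+        # Illustrative behaviour: 'proj:unit' -> ('proj', 'unit'), while a
+        # bare 'unit' -> ('unit', '').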
+        if ':' in suite:
+            split = suite.split(':', 1)
+            assert len(split) == 2
+            return split[0], split[1]
+        else:
+            return suite, ""
+
+    @staticmethod
+    def test_in_suites(test: TestSerialisation, suites: T.List[str]) -> bool:
+        for suite in suites:
+            (prj_match, st_match) = TestHarness.split_suite_string(suite)
+            for prjst in test.suite:
+                (prj, st) = TestHarness.split_suite_string(prjst)
+
+                # the SUITE can be passed as
+                #     suite_name
+                # or
+                #     project_name:suite_name
+                # so we need to select only the test belonging to project_name
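+                # For example (illustrative): passing "--suite unit" matches a
+                # test whose suite list contains "proj:unit" in any project,
+                # while "--suite proj:unit" matches it only when the project
+                # is "proj".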
+
+                # this if handles the first case (i.e., SUITE == suite_name)
+
+                # in this way we can run tests belonging to different
+                # (sub)projects which share the same suite_name
+                if not st_match and st == prj_match:
+                    return True
+
+                # these two conditions are needed to handle the second option
+                # i.e., SUITE == project_name:suite_name
+
+                # in this way we select only the tests of
+                # project_name with suite_name
+                if prj_match and prj != prj_match:
+                    continue
+                if st_match and st != st_match:
+                    continue
+                return True
+        return False
+
+    def test_suitable(self, test: TestSerialisation) -> bool:
+        if TestHarness.test_in_suites(test, self.options.exclude_suites):
+            return False
+
+        if self.options.include_suites:
+            # Both force inclusion (overriding add_test_setup) and exclude
+            # everything else
+            return TestHarness.test_in_suites(test, self.options.include_suites)
+
+        if self.options.setup:
+            setup = self.get_test_setup(test)
+            if TestHarness.test_in_suites(test, setup.exclude_suites):
+                return False
+
+        return True
+
+    def tests_from_args(self, tests: T.List[TestSerialisation]) -> T.Generator[TestSerialisation, None, None]:
+        '''
+        Allow specifying test names like "meson test foo1 foo2", where
+        test('foo1', ...) and test('foo2', ...) are defined in the build files.
+
+        Also support specifying the subproject to run tests from like
+        "meson test subproj:" (all tests inside subproj) or "meson test subproj:foo1"
+        to run foo1 inside subproj. Coincidentally also "meson test :foo1" to
+        run all tests with that name across all subprojects, which is
+        identical to "meson test foo1"
+        '''
+        for arg in self.options.args:
+            if ':' in arg:
+                subproj, name = arg.split(':', maxsplit=1)
+            else:
+                subproj, name = '', arg
+            for t in tests:
+                if subproj and t.project_name != subproj:
+                    continue
+                if name and t.name != name:
+                    continue
+                yield t
+
+    def get_tests(self) -> T.List[TestSerialisation]:
+        if not self.tests:
+            print('No tests defined.')
+            return []
+
+        tests = [t for t in self.tests if self.test_suitable(t)]
+        if self.options.args:
+            tests = list(self.tests_from_args(tests))
+
+        if not tests:
+            print('No suitable tests defined.')
+            return []
+
+        return tests
+
+    def flush_logfiles(self) -> None:
+        for l in self.loggers:
+            l.flush()
+
+    def open_logfiles(self) -> None:
+        if not self.logfile_base:
+            return
+
+        self.loggers.append(JunitBuilder(self.logfile_base + '.junit.xml'))
+        self.loggers.append(JsonLogfileBuilder(self.logfile_base + '.json'))
+        self.loggers.append(TextLogfileBuilder(self.logfile_base + '.txt', errors='surrogateescape'))
+
+    @staticmethod
+    def get_wrapper(options: argparse.Namespace) -> T.List[str]:
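+        # Illustrative result (a sketch): with --gdb this typically returns
+        # something like [options.gdb_path, '--quiet', '--nh', '--args'],
+        # which callers prepend to the test command; any --wrapper command is
+        # appended after the gdb arguments.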
+        wrap = []  # type: T.List[str]
+        if options.gdb:
+            wrap = [options.gdb_path, '--quiet', '--nh']
+            if options.repeat > 1:
+                wrap += ['-ex', 'run', '-ex', 'quit']
+            # Signal the end of arguments to gdb
+            wrap += ['--args']
+        if options.wrapper:
+            wrap += options.wrapper
+        return wrap
+
+    def get_pretty_suite(self, test: TestSerialisation) -> str:
+        if len(self.suites) > 1 and test.suite:
+            rv = TestHarness.split_suite_string(test.suite[0])[0]
+            s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
+            if s:
+                rv += ":"
+            return rv + s + " / " + test.name
+        else:
+            return test.name
+
+    def run_tests(self, runners: T.List[SingleTestRunner]) -> None:
+        try:
+            self.open_logfiles()
+            # Replace with asyncio.run once we can require Python 3.7
+            loop = asyncio.get_event_loop()
+            loop.run_until_complete(self._run_tests(runners))
+        finally:
+            self.close_logfiles()
+
+    def log_subtest(self, test: TestRun, s: str, res: TestResult) -> None:
+        for l in self.loggers:
+            l.log_subtest(self, test, s, res)
+
+    def log_start_test(self, test: TestRun) -> None:
+        for l in self.loggers:
+            l.start_test(self, test)
+
+    async def _run_tests(self, runners: T.List[SingleTestRunner]) -> None:
+        semaphore = asyncio.Semaphore(self.options.num_processes)
+        futures = deque()  # type: T.Deque[asyncio.Future]
+        running_tests = dict() # type: T.Dict[asyncio.Future, str]
+        interrupted = False
+        ctrlc_times = deque(maxlen=MAX_CTRLC) # type: T.Deque[float]
+
+        async def run_test(test: SingleTestRunner) -> None:
+            async with semaphore:
+                if interrupted or (self.options.repeat > 1 and self.fail_count):
+                    return
+                res = await test.run(self)
+                self.process_test_result(res)
+
+        def test_done(f: asyncio.Future) -> None:
+            if not f.cancelled():
+                f.result()
+            futures.remove(f)
+            try:
+                del running_tests[f]
+            except KeyError:
+                pass
+
+        def cancel_one_test(warn: bool) -> None:
+            future = futures.popleft()
+            futures.append(future)
+            if warn:
+                self.flush_logfiles()
+                mlog.warning('CTRL-C detected, interrupting {}'.format(running_tests[future]))
+            del running_tests[future]
+            future.cancel()
+
+        def cancel_all_tests() -> None:
+            nonlocal interrupted
+            interrupted = True
+            while running_tests:
+                cancel_one_test(False)
+
+        def sigterm_handler() -> None:
+            if interrupted:
+                return
+            self.flush_logfiles()
+            mlog.warning('Received SIGTERM, exiting')
+            cancel_all_tests()
+
+        def sigint_handler() -> None:
+            # We always pick the longest-running future that has not been cancelled
+            # If all the tests have been CTRL-C'ed, just stop
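+            # In other words (illustrative): a single Ctrl-C interrupts the
+            # oldest still-running test, while MAX_CTRLC presses within one
+            # second abort the whole run.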
+            nonlocal interrupted
+            if interrupted:
+                return
+            ctrlc_times.append(asyncio.get_event_loop().time())
+            if len(ctrlc_times) == MAX_CTRLC and ctrlc_times[-1] - ctrlc_times[0] < 1:
+                self.flush_logfiles()
+                mlog.warning('CTRL-C detected, exiting')
+                cancel_all_tests()
+            elif running_tests:
+                cancel_one_test(True)
+            else:
+                self.flush_logfiles()
+                mlog.warning('CTRL-C detected, exiting')
+                interrupted = True
+
+        for l in self.loggers:
+            l.start(self)
+
+        if sys.platform != 'win32':
+            if os.getpgid(0) == os.getpid():
+                asyncio.get_event_loop().add_signal_handler(signal.SIGINT, sigint_handler)
+            else:
+                asyncio.get_event_loop().add_signal_handler(signal.SIGINT, sigterm_handler)
+            asyncio.get_event_loop().add_signal_handler(signal.SIGTERM, sigterm_handler)
+        try:
+            for runner in runners:
+                if not runner.is_parallel:
+                    await complete_all(futures)
+                future = asyncio.ensure_future(run_test(runner))
+                futures.append(future)
+                running_tests[future] = runner.visible_name
+                future.add_done_callback(test_done)
+                if not runner.is_parallel:
+                    await complete(future)
+                if self.options.repeat > 1 and self.fail_count:
+                    break
+
+            await complete_all(futures)
+        finally:
+            if sys.platform != 'win32':
+                asyncio.get_event_loop().remove_signal_handler(signal.SIGINT)
+                asyncio.get_event_loop().remove_signal_handler(signal.SIGTERM)
+            for l in self.loggers:
+                await l.finish(self)
+
+def list_tests(th: TestHarness) -> bool:
+    tests = th.get_tests()
+    for t in tests:
+        print(th.get_pretty_suite(t))
+    return not tests
+
+def rebuild_deps(wd: str, tests: T.List[TestSerialisation]) -> bool:
+    def convert_path_to_target(path: str) -> str:
+        path = os.path.relpath(path, wd)
+        if os.sep != '/':
+            path = path.replace(os.sep, '/')
+        return path
+
+    if not (Path(wd) / 'build.ninja').is_file():
+        print('Only the ninja backend is supported for rebuilding tests before running them.')
+        return True
+
+    ninja = environment.detect_ninja()
+    if not ninja:
+        print("Can't find ninja, can't rebuild test.")
+        return False
+
+    depends = set()            # type: T.Set[str]
+    targets = set()            # type: T.Set[str]
+    intro_targets = dict()     # type: T.Dict[str, T.List[str]]
+    for target in load_info_file(get_infodir(wd), kind='targets'):
+        intro_targets[target['id']] = [
+            convert_path_to_target(f)
+            for f in target['filename']]
+    for t in tests:
+        for d in t.depends:
+            if d in depends:
+                continue
+            depends.add(d)
+            targets.update(intro_targets[d])
+
+    ret = subprocess.run(ninja + ['-C', wd] + sorted(targets)).returncode
+    if ret != 0:
+        print(f'Could not rebuild {wd}')
+        return False
+
+    return True
+
+def run(options: argparse.Namespace) -> int:
+    if options.benchmark:
+        options.num_processes = 1
+
+    if options.verbose and options.quiet:
+        print('Can not be both quiet and verbose at the same time.')
+        return 1
+
+    check_bin = None
+    if options.gdb:
+        options.verbose = True
+        if options.wrapper:
+            print('Must not specify both a wrapper and gdb at the same time.')
+            return 1
+        check_bin = 'gdb'
+
+    if options.wrapper:
+        check_bin = options.wrapper[0]
+
+    if sys.platform == 'win32':
+        loop = asyncio.ProactorEventLoop()
+        asyncio.set_event_loop(loop)
+
+    if check_bin is not None:
+        exe = ExternalProgram(check_bin, silent=True)
+        if not exe.found():
+            print(f'Could not find requested program: {check_bin!r}')
+            return 1
+
+    with TestHarness(options) as th:
+        try:
+            if options.list:
+                return list_tests(th)
+            return th.doit()
+        except TestException as e:
+            print('Meson test encountered an error:\n')
+            if os.environ.get('MESON_FORCE_BACKTRACE'):
+                raise e
+            else:
+                print(e)
+            return 1
+
+def run_with_args(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser(prog='meson test')
+    add_arguments(parser)
+    options = parser.parse_args(args)
+    return run(options)
diff --git a/meson/mesonbuild/munstable_coredata.py b/meson/mesonbuild/munstable_coredata.py
new file mode 100644
index 000000000..0ca8f3398
--- /dev/null
+++ b/meson/mesonbuild/munstable_coredata.py
@@ -0,0 +1,114 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from . import coredata as cdata
+from .mesonlib import MachineChoice, OptionKey
+
+import os.path
+import pprint
+import textwrap
+
+def add_arguments(parser):
+    parser.add_argument('--all', action='store_true', dest='all', default=False,
+                        help='Show data not used by current backend.')
+
+    parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+
+def dump_compilers(compilers):
+    for lang, compiler in compilers.items():
+        print('  ' + lang + ':')
+        print('      Id: ' + compiler.id)
+        print('      Command: ' + ' '.join(compiler.exelist))
+        if compiler.full_version:
+            print('      Full version: ' + compiler.full_version)
+        if compiler.version:
+            print('      Detected version: ' + compiler.version)
+
+
+def dump_guids(d):
+    for name, value in d.items():
+        print('  ' + name + ': ' + value)
+
+
+def run(options):
+    datadir = 'meson-private'
+    if options.builddir is not None:
+        datadir = os.path.join(options.builddir, datadir)
+    if not os.path.isdir(datadir):
+        print('Current directory is not a build dir. Please specify it or '
+              'change the working directory to it.')
+        return 1
+
+    all_backends = options.all
+
+    print('This is a dump of the internal unstable cache of meson. This is for debugging only.')
+    print('Do NOT parse, this will change from version to version in incompatible ways')
+    print('')
+
+    coredata = cdata.load(options.builddir)
+    backend = coredata.get_option(OptionKey('backend'))
+    for k, v in sorted(coredata.__dict__.items()):
+        if k in ('backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'):
+            # use `meson configure` to view these
+            pass
+        elif k in ['install_guid', 'test_guid', 'regen_guid']:
+            if all_backends or backend.startswith('vs'):
+                print(k + ': ' + v)
+        elif k == 'target_guids':
+            if all_backends or backend.startswith('vs'):
+                print(k + ':')
+                dump_guids(v)
+        elif k in ['lang_guids']:
+            if all_backends or backend.startswith('vs') or backend == 'xcode':
+                print(k + ':')
+                dump_guids(v)
+        elif k == 'meson_command':
+            if all_backends or backend.startswith('vs'):
+                print('Meson command used in build file regeneration: ' + ' '.join(v))
+        elif k == 'pkgconf_envvar':
+            print('Last seen PKGCONFIG environment variable value: ' + v)
+        elif k == 'version':
+            print('Meson version: ' + v)
+        elif k == 'cross_files':
+            if v:
+                print('Cross File: ' + ' '.join(v))
+        elif k == 'config_files':
+            if v:
+                print('Native File: ' + ' '.join(v))
+        elif k == 'compilers':
+            for for_machine in MachineChoice:
+                print('Cached {} machine compilers:'.format(
+                    for_machine.get_lower_case_name()))
+                dump_compilers(v[for_machine])
+        elif k == 'deps':
+            def print_dep(dep_key, dep):
+                print('  ' + dep_key[0] + ": ")
+                print('      compile args: ' + repr(dep.get_compile_args()))
+                print('      link args: ' + repr(dep.get_link_args()))
+                if dep.get_sources():
+                    print('      sources: ' + repr(dep.get_sources()))
+                print('      version: ' + repr(dep.get_version()))
+
+            for for_machine in iter(MachineChoice):
+                items_list = list(sorted(v[for_machine].items()))
+                if items_list:
+                    print('Cached dependencies for {} machine'.format(
+                        for_machine.get_lower_case_name()))
+                    for dep_key, deps in items_list:
+                        for dep in deps:
+                            print_dep(dep_key, dep)
+        else:
+            print(k + ':')
+            print(textwrap.indent(pprint.pformat(v), '  '))
diff --git a/meson/mesonbuild/optinterpreter.py b/meson/mesonbuild/optinterpreter.py
new file mode 100644
index 000000000..c91363df0
--- /dev/null
+++ b/meson/mesonbuild/optinterpreter.py
@@ -0,0 +1,234 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import functools
+import typing as T
+
+from . import coredata
+from . import mesonlib
+from . import mparser
+from . import mlog
+from .interpreterbase import FeatureNew
+
+if T.TYPE_CHECKING:
+    from .interpreterbase import TV_func
+
+class OptionException(mesonlib.MesonException):
+    pass
+
+
+def permitted_kwargs(permitted: T.Set[str]) -> T.Callable[..., T.Any]:
+    """Function that validates kwargs for options."""
+    def _wraps(func: 'TV_func') -> 'TV_func':
+        @functools.wraps(func)
+        def _inner(name: str, description: str, kwargs: T.Dict[str, T.Any]) -> T.Any:
+            bad = [a for a in kwargs.keys() if a not in permitted]
+            if bad:
+                raise OptionException('Invalid kwargs for option "{}": "{}"'.format(
+                    name, ' '.join(bad)))
+            return func(description, kwargs)
+        return T.cast('TV_func', _inner)
+    return _wraps
+
+
+optname_regex = re.compile('[^a-zA-Z0-9_-]')
+
+@permitted_kwargs({'value', 'yield'})
+def string_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserStringOption:
+    return coredata.UserStringOption(description,
+                                     kwargs.get('value', ''),
+                                     kwargs.get('yield', coredata.default_yielding))
+
+@permitted_kwargs({'value', 'yield'})
+def boolean_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserBooleanOption:
+    return coredata.UserBooleanOption(description,
+                                      kwargs.get('value', True),
+                                      kwargs.get('yield', coredata.default_yielding))
+
+@permitted_kwargs({'value', 'yield', 'choices'})
+def combo_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserComboOption:
+    if 'choices' not in kwargs:
+        raise OptionException('Combo option missing "choices" keyword.')
+    choices = kwargs['choices']
+    if not isinstance(choices, list):
+        raise OptionException('Combo choices must be an array.')
+    for i in choices:
+        if not isinstance(i, str):
+            raise OptionException('Combo choice elements must be strings.')
+    return coredata.UserComboOption(description,
+                                    choices,
+                                    kwargs.get('value', choices[0]),
+                                    kwargs.get('yield', coredata.default_yielding),)
+
+
+@permitted_kwargs({'value', 'min', 'max', 'yield'})
+def integer_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserIntegerOption:
+    if 'value' not in kwargs:
+        raise OptionException('Integer option must contain value argument.')
+    inttuple = (kwargs.get('min', None), kwargs.get('max', None), kwargs['value'])
+    return coredata.UserIntegerOption(description,
+                                      inttuple,
+                                      kwargs.get('yield', coredata.default_yielding))
+
+# FIXME: Cannot use FeatureNew while parsing options because we parse it before
+# reading options in project(). See func_project() in interpreter.py
+#@FeatureNew('array type option()', '0.44.0')
+@permitted_kwargs({'value', 'yield', 'choices'})
+def string_array_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserArrayOption:
+    if 'choices' in kwargs:
+        choices = kwargs['choices']
+        if not isinstance(choices, list):
+            raise OptionException('Array choices must be an array.')
+        for i in choices:
+            if not isinstance(i, str):
+                raise OptionException('Array choice elements must be strings.')
+        value = kwargs.get('value', choices)
+    else:
+        choices = None
+        value = kwargs.get('value', [])
+    if not isinstance(value, list):
+        raise OptionException('Array option values must be passed as an array.')
+    return coredata.UserArrayOption(description,
+                                    value,
+                                    choices=choices,
+                                    yielding=kwargs.get('yield', coredata.default_yielding))
+
+@permitted_kwargs({'value', 'yield'})
+def feature_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserFeatureOption:
+    return coredata.UserFeatureOption(description,
+                                      kwargs.get('value', 'auto'),
+                                      yielding=kwargs.get('yield', coredata.default_yielding))
+
+option_types = {'string': string_parser,
+                'boolean': boolean_parser,
+                'combo': combo_parser,
+                'integer': integer_parser,
+                'array': string_array_parser,
+                'feature': feature_parser,
+                } # type: T.Dict[str, T.Callable[[str, str, T.Dict[str, T.Any]], coredata.UserOption]]
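+
+# Illustrative example (a sketch, not taken from this repository) of the kind
+# of declaration these parsers handle in a project's meson_options.txt:
+#
+#     option('with_tests', type : 'boolean', value : true,
+#            description : 'Build the test suite')
+#
+# OptionInterpreter.evaluate_statement() below selects the parser via the
+# 'type' keyword and hands the remaining keyword arguments to it, where
+# permitted_kwargs() validates them.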
+
+class OptionInterpreter:
+    def __init__(self, subproject: str) -> None:
+        self.options: 'coredata.KeyedOptionDictType' = {}
+        self.subproject = subproject
+
+    def process(self, option_file: str) -> None:
+        try:
+            with open(option_file, encoding='utf-8') as f:
+                ast = mparser.Parser(f.read(), option_file).parse()
+        except mesonlib.MesonException as me:
+            me.file = option_file
+            raise me
+        if not isinstance(ast, mparser.CodeBlockNode):
+            e = OptionException('Option file is malformed.')
+            e.lineno = ast.lineno()
+            e.file = option_file
+            raise e
+        for cur in ast.lines:
+            try:
+                self.evaluate_statement(cur)
+            except mesonlib.MesonException as e:
+                e.lineno = cur.lineno
+                e.colno = cur.colno
+                e.file = option_file
+                raise e
+            except Exception as e:
+                raise mesonlib.MesonException(
+                    str(e), lineno=cur.lineno, colno=cur.colno, file=option_file)
+
+    def reduce_single(self, arg: T.Union[str, mparser.BaseNode]) -> T.Union[str, int, bool, T.Sequence[T.Union[str, int, bool]]]:
+        if isinstance(arg, str):
+            return arg
+        elif isinstance(arg, (mparser.StringNode, mparser.BooleanNode,
+                              mparser.NumberNode)):
+            return arg.value
+        elif isinstance(arg, mparser.ArrayNode):
+            lr = [self.reduce_single(curarg) for curarg in arg.args.arguments]
+            # mypy really struggles with recursive flattening, help it out
+            return T.cast(T.Sequence[T.Union[str, int, bool]], lr)
+        elif isinstance(arg, mparser.UMinusNode):
+            res = self.reduce_single(arg.value)
+            if not isinstance(res, (int, float)):
+                raise OptionException('Token after "-" is not a number')
+            FeatureNew.single_use('negative numbers in meson_options.txt', '0.54.1', self.subproject)
+            return -res
+        elif isinstance(arg, mparser.NotNode):
+            res = self.reduce_single(arg.value)
+            if not isinstance(res, bool):
+                raise OptionException('Token after "not" is not a boolean')
+            FeatureNew.single_use('negation ("not") in meson_options.txt', '0.54.1', self.subproject)
+            return not res
+        elif isinstance(arg, mparser.ArithmeticNode):
+            l = self.reduce_single(arg.left)
+            r = self.reduce_single(arg.right)
+            if not (arg.operation == 'add' and isinstance(l, str) and isinstance(r, str)):
+                raise OptionException('Only string concatenation with the "+" operator is allowed')
+            FeatureNew.single_use('string concatenation in meson_options.txt', '0.55.0', self.subproject)
+            return l + r
+        else:
+            raise OptionException('Arguments may only be string, int, bool, or array of those.')
+
+    def reduce_arguments(self, args: mparser.ArgumentNode) -> T.Tuple[
+            T.List[T.Union[str, int, bool, T.Sequence[T.Union[str, int, bool]]]],
+            T.Dict[str, T.Union[str, int, bool, T.Sequence[T.Union[str, int, bool]]]]]:
+        if args.incorrect_order():
+            raise OptionException('All keyword arguments must be after positional arguments.')
+        reduced_pos = [self.reduce_single(arg) for arg in args.arguments]
+        reduced_kw = {}
+        for key in args.kwargs.keys():
+            if not isinstance(key, mparser.IdNode):
+                raise OptionException('Keyword argument name is not a string.')
+            a = args.kwargs[key]
+            reduced_kw[key.value] = self.reduce_single(a)
+        return reduced_pos, reduced_kw
+
+    def evaluate_statement(self, node: mparser.BaseNode) -> None:
+        if not isinstance(node, mparser.FunctionNode):
+            raise OptionException('Option file may only contain option definitions')
+        func_name = node.func_name
+        if func_name != 'option':
+            raise OptionException('Only calls to option() are allowed in option files.')
+        (posargs, kwargs) = self.reduce_arguments(node.args)
+
+        if len(posargs) != 1:
+            raise OptionException('Option() must have one (and only one) positional argument')
+        opt_name = posargs[0]
+        if not isinstance(opt_name, str):
+            raise OptionException('Positional argument must be a string.')
+        if optname_regex.search(opt_name) is not None:
+            raise OptionException('Option names can only contain letters, numbers, dashes or underscores.')
+        key = mesonlib.OptionKey.from_string(opt_name).evolve(subproject=self.subproject)
+        if not key.is_project():
+            raise OptionException('Option name %s is reserved.' % opt_name)
+
+        if 'yield' in kwargs:
+            FeatureNew.single_use('option yield', '0.45.0', self.subproject)
+
+        if 'type' not in kwargs:
+            raise OptionException('Option call missing mandatory "type" keyword argument')
+        opt_type = kwargs.pop('type')
+        if not isinstance(opt_type, str):
+            raise OptionException('option() type must be a string')
+        if opt_type not in option_types:
+            raise OptionException('Unknown type %s.' % opt_type)
+
+        description = kwargs.pop('description', '')
+        if not isinstance(description, str):
+            raise OptionException('Option descriptions must be strings.')
+
+        opt = option_types[opt_type](opt_name, description, kwargs)
+        if opt.description == '':
+            opt.description = opt_name
+        self.options[key] = opt
diff --git a/meson/mesonbuild/programs.py b/meson/mesonbuild/programs.py
new file mode 100644
index 000000000..af27801f7
--- /dev/null
+++ b/meson/mesonbuild/programs.py
@@ -0,0 +1,386 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations and logic for External and Internal Programs."""
+
+import functools
+import os
+import shutil
+import stat
+import sys
+import re
+import typing as T
+from pathlib import Path
+
+from . import mesonlib
+from . import mlog
+from .mesonlib import MachineChoice
+
+if T.TYPE_CHECKING:
+    from .environment import Environment
+    from .interpreter import Interpreter
+
+
+class ExternalProgram(mesonlib.HoldableObject):
+
+    """A program that is found on the system."""
+
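+    # Illustrative usage (a sketch, not part of this module):
+    #
+    #     prog = ExternalProgram('ninja', silent=True)
+    #     if prog.found():
+    #         print(' '.join(prog.get_command()))
+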
+    windows_exts = ('exe', 'msc', 'com', 'bat', 'cmd')
+    for_machine = MachineChoice.BUILD
+
+    def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
+                 silent: bool = False, search_dir: T.Optional[str] = None,
+                 extra_search_dirs: T.Optional[T.List[str]] = None):
+        self.name = name
+        self.path: T.Optional[str] = None
+        self.cached_version: T.Optional[str] = None
+        if command is not None:
+            self.command = mesonlib.listify(command)
+            if mesonlib.is_windows():
+                cmd = self.command[0]
+                args = self.command[1:]
+                # Check whether the specified cmd is a path to a script, in
+                # which case we need to insert the interpreter. If not, try to
+                # use it as-is.
+                ret = self._shebang_to_cmd(cmd)
+                if ret:
+                    self.command = ret + args
+                else:
+                    self.command = [cmd] + args
+        else:
+            all_search_dirs = [search_dir]
+            if extra_search_dirs:
+                all_search_dirs += extra_search_dirs
+            for d in all_search_dirs:
+                self.command = self._search(name, d)
+                if self.found():
+                    break
+
+        if self.found():
+            # Set path to be the last item that is actually a file (in order to
+            # skip options in something like ['python', '-u', 'file.py']). If we
+            # can't find any components, default to the last component of the path.
+            for arg in reversed(self.command):
+                if arg is not None and os.path.isfile(arg):
+                    self.path = arg
+                    break
+            else:
+                self.path = self.command[-1]
+
+        if not silent:
+            # ignore the warning because derived classes never call this __init__
+            # method, and thus only the found() method of this class is ever executed
+            if self.found():  # lgtm [py/init-calls-subclass]
+                mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
+                         '(%s)' % ' '.join(self.command))
+            else:
+                mlog.log('Program', mlog.bold(name), 'found:', mlog.red('NO'))
+
+    def summary_value(self) -> T.Union[str, mlog.AnsiDecorator]:
+        if not self.found():
+            return mlog.red('NO')
+        return self.path
+
+    def __repr__(self) -> str:
+        r = '<{} {!r} -> {!r}>'
+        return r.format(self.__class__.__name__, self.name, self.command)
+
+    def description(self) -> str:
+        '''Human friendly description of the command'''
+        return ' '.join(self.command)
+
+    def get_version(self, interpreter: 'Interpreter') -> str:
+        if not self.cached_version:
+            raw_cmd = self.get_command() + ['--version']
+            cmd: T.List[T.Union[str, ExternalProgram]] = [self, '--version']
+            res = interpreter.run_command_impl(interpreter.current_node, cmd, {}, True)
+            if res.returncode != 0:
+                m = 'Running {!r} failed'
+                raise mesonlib.MesonException(m.format(raw_cmd))
+            output = res.stdout.strip()
+            if not output:
+                output = res.stderr.strip()
+            match = re.search(r'([0-9][0-9\.]+)', output)
+            if not match:
+                m = 'Could not find a version number in output of {!r}'
+                raise mesonlib.MesonException(m.format(raw_cmd))
+            self.cached_version = match.group(1)
+        return self.cached_version
+
+    @classmethod
+    def from_bin_list(cls, env: 'Environment', for_machine: MachineChoice, name: str) -> 'ExternalProgram':
+        # There is a static `for_machine` for this class because the binary
+        # always runs on the build platform. (Its host platform is our build
+        # platform.) But some external programs have a target platform, so this
+        # is what we are specifying here.
+        command = env.lookup_binary_entry(for_machine, name)
+        if command is None:
+            return NonExistingExternalProgram()
+        return cls.from_entry(name, command)
+
+    @staticmethod
+    @functools.lru_cache(maxsize=None)
+    def _windows_sanitize_path(path: str) -> str:
+        # Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc.
+        if 'USERPROFILE' not in os.environ:
+            return path
+        # The WindowsApps directory is a bit of a problem. It contains
+        # some zero-sized .exe files which have "reparse points", that
+        # might either launch an installed application, or might open
+        # a page in the Windows Store to download the application.
+        #
+        # To handle the case where the python interpreter we're
+        # running on came from the Windows Store, if we see the
+        # WindowsApps path in the search path, replace it with
+        # dirname(sys.executable).
+        appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps'
+        paths = []
+        for each in path.split(os.pathsep):
+            if Path(each) != appstore_dir:
+                paths.append(each)
+            elif 'WindowsApps' in sys.executable:
+                paths.append(os.path.dirname(sys.executable))
+        return os.pathsep.join(paths)
+
+    @staticmethod
+    def from_entry(name: str, command: T.Union[str, T.List[str]]) -> 'ExternalProgram':
+        if isinstance(command, list):
+            if len(command) == 1:
+                command = command[0]
+        # We cannot do any searching if the command is a list, and we don't
+        # need to search if the path is an absolute path.
+        if isinstance(command, list) or os.path.isabs(command):
+            if isinstance(command, str):
+                command = [command]
+            return ExternalProgram(name, command=command, silent=True)
+        assert isinstance(command, str)
+        # Search for the command using the specified string!
+        return ExternalProgram(command, silent=True)
+
+    @staticmethod
+    def _shebang_to_cmd(script: str) -> T.Optional[T.List[str]]:
+        """
+        Check if the file has a shebang and manually parse it to figure out
+        the interpreter to use. This is useful if the script is not executable
+        or if we're on Windows (which does not understand shebangs).
+        """
+        try:
+            with open(script, encoding='utf-8') as f:
+                first_line = f.readline().strip()
+            if first_line.startswith('#!'):
+                # In a shebang, everything before the first space is assumed to
+                # be the command to run and everything after the first space is
+                # the single argument to pass to that command. So we must split
+                # exactly once.
+                commands = first_line[2:].split('#')[0].strip().split(maxsplit=1)
+                if mesonlib.is_windows():
+                    # Windows does not have UNIX paths so remove them,
+                    # but don't remove Windows paths
+                    if commands[0].startswith('/'):
+                        commands[0] = commands[0].split('/')[-1]
+                    if len(commands) > 0 and commands[0] == 'env':
+                        commands = commands[1:]
+                    # Windows does not ship python3.exe, but we know the path to it
+                    if len(commands) > 0 and commands[0] == 'python3':
+                        commands = mesonlib.python_command + commands[1:]
+                elif mesonlib.is_haiku():
+                    # Haiku does not have /usr, but a lot of scripts assume that
+                    # /usr/bin/env always exists. Detect that case and run the
+                    # script with the interpreter after it.
+                    if commands[0] == '/usr/bin/env':
+                        commands = commands[1:]
+                    # We know what python3 is, we're running on it
+                    if len(commands) > 0 and commands[0] == 'python3':
+                        commands = mesonlib.python_command + commands[1:]
+                else:
+                    # Replace python3 with the actual python3 that we are using
+                    if commands[0] == '/usr/bin/env' and commands[1] == 'python3':
+                        commands = mesonlib.python_command + commands[2:]
+                    elif commands[0].split('/')[-1] == 'python3':
+                        commands = mesonlib.python_command + commands[1:]
+                return commands + [script]
+        except Exception as e:
+            mlog.debug(str(e))
+        mlog.debug(f'Unusable script {script!r}')
+        return None
+
+    def _is_executable(self, path: str) -> bool:
+        suffix = os.path.splitext(path)[-1].lower()[1:]
+        execmask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+        if mesonlib.is_windows():
+            if suffix in self.windows_exts:
+                return True
+        elif os.stat(path).st_mode & execmask:
+            return not os.path.isdir(path)
+        return False
+
+    def _search_dir(self, name: str, search_dir: T.Optional[str]) -> T.Optional[list]:
+        if search_dir is None:
+            return None
+        trial = os.path.join(search_dir, name)
+        if os.path.exists(trial):
+            if self._is_executable(trial):
+                return [trial]
+            # Now getting desperate. Maybe it is a script file that is
+            # a) not marked executable, or
+            # b) we are on Windows, so it can't be executed directly.
+            return self._shebang_to_cmd(trial)
+        else:
+            if mesonlib.is_windows():
+                for ext in self.windows_exts:
+                    trial_ext = f'{trial}.{ext}'
+                    if os.path.exists(trial_ext):
+                        return [trial_ext]
+        return None
+
+    def _search_windows_special_cases(self, name: str, command: str) -> T.List[T.Optional[str]]:
+        '''
+        Lots of weird Windows quirks:
+        1. PATH search for @name returns files with extensions from PATHEXT,
+           but only self.windows_exts are executable without an interpreter.
+        2. @name might be an absolute path to an executable, but without the
+           extension. This works inside MinGW so people use it a lot.
+        3. The script is specified without an extension, in which case we have
+           to manually search in PATH.
+        4. More special-casing for the shebang inside the script.
+        '''
+        if command:
+            # On Windows, even if the PATH search returned a full path, we can't be
+            # sure that it can be run directly if it's not a native executable.
+            # For instance, interpreted scripts sometimes need to be run explicitly
+            # with an interpreter if the file association is not done properly.
+            name_ext = os.path.splitext(command)[1]
+            if name_ext[1:].lower() in self.windows_exts:
+                # Good, it can be directly executed
+                return [command]
+            # Try to extract the interpreter from the shebang
+            commands = self._shebang_to_cmd(command)
+            if commands:
+                return commands
+            return [None]
+        # Maybe the name is an absolute path to a native Windows
+        # executable, but without the extension. This is technically wrong,
+        # but many people do it because it works in the MinGW shell.
+        if os.path.isabs(name):
+            for ext in self.windows_exts:
+                command = f'{name}.{ext}'
+                if os.path.exists(command):
+                    return [command]
+        # On Windows, interpreted scripts must have an extension otherwise they
+        # cannot be found by a standard PATH search. So we do a custom search
+        # where we manually search for a script with a shebang in PATH.
+        search_dirs = self._windows_sanitize_path(os.environ.get('PATH', '')).split(';')
+        for search_dir in search_dirs:
+            commands = self._search_dir(name, search_dir)
+            if commands:
+                return commands
+        return [None]
+
+    def _search(self, name: str, search_dir: T.Optional[str]) -> T.List[T.Optional[str]]:
+        '''
+        Search in the specified dir for the specified executable by name
+        and if not found search in PATH
+        '''
+        commands = self._search_dir(name, search_dir)
+        if commands:
+            return commands
+        # If there is a directory component, do not look in PATH
+        if os.path.dirname(name) and not os.path.isabs(name):
+            return [None]
+        # Do a standard search in PATH
+        path = os.environ.get('PATH', None)
+        if mesonlib.is_windows() and path:
+            path = self._windows_sanitize_path(path)
+        command = shutil.which(name, path=path)
+        if mesonlib.is_windows():
+            return self._search_windows_special_cases(name, command)
+        # On UNIX-like platforms, shutil.which() is enough to find
+        # all executables whether in PATH or with an absolute path
+        return [command]
+
+    def found(self) -> bool:
+        return self.command[0] is not None
+
+    def get_command(self) -> T.List[str]:
+        return self.command[:]
+
+    def get_path(self) -> T.Optional[str]:
+        return self.path
+
+    def get_name(self) -> str:
+        return self.name
+
+
+class NonExistingExternalProgram(ExternalProgram):  # lgtm [py/missing-call-to-init]
+    "A program that will never exist"
+
+    def __init__(self, name: str = 'nonexistingprogram') -> None:
+        self.name = name
+        self.command = [None]
+        self.path = None
+
+    def __repr__(self) -> str:
+        r = '<{} {!r} -> {!r}>'
+        return r.format(self.__class__.__name__, self.name, self.command)
+
+    def found(self) -> bool:
+        return False
+
+
+class EmptyExternalProgram(ExternalProgram):  # lgtm [py/missing-call-to-init]
+    '''
+    A program object that returns an empty list of commands. Used for cases
+    such as a cross file exe_wrapper to represent that it's not required.
+    '''
+
+    def __init__(self) -> None:
+        self.name = None
+        self.command = []
+        self.path = None
+
+    def __repr__(self) -> str:
+        r = '<{} {!r} -> {!r}>'
+        return r.format(self.__class__.__name__, self.name, self.command)
+
+    def found(self) -> bool:
+        return True
+
+
+class OverrideProgram(ExternalProgram):
+
+    """A script overriding a program."""
+
+
+def find_external_program(env: 'Environment', for_machine: MachineChoice, name: str,
+                          display_name: str, default_names: T.List[str],
+                          allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+    """Find an external program, chcking the cross file plus any default options."""
+    # Lookup in cross or machine file.
+    potential_cmd = env.lookup_binary_entry(for_machine, name)
+    if potential_cmd is not None:
+        mlog.debug(f'{display_name} binary for {for_machine} specified from cross file, native file, '
+                   f'or env var as {potential_cmd}')
+        yield ExternalProgram.from_entry(name, potential_cmd)
+        # We never fall back if the user-specified option is no good, so
+        # stop returning options.
+        return
+    mlog.debug(f'{display_name} binary missing from cross or native file, or env var undefined.')
+    # Fallback on hard-coded defaults, if a default binary is allowed for use
+    # with cross targets, or if this is not a cross target
+    if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
+        for potential_path in default_names:
+            mlog.debug(f'Trying a default {display_name} fallback at', potential_path)
+            yield ExternalProgram(potential_path, silent=True)
+    else:
+        mlog.debug('Default target is not allowed for cross use')
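+
+# Rough usage sketch (the caller-side names here are illustrative, not taken
+# from this module):
+#
+#     for prog in find_external_program(env, MachineChoice.HOST, 'pkgconfig',
+#                                       'pkg-config', ['pkg-config']):
+#         if prog.found():
+#             break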
diff --git a/meson/mesonbuild/rewriter.py b/meson/mesonbuild/rewriter.py
new file mode 100644
index 000000000..3f661a4b6
--- /dev/null
+++ b/meson/mesonbuild/rewriter.py
@@ -0,0 +1,970 @@
+#!/usr/bin/env python3
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This tool is used to manipulate an existing Meson build definition.
+#
+# - add a file to a target
+# - remove files from a target
+# - move targets
+# - reindent?
+
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstPrinter
+from mesonbuild.mesonlib import MesonException
+from . import mlog, environment
+from functools import wraps
+from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, BaseNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, StringNode
+import json, os, re, sys
+import typing as T
+
+class RewriterException(MesonException):
+    pass
+
+def add_arguments(parser, formatter=None):
+    parser.add_argument('-s', '--sourcedir', type=str, default='.', metavar='SRCDIR', help='Path to source directory.')
+    parser.add_argument('-V', '--verbose', action='store_true', default=False, help='Enable verbose output')
+    parser.add_argument('-S', '--skip-errors', dest='skip', action='store_true', default=False, help='Skip errors instead of aborting')
+    subparsers = parser.add_subparsers(dest='type', title='Rewriter commands', description='Rewrite command to execute')
+
+    # Target
+    tgt_parser = subparsers.add_parser('target', help='Modify a target', formatter_class=formatter)
+    tgt_parser.add_argument('-s', '--subdir', default='', dest='subdir', help='Subdirectory of the new target (only for the "add_target" action)')
+    tgt_parser.add_argument('--type', dest='tgt_type', choices=rewriter_keys['target']['target_type'][2], default='executable',
+                            help='Type of the target to add (only for the "add_target" action)')
+    tgt_parser.add_argument('target', help='Name or ID of the target')
+    tgt_parser.add_argument('operation', choices=['add', 'rm', 'add_target', 'rm_target', 'info'],
+                            help='Action to execute')
+    tgt_parser.add_argument('sources', nargs='*', help='Sources to add/remove')
+
+    # KWARGS
+    kw_parser = subparsers.add_parser('kwargs', help='Modify keyword arguments', formatter_class=formatter)
+    kw_parser.add_argument('operation', choices=rewriter_keys['kwargs']['operation'][2],
+                           help='Action to execute')
+    kw_parser.add_argument('function', choices=list(rewriter_func_kwargs.keys()),
+                           help='Function type to modify')
+    kw_parser.add_argument('id', help='ID of the function to modify (can be anything for "project")')
+    kw_parser.add_argument('kwargs', nargs='*', help='Pairs of keyword and value')
+
+    # Default options
+    def_parser = subparsers.add_parser('default-options', help='Modify the project default options', formatter_class=formatter)
+    def_parser.add_argument('operation', choices=rewriter_keys['default_options']['operation'][2],
+                            help='Action to execute')
+    def_parser.add_argument('options', nargs='*', help='Key, value pairs of configuration option')
+
+    # JSON file/command
+    cmd_parser = subparsers.add_parser('command', help='Execute a JSON array of commands', formatter_class=formatter)
+    cmd_parser.add_argument('json', help='JSON string or file to execute')
+
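+# A few command lines the parser above is intended to accept (target, source
+# and option names are made up for illustration):
+#
+#     meson rewrite target mytarget add src/foo.c src/bar.c
+#     meson rewrite kwargs set project / version 1.2.0
+#     meson rewrite default-options set cpp_std=c++17
+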
+class RequiredKeys:
+    def __init__(self, keys):
+        self.keys = keys
+
+    def __call__(self, f):
+        @wraps(f)
+        def wrapped(*wrapped_args, **wrapped_kwargs):
+            assert(len(wrapped_args) >= 2)
+            cmd = wrapped_args[1]
+            for key, val in self.keys.items():
+                typ = val[0] # The type of the value
+                default = val[1] # The default value -- None is required
+                choices = val[2] # Valid choices -- None is for everything
+                if key not in cmd:
+                    if default is not None:
+                        cmd[key] = default
+                    else:
+                        raise RewriterException('Key "{}" is missing in object for {}'
+                                                .format(key, f.__name__))
+                if not isinstance(cmd[key], typ):
+                    raise RewriterException('Invalid type of "{}". Required is {} but provided was {}'
+                                            .format(key, typ.__name__, type(cmd[key]).__name__))
+                if choices is not None:
+                    assert(isinstance(choices, list))
+                    if cmd[key] not in choices:
+                        raise RewriterException('Invalid value of "{}": Possible values are {} but provided was "{}"'
+                                                .format(key, choices, cmd[key]))
+            return f(*wrapped_args, **wrapped_kwargs)
+
+        return wrapped
+
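+# Each value in the key specification handed to RequiredKeys is a
+# (type, default, choices) tuple: a None default makes the key mandatory and a
+# None choices entry allows any value. Hypothetical example:
+#
+#     @RequiredKeys({'operation': (str, None, ['set', 'delete']),
+#                    'options': (dict, {}, None)})
+#     def process_something(self, cmd):
+#         ...
+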
+class MTypeBase:
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        if node is None:
+            self.node = self._new_node()  # lgtm [py/init-calls-subclass] (node creation does not depend on base class state)
+        else:
+            self.node = node
+        self.node_type = None
+        for i in self.supported_nodes():  # lgtm [py/init-calls-subclass] (listing nodes does not depend on base class state)
+            if isinstance(self.node, i):
+                self.node_type = i
+
+    def _new_node(self):
+        # Overwrite in derived class
+        raise RewriterException('Internal error: _new_node of MTypeBase was called')
+
+    def can_modify(self):
+        return self.node_type is not None
+
+    def get_node(self):
+        return self.node
+
+    def supported_nodes(self):
+        # Overwrite in derived class
+        return []
+
+    def set_value(self, value):
+        # Overwrite in derived class
+        mlog.warning('Cannot set the value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+    def add_value(self, value):
+        # Overwrite in derived class
+        mlog.warning('Cannot add a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+    def remove_value(self, value):
+        # Overwrite in derived class
+        mlog.warning('Cannot remove a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+    def remove_regex(self, value):
+        # Overwrite in derived class
+        mlog.warning('Cannot remove a regex in type', mlog.bold(type(self).__name__), '--> skipping')
+
+class MTypeStr(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return StringNode(Token('', '', 0, 0, 0, None, ''))
+
+    def supported_nodes(self):
+        return [StringNode]
+
+    def set_value(self, value):
+        self.node.value = str(value)
+
+class MTypeBool(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return BooleanNode(Token('', '', 0, 0, 0, None, False))
+
+    def supported_nodes(self):
+        return [BooleanNode]
+
+    def set_value(self, value):
+        self.node.value = bool(value)
+
+class MTypeID(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return IdNode(Token('', '', 0, 0, 0, None, ''))
+
+    def supported_nodes(self):
+        return [IdNode]
+
+    def set_value(self, value):
+        self.node.value = str(value)
+
+class MTypeList(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return ArrayNode(ArgumentNode(Token('', '', 0, 0, 0, None, '')), 0, 0, 0, 0)
+
+    def _new_element_node(self, value):
+        # Overwrite in derived class
+        raise RewriterException('Internal error: _new_element_node of MTypeList was called')
+
+    def _ensure_array_node(self):
+        if not isinstance(self.node, ArrayNode):
+            tmp = self.node
+            self.node = self._new_node()
+            self.node.args.arguments += [tmp]
+
+    def _check_is_equal(self, node, value) -> bool:
+        # Overwrite in derived class
+        return False
+
+    def _check_regex_matches(self, node, regex: str) -> bool:
+        # Overwrite in derived class
+        return False
+
+    def get_node(self):
+        if isinstance(self.node, ArrayNode):
+            if len(self.node.args.arguments) == 1:
+                return self.node.args.arguments[0]
+        return self.node
+
+    def supported_element_nodes(self):
+        # Overwrite in derived class
+        return []
+
+    def supported_nodes(self):
+        return [ArrayNode] + self.supported_element_nodes()
+
+    def set_value(self, value):
+        if not isinstance(value, list):
+            value = [value]
+        self._ensure_array_node()
+        self.node.args.arguments = [] # Remove all current nodes
+        for i in value:
+            self.node.args.arguments += [self._new_element_node(i)]
+
+    def add_value(self, value):
+        if not isinstance(value, list):
+            value = [value]
+        self._ensure_array_node()
+        for i in value:
+            self.node.args.arguments += [self._new_element_node(i)]
+
+    def _remove_helper(self, value, equal_func):
+        def check_remove_node(node):
+            for j in value:
+                if equal_func(node, j):
+                    return True
+            return False
+
+        if not isinstance(value, list):
+            value = [value]
+        self._ensure_array_node()
+        # Keep only the arguments that do not match any value to be removed
+        kept = []
+        for i in self.node.args.arguments:
+            if not check_remove_node(i):
+                kept += [i]
+        self.node.args.arguments = kept
+
+    def remove_value(self, value):
+        self._remove_helper(value, self._check_is_equal)
+
+    def remove_regex(self, regex: str):
+        self._remove_helper(regex, self._check_regex_matches)
+
+class MTypeStrList(MTypeList):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_element_node(self, value):
+        return StringNode(Token('', '', 0, 0, 0, None, str(value)))
+
+    def _check_is_equal(self, node, value) -> bool:
+        if isinstance(node, StringNode):
+            return node.value == value
+        return False
+
+    def _check_regex_matches(self, node, regex: str) -> bool:
+        if isinstance(node, StringNode):
+            return re.match(regex, node.value) is not None
+        return False
+
+    def supported_element_nodes(self):
+        return [StringNode]
+
+class MTypeIDList(MTypeList):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_element_node(self, value):
+        return IdNode(Token('', '', 0, 0, 0, None, str(value)))
+
+    def _check_is_equal(self, node, value) -> bool:
+        if isinstance(node, IdNode):
+            return node.value == value
+        return False
+
+    def _check_regex_matches(self, node, regex: str) -> bool:
+        if isinstance(node, StringNode):
+            return re.match(regex, node.value) is not None
+        return False
+
+    def supported_element_nodes(self):
+        return [IdNode]
+
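+# Sketch of how the MType wrappers above are driven by the rewriter (the node
+# variable is hypothetical):
+#
+#     m = MTypeStrList(existing_kwarg_node)
+#     if m.can_modify():
+#         m.add_value(['foo.c', 'bar.c'])
+#         new_node = m.get_node()
+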
+rewriter_keys = {
+    'default_options': {
+        'operation': (str, None, ['set', 'delete']),
+        'options': (dict, {}, None)
+    },
+    'kwargs': {
+        'function': (str, None, None),
+        'id': (str, None, None),
+        'operation': (str, None, ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']),
+        'kwargs': (dict, {}, None)
+    },
+    'target': {
+        'target': (str, None, None),
+        'operation': (str, None, ['src_add', 'src_rm', 'target_rm', 'target_add', 'info']),
+        'sources': (list, [], None),
+        'subdir': (str, '', None),
+        'target_type': (str, 'executable', ['both_libraries', 'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library']),
+    }
+}
+
+rewriter_func_kwargs = {
+    'dependency': {
+        'language': MTypeStr,
+        'method': MTypeStr,
+        'native': MTypeBool,
+        'not_found_message': MTypeStr,
+        'required': MTypeBool,
+        'static': MTypeBool,
+        'version': MTypeStrList,
+        'modules': MTypeStrList
+    },
+    'target': {
+        'build_by_default': MTypeBool,
+        'build_rpath': MTypeStr,
+        'dependencies': MTypeIDList,
+        'gui_app': MTypeBool,
+        'link_with': MTypeIDList,
+        'export_dynamic': MTypeBool,
+        'implib': MTypeBool,
+        'install': MTypeBool,
+        'install_dir': MTypeStr,
+        'install_rpath': MTypeStr,
+        'pie': MTypeBool
+    },
+    'project': {
+        'default_options': MTypeStrList,
+        'meson_version': MTypeStr,
+        'license': MTypeStrList,
+        'subproject_dir': MTypeStr,
+        'version': MTypeStr
+    }
+}
+
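+# Example of a JSON command list as consumed via the "command" sub-command and
+# Rewriter.process() (target and kwarg values are illustrative):
+#
+#     [
+#         {"type": "target", "target": "myexe", "operation": "src_add",
+#          "sources": ["new.c"]},
+#         {"type": "kwargs", "function": "project", "id": "/",
+#          "operation": "set", "kwargs": {"version": "2.0"}}
+#     ]
+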
+class Rewriter:
+    def __init__(self, sourcedir: str, generator: str = 'ninja', skip_errors: bool = False):
+        self.sourcedir = sourcedir
+        self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+        self.skip_errors = skip_errors
+        self.modified_nodes = []
+        self.to_remove_nodes = []
+        self.to_add_nodes = []
+        self.functions = {
+            'default_options': self.process_default_options,
+            'kwargs': self.process_kwargs,
+            'target': self.process_target,
+        }
+        self.info_dump = None
+
+    def analyze_meson(self):
+        mlog.log('Analyzing meson file:', mlog.bold(os.path.join(self.sourcedir, environment.build_filename)))
+        self.interpreter.analyze()
+        mlog.log('  -- Project:', mlog.bold(self.interpreter.project_data['descriptive_name']))
+        mlog.log('  -- Version:', mlog.cyan(self.interpreter.project_data['version']))
+
+    def add_info(self, cmd_type: str, cmd_id: str, data: dict):
+        if self.info_dump is None:
+            self.info_dump = {}
+        if cmd_type not in self.info_dump:
+            self.info_dump[cmd_type] = {}
+        self.info_dump[cmd_type][cmd_id] = data
+
+    def print_info(self):
+        if self.info_dump is None:
+            return
+        sys.stderr.write(json.dumps(self.info_dump, indent=2))
+
+    def on_error(self):
+        if self.skip_errors:
+            return mlog.cyan('-->'), mlog.yellow('skipping')
+        return mlog.cyan('-->'), mlog.red('aborting')
+
+    def handle_error(self):
+        if self.skip_errors:
+            return None
+        raise MesonException('Rewriting the meson.build failed')
+
+    def find_target(self, target: str):
+        def check_list(name: str) -> T.List[BaseNode]:
+            result = []
+            for i in self.interpreter.targets:
+                if name == i['name'] or name == i['id']:
+                    result += [i]
+            return result
+
+        targets = check_list(target)
+        if targets:
+            if len(targets) == 1:
+                return targets[0]
+            else:
+                mlog.error('There are multiple targets matching', mlog.bold(target))
+                for i in targets:
+                    mlog.error('  -- Target name', mlog.bold(i['name']), 'with ID', mlog.bold(i['id']))
+                mlog.error('Please try again with the unique ID of the target', *self.on_error())
+                self.handle_error()
+                return None
+
+        # Check the assignments
+        tgt = None
+        if target in self.interpreter.assignments:
+            node = self.interpreter.assignments[target]
+            if isinstance(node, FunctionNode):
+                if node.func_name in ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']:
+                    tgt = self.interpreter.assign_vals[target]
+
+        return tgt
+
+    def find_dependency(self, dependency: str):
+        def check_list(name: str):
+            for i in self.interpreter.dependencies:
+                if name == i['name']:
+                    return i
+            return None
+
+        dep = check_list(dependency)
+        if dep is not None:
+            return dep
+
+        # Check the assignments
+        if dependency in self.interpreter.assignments:
+            node = self.interpreter.assignments[dependency]
+            if isinstance(node, FunctionNode):
+                if node.func_name in ['dependency']:
+                    name = self.interpreter.flatten_args(node.args)[0]
+                    dep = check_list(name)
+
+        return dep
+
+    @RequiredKeys(rewriter_keys['default_options'])
+    def process_default_options(self, cmd):
+        # First, remove the old values
+        kwargs_cmd = {
+            'function': 'project',
+            'id': "/",
+            'operation': 'remove_regex',
+            'kwargs': {
+                'default_options': [f'{x}=.*' for x in cmd['options'].keys()]
+            }
+        }
+        self.process_kwargs(kwargs_cmd)
+
+        # Then add the new values
+        if cmd['operation'] != 'set':
+            return
+
+        kwargs_cmd['operation'] = 'add'
+        kwargs_cmd['kwargs']['default_options'] = []
+
+        cdata = self.interpreter.coredata
+        options = {str(k): v for k, v in cdata.options.items()}
+
+        for key, val in sorted(cmd['options'].items()):
+            if key not in options:
+                mlog.error('Unknown option', mlog.bold(key), *self.on_error())
+                self.handle_error()
+                continue
+
+            try:
+                val = options[key].validate_value(val)
+            except MesonException as e:
+                mlog.error('Unable to set', mlog.bold(key), mlog.red(str(e)), *self.on_error())
+                self.handle_error()
+                continue
+
+            kwargs_cmd['kwargs']['default_options'] += [f'{key}={val}']
+
+        self.process_kwargs(kwargs_cmd)
+
+    @RequiredKeys(rewriter_keys['kwargs'])
+    def process_kwargs(self, cmd):
+        mlog.log('Processing function type', mlog.bold(cmd['function']), 'with id', mlog.cyan("'" + cmd['id'] + "'"))
+        if cmd['function'] not in rewriter_func_kwargs:
+            mlog.error('Unknown function type', cmd['function'], *self.on_error())
+            return self.handle_error()
+        kwargs_def = rewriter_func_kwargs[cmd['function']]
+
+        # Find the function node to modify
+        node = None
+        arg_node = None
+        if cmd['function'] == 'project':
+            # msys bash may expand '/' to a path. It will mangle '//' to '/'
+            # but in order to keep usage shell-agnostic, also allow `//` as
+            # the function ID such that it will work in both msys bash and
+            # other shells.
+            if cmd['id'] not in {'/', '//'}:
+                mlog.error('The ID for the function type project must be "/" or "//" not "' + cmd['id'] + '"', *self.on_error())
+                return self.handle_error()
+            node = self.interpreter.project_node
+            arg_node = node.args
+        elif cmd['function'] == 'target':
+            tmp = self.find_target(cmd['id'])
+            if tmp:
+                node = tmp['node']
+                arg_node = node.args
+        elif cmd['function'] == 'dependency':
+            tmp = self.find_dependency(cmd['id'])
+            if tmp:
+                node = tmp['node']
+                arg_node = node.args
+        if not node:
+            mlog.error('Unable to find the function node')
+        assert(isinstance(node, FunctionNode))
+        assert(isinstance(arg_node, ArgumentNode))
+        # Transform the key nodes to plain strings
+        arg_node.kwargs = {k.value: v for k, v in arg_node.kwargs.items()}
+
+        # Print kwargs info
+        if cmd['operation'] == 'info':
+            info_data = {}
+            for key, val in sorted(arg_node.kwargs.items()):
+                info_data[key] = None
+                if isinstance(val, ElementaryNode):
+                    info_data[key] = val.value
+                elif isinstance(val, ArrayNode):
+                    data_list = []
+                    for i in val.args.arguments:
+                        element = None
+                        if isinstance(i, ElementaryNode):
+                            element = i.value
+                        data_list += [element]
+                    info_data[key] = data_list
+
+            self.add_info('kwargs', '{}#{}'.format(cmd['function'], cmd['id']), info_data)
+            return # Nothing else to do
+
+        # Modify the kwargs
+        num_changed = 0
+        for key, val in sorted(cmd['kwargs'].items()):
+            if key not in kwargs_def:
+                mlog.error('Cannot modify unknown kwarg', mlog.bold(key), *self.on_error())
+                self.handle_error()
+                continue
+
+            # Remove the key from the kwargs
+            if cmd['operation'] == 'delete':
+                if key in arg_node.kwargs:
+                    mlog.log('  -- Deleting', mlog.bold(key), 'from the kwargs')
+                    del arg_node.kwargs[key]
+                    num_changed += 1
+                else:
+                    mlog.log('  -- Key', mlog.bold(key), 'is already deleted')
+                continue
+
+            if key not in arg_node.kwargs:
+                arg_node.kwargs[key] = None
+            modifyer = kwargs_def[key](arg_node.kwargs[key])
+            if not modifyer.can_modify():
+                mlog.log('  -- Skipping', mlog.bold(key), 'because it is too complex to modify')
+                continue
+
+            # Apply the operation
+            val_str = str(val)
+            if cmd['operation'] == 'set':
+                mlog.log('  -- Setting', mlog.bold(key), 'to', mlog.yellow(val_str))
+                modifyer.set_value(val)
+            elif cmd['operation'] == 'add':
+                mlog.log('  -- Adding', mlog.yellow(val_str), 'to', mlog.bold(key))
+                modifyer.add_value(val)
+            elif cmd['operation'] == 'remove':
+                mlog.log('  -- Removing', mlog.yellow(val_str), 'from', mlog.bold(key))
+                modifyer.remove_value(val)
+            elif cmd['operation'] == 'remove_regex':
+                mlog.log('  -- Removing all values matching', mlog.yellow(val_str), 'from', mlog.bold(key))
+                modifyer.remove_regex(val)
+
+            # Write back the result
+            arg_node.kwargs[key] = modifyer.get_node()
+            num_changed += 1
+
+        # Convert the keys back to IdNode's
+        arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in arg_node.kwargs.items()}
+        if num_changed > 0 and node not in self.modified_nodes:
+            self.modified_nodes += [node]
+
+    def find_assignment_node(self, node: BaseNode) -> AssignmentNode:
+        if node.ast_id and node.ast_id in self.interpreter.reverse_assignment:
+            return self.interpreter.reverse_assignment[node.ast_id]
+        return None
+
+    @RequiredKeys(rewriter_keys['target'])
+    def process_target(self, cmd):
+        mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation']))
+        target = self.find_target(cmd['target'])
+        if target is None and cmd['operation'] != 'target_add':
+            mlog.error('Unknown target', mlog.bold(cmd['target']), *self.on_error())
+            return self.handle_error()
+
+        # Make source paths relative to the current subdir
+        def rel_source(src: str) -> str:
+            subdir = os.path.abspath(os.path.join(self.sourcedir, target['subdir']))
+            if os.path.isabs(src):
+                return os.path.relpath(src, subdir)
+            elif not os.path.exists(src):
+                return src # Trust the user when the source doesn't exist
+            # Make sure that the path is relative to the subdir
+            return os.path.relpath(os.path.abspath(src), subdir)
+
+        if target is not None:
+            cmd['sources'] = [rel_source(x) for x in cmd['sources']]
+
+        # Utility function to get a list of the sources from a node
+        def arg_list_from_node(n):
+            args = []
+            if isinstance(n, FunctionNode):
+                args = list(n.args.arguments)
+                if n.func_name in build_target_functions:
+                    args.pop(0)
+            elif isinstance(n, ArrayNode):
+                args = n.args.arguments
+            elif isinstance(n, ArgumentNode):
+                args = n.arguments
+            return args
+
+        to_sort_nodes = []
+
+        if cmd['operation'] == 'src_add':
+            node = None
+            if target['sources']:
+                node = target['sources'][0]
+            else:
+                node = target['node']
+            assert(node is not None)
+
+            # Generate the current source list
+            src_list = []
+            for i in target['sources']:
+                for j in arg_list_from_node(i):
+                    if isinstance(j, StringNode):
+                        src_list += [j.value]
+
+            # Generate the new String nodes
+            to_append = []
+            for i in sorted(set(cmd['sources'])):
+                if i in src_list:
+                    mlog.log('  -- Source', mlog.green(i), 'is already defined for the target --> skipping')
+                    continue
+                mlog.log('  -- Adding source', mlog.green(i), 'at',
+                         mlog.yellow(f'{node.filename}:{node.lineno}'))
+                token = Token('string', node.filename, 0, 0, 0, None, i)
+                to_append += [StringNode(token)]
+
+            # Append to the AST at the right place
+            arg_node = None
+            if isinstance(node, (FunctionNode, ArrayNode)):
+                arg_node = node.args
+            elif isinstance(node, ArgumentNode):
+                arg_node = node
+            assert(arg_node is not None)
+            arg_node.arguments += to_append
+
+            # Mark the node as modified
+            if arg_node not in to_sort_nodes and not isinstance(node, FunctionNode):
+                to_sort_nodes += [arg_node]
+            if node not in self.modified_nodes:
+                self.modified_nodes += [node]
+
+        elif cmd['operation'] == 'src_rm':
+            # Helper to find the exact string node and its parent
+            def find_node(src):
+                for i in target['sources']:
+                    for j in arg_list_from_node(i):
+                        if isinstance(j, StringNode):
+                            if j.value == src:
+                                return i, j
+                return None, None
+
+            for i in cmd['sources']:
+                # Try to find the node with the source string
+                root, string_node = find_node(i)
+                if root is None:
+                    mlog.warning('  -- Unable to find source', mlog.green(i), 'in the target')
+                    continue
+
+                # Remove the found string node from the argument list
+                arg_node = None
+                if isinstance(root, (FunctionNode, ArrayNode)):
+                    arg_node = root.args
+                elif isinstance(root, ArgumentNode):
+                    arg_node = root
+                assert(arg_node is not None)
+                mlog.log('  -- Removing source', mlog.green(i), 'from',
+                         mlog.yellow(f'{string_node.filename}:{string_node.lineno}'))
+                arg_node.arguments.remove(string_node)
+
+                # Mark the node as modified
+                if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
+                    to_sort_nodes += [arg_node]
+                if root not in self.modified_nodes:
+                    self.modified_nodes += [root]
+
+        elif cmd['operation'] == 'target_add':
+            if target is not None:
+                mlog.error('Can not add target', mlog.bold(cmd['target']), 'because it already exists', *self.on_error())
+                return self.handle_error()
+
+            id_base = re.sub(r'[- ]', '_', cmd['target'])
+            target_id = id_base + ('_exe' if cmd['target_type'] == 'executable' else '_lib')
+            source_id = id_base + '_sources'
+            filename = os.path.join(cmd['subdir'], environment.build_filename)
+
+            # Build src list
+            src_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            src_arr_node = ArrayNode(src_arg_node, 0, 0, 0, 0)
+            src_far_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            src_fun_node = FunctionNode(filename, 0, 0, 0, 0, 'files', src_far_node)
+            src_ass_node = AssignmentNode(filename, 0, 0, source_id, src_fun_node)
+            src_arg_node.arguments = [StringNode(Token('string', filename, 0, 0, 0, None, x)) for x in cmd['sources']]
+            src_far_node.arguments = [src_arr_node]
+
+            # Build target
+            tgt_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            tgt_fun_node = FunctionNode(filename, 0, 0, 0, 0, cmd['target_type'], tgt_arg_node)
+            tgt_ass_node = AssignmentNode(filename, 0, 0, target_id, tgt_fun_node)
+            tgt_arg_node.arguments = [
+                StringNode(Token('string', filename, 0, 0, 0, None, cmd['target'])),
+                IdNode(Token('string', filename, 0, 0, 0, None, source_id))
+            ]
+
+            src_ass_node.accept(AstIndentationGenerator())
+            tgt_ass_node.accept(AstIndentationGenerator())
+            self.to_add_nodes += [src_ass_node, tgt_ass_node]
+
+        elif cmd['operation'] == 'target_rm':
+            to_remove = self.find_assignment_node(target['node'])
+            if to_remove is None:
+                to_remove = target['node']
+            self.to_remove_nodes += [to_remove]
+            mlog.log('  -- Removing target', mlog.green(cmd['target']), 'at',
+                     mlog.yellow(f'{to_remove.filename}:{to_remove.lineno}'))
+
+        elif cmd['operation'] == 'info':
+            # List all sources in the target
+            src_list = []
+            for i in target['sources']:
+                for j in arg_list_from_node(i):
+                    if isinstance(j, StringNode):
+                        src_list += [j.value]
+            test_data = {
+                'name': target['name'],
+                'sources': src_list
+            }
+            self.add_info('target', target['id'], test_data)
+
+        # Sort files
+        for i in to_sort_nodes:
+            convert = lambda text: int(text) if text.isdigit() else text.lower()
+            alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
+            path_sorter = lambda key: ([(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))])
+
+            unknown = [x for x in i.arguments if not isinstance(x, StringNode)]
+            sources = [x for x in i.arguments if isinstance(x, StringNode)]
+            sources = sorted(sources, key=lambda x: path_sorter(x.value))
+            i.arguments = unknown + sources
+
+    def process(self, cmd):
+        if 'type' not in cmd:
+            raise RewriterException('Command has no key "type"')
+        if cmd['type'] not in self.functions:
+            raise RewriterException('Unknown command "{}". Supported commands are: {}'
+                                    .format(cmd['type'], list(self.functions.keys())))
+        self.functions[cmd['type']](cmd)
+
+    def apply_changes(self):
+        assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.modified_nodes))
+        assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.to_remove_nodes))
+        assert(all(isinstance(x, (ArrayNode, FunctionNode)) for x in self.modified_nodes))
+        assert(all(isinstance(x, (ArrayNode, AssignmentNode, FunctionNode)) for x in self.to_remove_nodes))
+        # Sort based on line and column in reversed order
+        work_nodes = [{'node': x, 'action': 'modify'} for x in self.modified_nodes]
+        work_nodes += [{'node': x, 'action': 'rm'} for x in self.to_remove_nodes]
+        work_nodes = list(sorted(work_nodes, key=lambda x: (x['node'].lineno, x['node'].colno), reverse=True))
+        work_nodes += [{'node': x, 'action': 'add'} for x in self.to_add_nodes]
+
+        # Generating the new replacement string
+        str_list = []
+        for i in work_nodes:
+            new_data = ''
+            if i['action'] == 'modify' or i['action'] == 'add':
+                printer = AstPrinter()
+                i['node'].accept(printer)
+                printer.post_process()
+                new_data = printer.result.strip()
+            data = {
+                'file': i['node'].filename,
+                'str': new_data,
+                'node': i['node'],
+                'action': i['action']
+            }
+            str_list += [data]
+
+        # Load build files
+        files = {}
+        for i in str_list:
+            if i['file'] in files:
+                continue
+            fpath = os.path.realpath(os.path.join(self.sourcedir, i['file']))
+            fdata = ''
+            # Create an empty file if it does not exist
+            if not os.path.exists(fpath):
+                with open(fpath, 'w', encoding='utf-8'):
+                    pass
+            with open(fpath, encoding='utf-8') as fp:
+                fdata = fp.read()
+
+            # Generate line offsets numbers
+            m_lines = fdata.splitlines(True)
+            offset = 0
+            line_offsets = []
+            for j in m_lines:
+                line_offsets += [offset]
+                offset += len(j)
+
+            files[i['file']] = {
+                'path': fpath,
+                'raw': fdata,
+                'offsets': line_offsets
+            }
+
+        # Replace in source code
+        def remove_node(i):
+            offsets = files[i['file']]['offsets']
+            raw = files[i['file']]['raw']
+            node = i['node']
+            line = node.lineno - 1
+            col = node.colno
+            start = offsets[line] + col
+            end = start
+            if isinstance(node, (ArrayNode, FunctionNode)):
+                end = offsets[node.end_lineno - 1] + node.end_colno
+
+            # Only removal is supported for assignments
+            elif isinstance(node, AssignmentNode) and i['action'] == 'rm':
+                if isinstance(node.value, (ArrayNode, FunctionNode)):
+                    remove_node({'file': i['file'], 'str': '', 'node': node.value, 'action': 'rm'})
+                    raw = files[i['file']]['raw']
+                while raw[end] != '=':
+                    end += 1
+                end += 1 # Handle the '='
+                while raw[end] in [' ', '\n', '\t']:
+                    end += 1
+
+            files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:]
+
+        for i in str_list:
+            if i['action'] in ['modify', 'rm']:
+                remove_node(i)
+            elif i['action'] in ['add']:
+                files[i['file']]['raw'] += i['str'] + '\n'
+
+        # Write the files back
+        for key, val in files.items():
+            mlog.log('Rewriting', mlog.yellow(key))
+            with open(val['path'], 'w', encoding='utf-8') as fp:
+                fp.write(val['raw'])
+
+target_operation_map = {
+    'add': 'src_add',
+    'rm': 'src_rm',
+    'add_target': 'target_add',
+    'rm_target': 'target_rm',
+    'info': 'info',
+}
+
+def list_to_dict(in_list: T.List[str]) -> T.Dict[str, str]:
+    result = {}
+    it = iter(in_list)
+    try:
+        for i in it:
+            # calling next(it) is not a mistake, we're taking the next element from
+            # the iterator, avoiding the need to preprocess it into a sequence of
+            # key value pairs.
+            result[i] = next(it)
+    except StopIteration:
+        raise TypeError('in_list parameter of list_to_dict must have an even length.')
+    return result
+
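+# For example (hypothetical input), list_to_dict(['version', '1.0', 'license',
+# 'MIT']) yields {'version': '1.0', 'license': 'MIT'}; an odd-length list
+# raises TypeError.
+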
+def generate_target(options) -> T.List[dict]:
+    return [{
+        'type': 'target',
+        'target': options.target,
+        'operation': target_operation_map[options.operation],
+        'sources': options.sources,
+        'subdir': options.subdir,
+        'target_type': options.tgt_type,
+    }]
+
+def generate_kwargs(options) -> T.List[dict]:
+    return [{
+        'type': 'kwargs',
+        'function': options.function,
+        'id': options.id,
+        'operation': options.operation,
+        'kwargs': list_to_dict(options.kwargs),
+    }]
+
+def generate_def_opts(options) -> T.List[dict]:
+    return [{
+        'type': 'default_options',
+        'operation': options.operation,
+        'options': list_to_dict(options.options),
+    }]
+
+def generate_cmd(options) -> T.List[dict]:
+    if os.path.exists(options.json):
+        with open(options.json, encoding='utf-8') as fp:
+            return json.load(fp)
+    else:
+        return json.loads(options.json)
+
+# Map options.type to the actual type name
+cli_type_map = {
+    'target': generate_target,
+    'tgt': generate_target,
+    'kwargs': generate_kwargs,
+    'default-options': generate_def_opts,
+    'def': generate_def_opts,
+    'command': generate_cmd,
+    'cmd': generate_cmd,
+}
+
+def run(options):
+    if not options.verbose:
+        mlog.set_quiet()
+
+    try:
+        rewriter = Rewriter(options.sourcedir, skip_errors=options.skip)
+        rewriter.analyze_meson()
+
+        if options.type is None:
+            mlog.error('No command specified')
+            return 1
+
+        commands = cli_type_map[options.type](options)
+
+        if not isinstance(commands, list):
+            raise TypeError('Command is not a list')
+
+        for i in commands:
+            if not isinstance(i, dict):
+                raise TypeError('Command is not an object')
+            rewriter.process(i)
+
+        rewriter.apply_changes()
+        rewriter.print_info()
+        return 0
+    finally:
+        mlog.set_verbose()
diff --git a/meson/mesonbuild/scripts/__init__.py b/meson/mesonbuild/scripts/__init__.py
new file mode 100644
index 000000000..2edbe8899
--- /dev/null
+++ b/meson/mesonbuild/scripts/__init__.py
@@ -0,0 +1,21 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: consider switching to pathlib for this
+def destdir_join(d1: str, d2: str) -> str:
+    # c:\destdir + c:\prefix must produce c:\destdir\prefix
+    if len(d1) > 1 and d1[1] == ':' \
+            and len(d2) > 1 and d2[1] == ':':
+        return d1 + d2[2:]
+    return d1 + d2
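+
+# Illustrative results of the join above (paths are made up):
+#
+#     destdir_join('/staging', '/usr/local/bin')   -> '/staging/usr/local/bin'
+#     destdir_join(r'c:\destdir', r'c:\prefix')    -> r'c:\destdir\prefix'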
diff --git a/meson/mesonbuild/scripts/clangformat.py b/meson/mesonbuild/scripts/clangformat.py
new file mode 100644
index 000000000..8e61b5591
--- /dev/null
+++ b/meson/mesonbuild/scripts/clangformat.py
@@ -0,0 +1,91 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import subprocess
+import itertools
+import fnmatch
+from pathlib import Path
+from concurrent.futures import ThreadPoolExecutor
+
+from ..environment import detect_clangformat
+from ..compilers import lang_suffixes
+import typing as T
+
+def parse_pattern_file(fname: Path) -> T.List[str]:
+    patterns = []
+    try:
+        with fname.open(encoding='utf-8') as f:
+            for line in f:
+                pattern = line.strip()
+                if pattern and not pattern.startswith('#'):
+                    patterns.append(pattern)
+    except FileNotFoundError:
+        pass
+    return patterns
+
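+# The pattern files read above contain one glob pattern per line, with '#'
+# starting a comment. A hypothetical .clang-format-include might look like:
+#
+#     # only format these subtrees
+#     src/**/*
+#     include/**/*
+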
+def run_clang_format(exelist: T.List[str], fname: Path, check: bool) -> subprocess.CompletedProcess:
+    if check:
+        original = fname.read_bytes()
+    before = fname.stat().st_mtime
+    args = ['-style=file', '-i', str(fname)]
+    ret = subprocess.run(exelist + args)
+    after = fname.stat().st_mtime
+    if before != after:
+        print('File reformatted: ', fname)
+        if check:
+            # Restore the original if only checking.
+            fname.write_bytes(original)
+            ret.returncode = 1
+    return ret
+
+def clangformat(exelist: T.List[str], srcdir: Path, builddir: Path, check: bool) -> int:
+    patterns = parse_pattern_file(srcdir / '.clang-format-include')
+    if not patterns:
+        patterns = ['**/*']
+    globs = [srcdir.glob(p) for p in patterns]
+    patterns = parse_pattern_file(srcdir / '.clang-format-ignore')
+    ignore = [str(builddir / '*')]
+    ignore.extend([str(srcdir / p) for p in patterns])
+    suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
+    suffixes.add('h')
+    suffixes = {f'.{s}' for s in suffixes}
+    futures = []
+    returncode = 0
+    with ThreadPoolExecutor() as e:
+        for f in itertools.chain(*globs):
+            strf = str(f)
+            if f.is_dir() or f.suffix not in suffixes or \
+                any(fnmatch.fnmatch(strf, i) for i in ignore):
+                continue
+            futures.append(e.submit(run_clang_format, exelist, f, check))
+        returncode = max([x.result().returncode for x in futures])
+    return returncode
+
+def run(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--check', action='store_true')
+    parser.add_argument('sourcedir')
+    parser.add_argument('builddir')
+    options = parser.parse_args(args)
+
+    srcdir = Path(options.sourcedir)
+    builddir = Path(options.builddir)
+
+    exelist = detect_clangformat()
+    if not exelist:
+        print('Could not find clang-format')
+        return 1
+
+    return clangformat(exelist, srcdir, builddir, options.check)
diff --git a/meson/mesonbuild/scripts/clangtidy.py b/meson/mesonbuild/scripts/clangtidy.py
new file mode 100644
index 000000000..8d366c84d
--- /dev/null
+++ b/meson/mesonbuild/scripts/clangtidy.py
@@ -0,0 +1,57 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+import subprocess
+import shutil
+import os
+import re
+from concurrent.futures import ThreadPoolExecutor
+import typing as T
+
+from ..compilers import lang_suffixes
+
+def manual_clangtidy(srcdir_name: str, builddir_name: str) -> int:
+    srcdir = pathlib.Path(srcdir_name)
+    suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
+    suffixes.add('h')
+    futures = []
+    returncode = 0
+    with ThreadPoolExecutor() as e:
+        for f in (x for suff in suffixes for x in srcdir.glob('**/*.' + suff)):
+            if f.is_dir():
+                continue
+            strf = str(f)
+            if strf.startswith(builddir_name):
+                continue
+            futures.append(e.submit(subprocess.run, ['clang-tidy', '-p', builddir_name, strf]))
+        returncode = max([x.result().returncode for x in futures])
+    return returncode
+
+def clangtidy(srcdir_name: str, builddir_name: str) -> int:
+    run_clang_tidy = None
+    for rct in ('run-clang-tidy', 'run-clang-tidy.py'):
+        if shutil.which(rct):
+            run_clang_tidy = rct
+            break
+    if run_clang_tidy:
+        return subprocess.run([run_clang_tidy, '-p', builddir_name, '^(?!' + re.escape(builddir_name + os.path.sep) +').*$']).returncode
+    else:
+        print('Could not find run-clang-tidy, running checks manually.')
+        return manual_clangtidy(srcdir_name, builddir_name)
+
+def run(args: T.List[str]) -> int:
+    srcdir_name = args[0]
+    builddir_name = args[1]
+    return clangtidy(srcdir_name, builddir_name)
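+
+# Typically driven through Meson's clang-tidy run target; a direct call would
+# be roughly (paths illustrative):
+#
+#     run(['/path/to/source', '/path/to/builddir'])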
diff --git a/meson/mesonbuild/scripts/cleantrees.py b/meson/mesonbuild/scripts/cleantrees.py
new file mode 100644
index 000000000..1a387538e
--- /dev/null
+++ b/meson/mesonbuild/scripts/cleantrees.py
@@ -0,0 +1,44 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import shutil
+import pickle
+import typing as T
+
+def rmtrees(build_dir: str, trees: T.List[str]) -> None:
+    for t in trees:
+        # Never delete trees outside of the builddir
+        if os.path.isabs(t):
+            print(f'Cannot delete dir with absolute path {t!r}')
+            continue
+        bt = os.path.join(build_dir, t)
+        # Skip if it doesn't exist, or if it is not a directory
+        if os.path.isdir(bt):
+            shutil.rmtree(bt, ignore_errors=True)
+
+def run(args: T.List[str]) -> int:
+    if len(args) != 1:
+        print('Cleaner script for Meson. Do not run it on your own, please.')
+        print('cleantrees.py <data-file>')
+        return 1
+    with open(args[0], 'rb') as f:
+        data = pickle.load(f)
+    rmtrees(data.build_dir, data.trees)
+    # Never fail cleaning
+    return 0
+
+if __name__ == '__main__':
+    run(sys.argv[1:])
diff --git a/meson/mesonbuild/scripts/cmake_run_ctgt.py b/meson/mesonbuild/scripts/cmake_run_ctgt.py
new file mode 100755
index 000000000..dfb70d10f
--- /dev/null
+++ b/meson/mesonbuild/scripts/cmake_run_ctgt.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+import shutil
+import sys
+from pathlib import Path
+import typing as T
+
+def run(argsv: T.List[str]) -> int:
+    commands = [[]]  # type: T.List[T.List[str]]
+    SEPARATOR = ';;;'
+
+    # Generate CMD parameters
+    parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+    parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
+    parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+    parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+    parser.add_argument('commands', nargs=argparse.REMAINDER, help=f'A "{SEPARATOR}" separated list of commands')
+
+    # Parse
+    args = parser.parse_args(argsv)
+    directory = Path(args.directory)
+
+    dummy_target = None
+    if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+        dummy_target = Path(args.outputs[0])
+    elif len(args.outputs) != len(args.original_outputs):
+        print('Length of output list and original output list differ')
+        return 1
+
+    for i in args.commands:
+        if i == SEPARATOR:
+            commands += [[]]
+            continue
+
+        i = i.replace('"', '')  # Remove leftover quotes
+        commands[-1] += [i]
+
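+    # For instance (made-up argv), the remainder
+    # ['cmake', '-E', 'copy', 'in.txt', 'out.txt', ';;;', 'tar', 'xf', 'a.tar']
+    # is split above into two commands:
+    # [['cmake', '-E', 'copy', 'in.txt', 'out.txt'], ['tar', 'xf', 'a.tar']]
+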
+    # Execute
+    for i in commands:
+        # Skip empty lists
+        if not i:
+            continue
+
+        cmd = []
+        stdout = None
+        stderr = None
+        capture_file = ''
+
+        for j in i:
+            if j in ['>', '>>']:
+                stdout = subprocess.PIPE
+                continue
+            elif j in ['&>', '&>>']:
+                stdout = subprocess.PIPE
+                stderr = subprocess.STDOUT
+                continue
+
+            if stdout is not None or stderr is not None:
+                capture_file += j
+            else:
+                cmd += [j]
+
+        try:
+            directory.mkdir(parents=True, exist_ok=True)
+
+            res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True)
+            if capture_file:
+                out_file = directory / capture_file
+                out_file.write_bytes(res.stdout)
+        except subprocess.CalledProcessError:
+            return 1
+
+    if dummy_target:
+        dummy_target.touch()
+        return 0
+
+    # Copy outputs
+    zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs])
+    for expected, generated in zipped_outputs:
+        do_copy = False
+        if not expected.exists():
+            if not generated.exists():
+                print('Unable to find generated file. This can cause the build to fail:')
+                print(generated)
+                do_copy = False
+            else:
+                do_copy = True
+        elif generated.exists():
+            if generated.stat().st_mtime > expected.stat().st_mtime:
+                do_copy = True
+
+        if do_copy:
+            if expected.exists():
+                expected.unlink()
+            shutil.copyfile(str(generated), str(expected))
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/cmd_or_ps.ps1 b/meson/mesonbuild/scripts/cmd_or_ps.ps1
new file mode 100644
index 000000000..ccef8e84d
--- /dev/null
+++ b/meson/mesonbuild/scripts/cmd_or_ps.ps1
@@ -0,0 +1,22 @@
+# Copied from the GStreamer project
+# Author: Seungha Yang 
+
+$i=1
+$ppid=(gwmi win32_process -Filter "processid='$pid'").parentprocessid
+$pname=(Get-Process -id $ppid).Name
+While($true) {
+  if($pname -eq "cmd" -Or $pname -eq "powershell") {
+    Write-Host ("{0}.exe" -f $pname)
+    Break
+  }
+
+  # 10 iterations should be sufficient
+  if($i -gt 10) {
+    Break
+  }
+
+  # not found yet, check the grandparent process
+  $ppid=(gwmi win32_process -Filter "processid='$ppid'").parentprocessid
+  $pname=(Get-Process -id $ppid).Name
+  $i++
+}
diff --git a/meson/mesonbuild/scripts/coverage.py b/meson/mesonbuild/scripts/coverage.py
new file mode 100644
index 000000000..5d552c301
--- /dev/null
+++ b/meson/mesonbuild/scripts/coverage.py
@@ -0,0 +1,173 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import environment, mesonlib
+
+import argparse, re, sys, os, subprocess, pathlib, stat
+import typing as T
+
+def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
+    outfiles = []
+    exitcode = 0
+
+    (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
+
+    # gcovr >= 4.2 requires a different syntax for out-of-source builds
+    if gcovr_new_rootdir:
+        gcovr_base_cmd = [gcovr_exe, '-r', source_root, build_root]
+    else:
+        gcovr_base_cmd = [gcovr_exe, '-r', build_root]
+
+    if use_llvm_cov:
+        gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+    else:
+        gcov_exe_args = []
+
+    if not outputs or 'xml' in outputs:
+        if gcovr_exe:
+            subprocess.check_call(gcovr_base_cmd +
+                                  ['-x',
+                                   '-e', re.escape(subproject_root),
+                                   '-o', os.path.join(log_dir, 'coverage.xml')
+                                   ] + gcov_exe_args)
+            outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
+        elif outputs:
+            print('gcovr >= 3.3 needed to generate Xml coverage report')
+            exitcode = 1
+
+    if not outputs or 'text' in outputs:
+        if gcovr_exe:
+            subprocess.check_call(gcovr_base_cmd +
+                                  ['-e', re.escape(subproject_root),
+                                   '-o', os.path.join(log_dir, 'coverage.txt')
+                                   ] + gcov_exe_args)
+            outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
+        elif outputs:
+            print('gcovr >= 3.3 needed to generate text coverage report')
+            exitcode = 1
+
+    if not outputs or 'html' in outputs:
+        if lcov_exe and genhtml_exe:
+            htmloutdir = os.path.join(log_dir, 'coveragereport')
+            covinfo = os.path.join(log_dir, 'coverage.info')
+            initial_tracefile = covinfo + '.initial'
+            run_tracefile = covinfo + '.run'
+            raw_tracefile = covinfo + '.raw'
+            if use_llvm_cov:
+                # Create a shim to allow using llvm-cov as a gcov tool.
+                if mesonlib.is_windows():
+                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+                    with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_bat:
+                        llvm_cov_bat.write(f'@"{llvm_cov_exe}" gcov %*')
+                else:
+                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+                    with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_sh:
+                        llvm_cov_sh.write(f'#!/usr/bin/env sh\nexec "{llvm_cov_exe}" gcov $@')
+                    os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+                gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+            else:
+                gcov_tool_args = []
+            subprocess.check_call([lcov_exe,
+                                   '--directory', build_root,
+                                   '--capture',
+                                   '--initial',
+                                   '--output-file',
+                                   initial_tracefile] +
+                                  gcov_tool_args)
+            subprocess.check_call([lcov_exe,
+                                   '--directory', build_root,
+                                   '--capture',
+                                   '--output-file', run_tracefile,
+                                   '--no-checksum',
+                                   '--rc', 'lcov_branch_coverage=1'] +
+                                  gcov_tool_args)
+            # Join initial and test results.
+            subprocess.check_call([lcov_exe,
+                                   '-a', initial_tracefile,
+                                   '-a', run_tracefile,
+                                   '--rc', 'lcov_branch_coverage=1',
+                                   '-o', raw_tracefile])
+            # Remove all directories outside the source_root from the covinfo
+            subprocess.check_call([lcov_exe,
+                                   '--extract', raw_tracefile,
+                                   os.path.join(source_root, '*'),
+                                   '--rc', 'lcov_branch_coverage=1',
+                                   '--output-file', covinfo])
+            # Remove all directories inside subproject dir
+            subprocess.check_call([lcov_exe,
+                                   '--remove', covinfo,
+                                   os.path.join(subproject_root, '*'),
+                                   '--rc', 'lcov_branch_coverage=1',
+                                   '--output-file', covinfo])
+            subprocess.check_call([genhtml_exe,
+                                   '--prefix', build_root,
+                                   '--prefix', source_root,
+                                   '--output-directory', htmloutdir,
+                                   '--title', 'Code coverage',
+                                   '--legend',
+                                   '--show-details',
+                                   '--branch-coverage',
+                                   covinfo])
+            outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+        elif gcovr_exe:
+            htmloutdir = os.path.join(log_dir, 'coveragereport')
+            if not os.path.isdir(htmloutdir):
+                os.mkdir(htmloutdir)
+            subprocess.check_call(gcovr_base_cmd +
+                                  ['--html',
+                                   '--html-details',
+                                   '--print-summary',
+                                   '-e', re.escape(subproject_root),
+                                   '-o', os.path.join(htmloutdir, 'index.html'),
+                                   ])
+            outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+        elif outputs:
+            print('lcov/genhtml or gcovr >= 3.3 needed to generate Html coverage report')
+            exitcode = 1
+
+    if not outputs and not outfiles:
+        print('Need gcovr or lcov/genhtml to generate any coverage reports')
+        exitcode = 1
+
+    if outfiles:
+        print('')
+        for (filetype, path) in outfiles:
+            print(filetype + ' coverage report can be found at', path.as_uri())
+
+    return exitcode
+
+def run(args: T.List[str]) -> int:
+    if not os.path.isfile('build.ninja'):
+        print('Coverage currently only works with the Ninja backend.')
+        return 1
+    parser = argparse.ArgumentParser(description='Generate coverage reports')
+    parser.add_argument('--text', dest='outputs', action='append_const',
+                        const='text', help='generate Text report')
+    parser.add_argument('--xml', dest='outputs', action='append_const',
+                        const='xml', help='generate Xml report')
+    parser.add_argument('--html', dest='outputs', action='append_const',
+                        const='html', help='generate Html report')
+    parser.add_argument('--use_llvm_cov', action='store_true',
+                        help='use llvm-cov')
+    parser.add_argument('source_root')
+    parser.add_argument('subproject_root')
+    parser.add_argument('build_root')
+    parser.add_argument('log_dir')
+    options = parser.parse_args(args)
+    return coverage(options.outputs, options.source_root,
+                    options.subproject_root, options.build_root,
+                    options.log_dir, options.use_llvm_cov)
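+
+# Example invocation (hypothetical paths; Meson normally runs this for the
+# coverage targets of the Ninja backend rather than by hand):
+#   coverage.py --html /path/to/source /path/to/source/subprojects \
+#       /path/to/build /path/to/build/meson-logs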
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/delwithsuffix.py b/meson/mesonbuild/scripts/delwithsuffix.py
new file mode 100644
index 000000000..873db0d40
--- /dev/null
+++ b/meson/mesonbuild/scripts/delwithsuffix.py
@@ -0,0 +1,36 @@
+# Copyright 2013 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+    if len(args) != 2:
+        print('delwithsuffix.py <root of subdir to process> <suffix to delete>')
+        sys.exit(1)
+
+    topdir = args[0]
+    suffix = args[1]
+    if suffix[0] != '.':
+        suffix = '.' + suffix
+
+    for (root, _, files) in os.walk(topdir):
+        for f in files:
+            if f.endswith(suffix):
+                fullname = os.path.join(root, f)
+                os.unlink(fullname)
+    return 0
+
+if __name__ == '__main__':
+    run(sys.argv[1:])
diff --git a/meson/mesonbuild/scripts/depfixer.py b/meson/mesonbuild/scripts/depfixer.py
new file mode 100644
index 000000000..52c7ba969
--- /dev/null
+++ b/meson/mesonbuild/scripts/depfixer.py
@@ -0,0 +1,509 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+import os
+import stat
+import struct
+import shutil
+import subprocess
+import typing as T
+
+from ..mesonlib import OrderedSet
+
+SHT_STRTAB = 3
+DT_NEEDED = 1
+DT_RPATH = 15
+DT_RUNPATH = 29
+DT_STRTAB = 5
+DT_SONAME = 14
+DT_MIPS_RLD_MAP_REL = 1879048245
+
+# Global cache for tools
+INSTALL_NAME_TOOL = False
+
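+# Struct format strings (and their sizes in bytes) for the primitive ELF data
+# types, parametrized on pointer width and endianness.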
+class DataSizes:
+    def __init__(self, ptrsize: int, is_le: bool) -> None:
+        if is_le:
+            p = '<'
+        else:
+            p = '>'
+        self.Half = p + 'h'
+        self.HalfSize = 2
+        self.Word = p + 'I'
+        self.WordSize = 4
+        self.Sword = p + 'i'
+        self.SwordSize = 4
+        if ptrsize == 64:
+            self.Addr = p + 'Q'
+            self.AddrSize = 8
+            self.Off = p + 'Q'
+            self.OffSize = 8
+            self.XWord = p + 'Q'
+            self.XWordSize = 8
+            self.Sxword = p + 'q'
+            self.SxwordSize = 8
+        else:
+            self.Addr = p + 'I'
+            self.AddrSize = 4
+            self.Off = p + 'I'
+            self.OffSize = 4
+
+class DynamicEntry(DataSizes):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+        super().__init__(ptrsize, is_le)
+        self.ptrsize = ptrsize
+        if ptrsize == 64:
+            self.d_tag = struct.unpack(self.Sxword, ifile.read(self.SxwordSize))[0]
+            self.val = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0]
+            self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+    def write(self, ofile: T.BinaryIO) -> None:
+        if self.ptrsize == 64:
+            ofile.write(struct.pack(self.Sxword, self.d_tag))
+            ofile.write(struct.pack(self.XWord, self.val))
+        else:
+            ofile.write(struct.pack(self.Sword, self.d_tag))
+            ofile.write(struct.pack(self.Word, self.val))
+
+class SectionHeader(DataSizes):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+        super().__init__(ptrsize, is_le)
+        if ptrsize == 64:
+            is_64 = True
+        else:
+            is_64 = False
+        # Elf64_Word
+        self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+        # Elf64_Word
+        self.sh_type = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+        # Elf64_Xword
+        if is_64:
+            self.sh_flags = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_flags = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+        # Elf64_Addr
+        self.sh_addr = struct.unpack(self.Addr, ifile.read(self.AddrSize))[0]
+        # Elf64_Off
+        self.sh_offset = struct.unpack(self.Off, ifile.read(self.OffSize))[0]
+        # Elf64_Xword
+        if is_64:
+            self.sh_size = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_size = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+        # Elf64_Word
+        self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+        # Elf64_Word
+        self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+        # Elf64_Xword
+        if is_64:
+            self.sh_addralign = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_addralign = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+        # Elf64_Xword
+        if is_64:
+            self.sh_entsize = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+class Elf(DataSizes):
+    def __init__(self, bfile: str, verbose: bool = True) -> None:
+        self.bfile = bfile
+        self.verbose = verbose
+        self.sections = []  # type: T.List[SectionHeader]
+        self.dynamic = []   # type: T.List[DynamicEntry]
+        self.open_bf(bfile)
+        try:
+            (self.ptrsize, self.is_le) = self.detect_elf_type()
+            super().__init__(self.ptrsize, self.is_le)
+            self.parse_header()
+            self.parse_sections()
+            self.parse_dynamic()
+        except (struct.error, RuntimeError):
+            self.close_bf()
+            raise
+
+    def open_bf(self, bfile: str) -> None:
+        self.bf = None
+        self.bf_perms = None
+        try:
+            self.bf = open(bfile, 'r+b')
+        except PermissionError as e:
+            self.bf_perms = stat.S_IMODE(os.lstat(bfile).st_mode)
+            os.chmod(bfile, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+            try:
+                self.bf = open(bfile, 'r+b')
+            except Exception:
+                os.chmod(bfile, self.bf_perms)
+                self.bf_perms = None
+                raise e
+
+    def close_bf(self) -> None:
+        if self.bf is not None:
+            if self.bf_perms is not None:
+                os.fchmod(self.bf.fileno(), self.bf_perms)
+                self.bf_perms = None
+            self.bf.close()
+            self.bf = None
+
+    def __enter__(self) -> 'Elf':
+        return self
+
+    def __del__(self) -> None:
+        self.close_bf()
+
+    def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
+        self.close_bf()
+
+    def detect_elf_type(self) -> T.Tuple[int, bool]:
+        data = self.bf.read(6)
+        if data[1:4] != b'ELF':
+            # This script gets called on non-ELF targets too,
+            # so just ignore them.
+            if self.verbose:
+                print('File "%s" is not an ELF file.' % self.bfile)
+            sys.exit(0)
+        if data[4] == 1:
+            ptrsize = 32
+        elif data[4] == 2:
+            ptrsize = 64
+        else:
+            sys.exit('File "%s" has unknown ELF class.' % self.bfile)
+        if data[5] == 1:
+            is_le = True
+        elif data[5] == 2:
+            is_le = False
+        else:
+            sys.exit('File "%s" has unknown ELF endianness.' % self.bfile)
+        return ptrsize, is_le
+
+    def parse_header(self) -> None:
+        self.bf.seek(0)
+        self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
+        self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_machine = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_version = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+        self.e_entry = struct.unpack(self.Addr, self.bf.read(self.AddrSize))[0]
+        self.e_phoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+        self.e_shoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+        self.e_flags = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+        self.e_ehsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_phentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_phnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_shentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+
+    def parse_sections(self) -> None:
+        self.bf.seek(self.e_shoff)
+        for _ in range(self.e_shnum):
+            self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))
+
+    def read_str(self) -> bytes:
+        arr = []
+        x = self.bf.read(1)
+        while x != b'\0':
+            arr.append(x)
+            x = self.bf.read(1)
+            if x == b'':
+                raise RuntimeError('Tried to read past the end of the file')
+        return b''.join(arr)
+
+    def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]:
+        section_names = self.sections[self.e_shstrndx]
+        for i in self.sections:
+            self.bf.seek(section_names.sh_offset + i.sh_name)
+            name = self.read_str()
+            if name == target_name:
+                return i
+        return None
+
+    def parse_dynamic(self) -> None:
+        sec = self.find_section(b'.dynamic')
+        if sec is None:
+            return
+        self.bf.seek(sec.sh_offset)
+        while True:
+            e = DynamicEntry(self.bf, self.ptrsize, self.is_le)
+            self.dynamic.append(e)
+            if e.d_tag == 0:
+                break
+
+    def print_section_names(self) -> None:
+        section_names = self.sections[self.e_shstrndx]
+        for i in self.sections:
+            self.bf.seek(section_names.sh_offset + i.sh_name)
+            name = self.read_str()
+            print(name.decode())
+
+    def print_soname(self) -> None:
+        soname = None
+        strtab = None
+        for i in self.dynamic:
+            if i.d_tag == DT_SONAME:
+                soname = i
+            if i.d_tag == DT_STRTAB:
+                strtab = i
+        if soname is None or strtab is None:
+            print("This file does not have a soname")
+            return
+        self.bf.seek(strtab.val + soname.val)
+        print(self.read_str())
+
+    def get_entry_offset(self, entrynum: int) -> T.Optional[int]:
+        sec = self.find_section(b'.dynstr')
+        for i in self.dynamic:
+            if i.d_tag == entrynum:
+                res = sec.sh_offset + i.val
+                assert isinstance(res, int)
+                return res
+        return None
+
+    def print_rpath(self) -> None:
+        offset = self.get_entry_offset(DT_RPATH)
+        if offset is None:
+            print("This file does not have an rpath.")
+        else:
+            self.bf.seek(offset)
+            print(self.read_str())
+
+    def print_runpath(self) -> None:
+        offset = self.get_entry_offset(DT_RUNPATH)
+        if offset is None:
+            print("This file does not have a runpath.")
+        else:
+            self.bf.seek(offset)
+            print(self.read_str())
+
+    def print_deps(self) -> None:
+        sec = self.find_section(b'.dynstr')
+        deps = []
+        for i in self.dynamic:
+            if i.d_tag == DT_NEEDED:
+                deps.append(i)
+        for i in deps:
+            offset = sec.sh_offset + i.val
+            self.bf.seek(offset)
+            name = self.read_str()
+            print(name)
+
+    def fix_deps(self, prefix: bytes) -> None:
+        sec = self.find_section(b'.dynstr')
+        deps = []
+        for i in self.dynamic:
+            if i.d_tag == DT_NEEDED:
+                deps.append(i)
+        for i in deps:
+            offset = sec.sh_offset + i.val
+            self.bf.seek(offset)
+            name = self.read_str()
+            if name.startswith(prefix):
+                basename = name.split(b'/')[-1]
+                padding = b'\0' * (len(name) - len(basename))
+                newname = basename + padding
+                assert(len(newname) == len(name))
+                self.bf.seek(offset)
+                self.bf.write(newname)
+
+    def fix_rpath(self, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes) -> None:
+        # The path to search for can be either rpath or runpath.
+        # Fix both of them to be sure.
+        self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH)
+        self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
+
+    def fix_rpathtype_entry(self, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes, entrynum: int) -> None:
+        rp_off = self.get_entry_offset(entrynum)
+        if rp_off is None:
+            if self.verbose:
+                print('File does not have rpath. It should be a fully static executable.')
+            return
+        self.bf.seek(rp_off)
+
+        old_rpath = self.read_str()
+        # Some rpath entries may come from multiple sources.
+        # Only add each one once.
+        new_rpaths = OrderedSet()  # type: OrderedSet[bytes]
+        if new_rpath:
+            new_rpaths.update(new_rpath.split(b':'))
+        if old_rpath:
+            # Filter out build-only rpath entries
+            # added by get_link_dep_subdirs() or
+            # specified by user with build_rpath.
+            for rpath_dir in old_rpath.split(b':'):
+                if not (rpath_dir in rpath_dirs_to_remove or
+                        rpath_dir == (b'X' * len(rpath_dir))):
+                    if rpath_dir:
+                        new_rpaths.add(rpath_dir)
+
+        # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc.
+        new_rpath = b':'.join(new_rpaths)
+
+        if len(old_rpath) < len(new_rpath):
+            msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath.decode('utf-8'), new_rpath.decode('utf-8'))
+            sys.exit(msg)
+        # The linker does read-only string deduplication. If there is a
+        # string that shares a suffix with the rpath, they might get
+        # dedupped. This means changing the rpath string might break something
+        # completely unrelated. This has already happened once with X.org.
+        # Thus we want to keep this change as small as possible to minimize
+        # the chance of obliterating other strings. It might still happen
+        # but our behavior is identical to what chrpath does and it has
+        # been in use for ages so based on that this should be rare.
+        if not new_rpath:
+            self.remove_rpath_entry(entrynum)
+        else:
+            self.bf.seek(rp_off)
+            self.bf.write(new_rpath)
+            self.bf.write(b'\0')
+
+    def remove_rpath_entry(self, entrynum: int) -> None:
+        sec = self.find_section(b'.dynamic')
+        if sec is None:
+            return None
+        for (i, entry) in enumerate(self.dynamic):
+            if entry.d_tag == entrynum:
+                rpentry = self.dynamic[i]
+                rpentry.d_tag = 0
+                self.dynamic = self.dynamic[:i] + self.dynamic[i + 1:] + [rpentry]
+                break
+        # DT_MIPS_RLD_MAP_REL is relative to the offset of the tag. Adjust it accordingly.
+        for entry in self.dynamic[i:]:
+            if entry.d_tag == DT_MIPS_RLD_MAP_REL:
+                entry.val += 2 * (self.ptrsize // 8)
+                break
+        self.bf.seek(sec.sh_offset)
+        for entry in self.dynamic:
+            entry.write(self.bf)
+        return None
+
+def fix_elf(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None:
+    with Elf(fname, verbose) as e:
+        if new_rpath is None:
+            e.print_rpath()
+            e.print_runpath()
+        else:
+            e.fix_rpath(rpath_dirs_to_remove, new_rpath)
+
+def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]:
+    out = subprocess.check_output(['otool', '-l', fname],
+                                  universal_newlines=True,
+                                  stderr=subprocess.DEVNULL)
+    result = []
+    current_cmd = 'FOOBAR'
+    for line in out.split('\n'):
+        line = line.strip()
+        if ' ' not in line:
+            continue
+        key, value = line.strip().split(' ', 1)
+        if key == 'cmd':
+            current_cmd = value
+        if key == 'path' and current_cmd == 'LC_RPATH':
+            rp = value.split('(', 1)[0].strip()
+            result.append(rp)
+    return result
+
+def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None:
+    try:
+        rpaths = get_darwin_rpaths_to_remove(fname)
+    except subprocess.CalledProcessError:
+        # Otool failed, which happens when invoked on a
+        # non-executable target. Just return.
+        return
+    try:
+        args = []
+        if rpaths:
+            # TODO: fix this properly, not totally clear how
+            #
+            # removing rpaths from binaries on macOS has tons of
+            # weird edge cases. For instance, if the user provided
+            # a '-Wl,-rpath' argument in LDFLAGS that happens to
+            # coincide with an rpath generated from a dependency,
+            # this would cause installation failures, as meson would
+            # generate install_name_tool calls with two identical
+            # '-delete_rpath' arguments, which install_name_tool
+            # fails on. Because meson itself ensures that it never
+            # adds duplicate rpaths, duplicate rpaths necessarily
+            # come from user variables. The idea of using OrderedSet
+            # is to remove *at most one* duplicate RPATH entry. This
+            # is not optimal, as it only respects the user's choice
+            # partially: if they provided a non-duplicate '-Wl,-rpath'
+            # argument, it gets removed, if they provided a duplicate
+            # one, it remains in the final binary. A potentially optimal
+            # solution would split all user '-Wl,-rpath' arguments from
+            # LDFLAGS, and later add them back with '-add_rpath'.
+            for rp in OrderedSet(rpaths):
+                args += ['-delete_rpath', rp]
+            subprocess.check_call(['install_name_tool', fname] + args,
+                                  stdout=subprocess.DEVNULL,
+                                  stderr=subprocess.DEVNULL)
+        args = []
+        if new_rpath:
+            args += ['-add_rpath', new_rpath]
+        # Rewrite -install_name @rpath/libfoo.dylib to /path/to/libfoo.dylib
+        if fname.endswith('dylib'):
+            args += ['-id', final_path]
+        if install_name_mappings:
+            for old, new in install_name_mappings.items():
+                args += ['-change', old, new]
+        if args:
+            subprocess.check_call(['install_name_tool', fname] + args,
+                                  stdout=subprocess.DEVNULL,
+                                  stderr=subprocess.DEVNULL)
+    except Exception as err:
+        raise SystemExit(err)
+
+def fix_jar(fname: str) -> None:
+    subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF'])
+    with open('META-INF/MANIFEST.MF', 'r+', encoding='utf-8') as f:
+        lines = f.readlines()
+        f.seek(0)
+        for line in lines:
+            if not line.startswith('Class-Path:'):
+                f.write(line)
+        f.truncate()
+    subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
+
+def fix_rpath(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
+    global INSTALL_NAME_TOOL
+    # Static libraries, import libraries, debug information, headers, etc
+    # never have rpaths
+    # DLLs and EXE currently do not need runtime path fixing
+    if fname.endswith(('.a', '.lib', '.pdb', '.h', '.hpp', '.dll', '.exe')):
+        return
+    try:
+        if fname.endswith('.jar'):
+            fix_jar(fname)
+            return
+        if isinstance(new_rpath, str):
+            new_rpath = new_rpath.encode('utf8')
+        fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
+        return
+    except SystemExit as e:
+        if isinstance(e.code, int) and e.code == 0:
+            pass
+        else:
+            raise
+    # We don't look for this on import because it will do a useless PATH lookup
+    # on non-mac platforms. That can be expensive on some Windows machines
+    # (up to 30ms), which is significant with --only-changed. For details, see:
+    # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
+    if INSTALL_NAME_TOOL is False:
+        INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool'))
+    if INSTALL_NAME_TOOL:
+        if isinstance(new_rpath, bytes):
+            new_rpath = new_rpath.decode('utf8')
+        fix_darwin(fname, new_rpath, final_path, install_name_mappings)
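+
+# Minimal usage sketch (hypothetical paths, shown only for illustration):
+#   fix_rpath('libfoo.so', {b'/abs/build/libdir'}, b'$ORIGIN/../lib',
+#             '/usr/lib/libfoo.so', {}, verbose=False)
+# ELF binaries are patched in place; on macOS the same entry point falls back
+# to install_name_tool via fix_darwin().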
diff --git a/meson/mesonbuild/scripts/depscan.py b/meson/mesonbuild/scripts/depscan.py
new file mode 100644
index 000000000..9fc435b5d
--- /dev/null
+++ b/meson/mesonbuild/scripts/depscan.py
@@ -0,0 +1,201 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+import pickle
+import re
+import os
+import sys
+import typing as T
+
+from ..backend.ninjabackend import TargetDependencyScannerInfo, ninja_quote
+from ..compilers.compilers import lang_suffixes
+
+CPP_IMPORT_RE = re.compile(r'\w*import ([a-zA-Z0-9]+);')
+CPP_EXPORT_RE = re.compile(r'\w*export module ([a-zA-Z0-9]+);')
+
+FORTRAN_INCLUDE_PAT = r"^\s*include\s*['\"](\w+\.\w+)['\"]"
+FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
+FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
+FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
+
+FORTRAN_MODULE_RE = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE)
+FORTRAN_SUBMOD_RE = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+FORTRAN_USE_RE = re.compile(FORTRAN_USE_PAT, re.IGNORECASE)
+
+class DependencyScanner:
+    def __init__(self, pickle_file: str, outfile: str, sources: T.List[str]):
+        with open(pickle_file, 'rb') as pf:
+            self.target_data = pickle.load(pf) # type: TargetDependencyScannerInfo
+        self.outfile = outfile
+        self.sources = sources
+        self.provided_by = {} # type: T.Dict[str, str]
+        self.exports = {} # type: T.Dict[str, str]
+        self.needs = {} # type: T.Dict[str, T.List[str]]
+        self.sources_with_exports = [] # type: T.List[str]
+
+    def scan_file(self, fname: str) -> None:
+        suffix = os.path.splitext(fname)[1][1:].lower()
+        if suffix in lang_suffixes['fortran']:
+            self.scan_fortran_file(fname)
+        elif suffix in lang_suffixes['cpp']:
+            self.scan_cpp_file(fname)
+        else:
+            sys.exit(f'Can not scan files with suffix .{suffix}.')
+
+    def scan_fortran_file(self, fname: str) -> None:
+        fpath = pathlib.Path(fname)
+        modules_in_this_file = set()
+        for line in fpath.read_text(encoding='utf-8').split('\n'):
+            import_match = FORTRAN_USE_RE.match(line)
+            export_match = FORTRAN_MODULE_RE.match(line)
+            submodule_export_match = FORTRAN_SUBMOD_RE.match(line)
+            if import_match:
+                needed = import_match.group(1).lower()
+                # In Fortran a file may contain a use declaration for a module
+                # defined in that same file. Prevent such circular dependencies.
+                if needed not in modules_in_this_file:
+                    if fname in self.needs:
+                        self.needs[fname].append(needed)
+                    else:
+                        self.needs[fname] = [needed]
+            if export_match:
+                exported_module = export_match.group(1).lower()
+                assert(exported_module not in modules_in_this_file)
+                modules_in_this_file.add(exported_module)
+                if exported_module in self.provided_by:
+                    raise RuntimeError(f'Multiple files provide module {exported_module}.')
+                self.sources_with_exports.append(fname)
+                self.provided_by[exported_module] = fname
+                self.exports[fname] = exported_module
+            if submodule_export_match:
+                # Store submodule "Foo" "Bar" as "foo:bar".
+                # A submodule declaration can be both an import and an export declaration:
+                #
+                # submodule (a1:a2) a3
+                #  - requires a1@a2.smod
+                #  - produces a1@a3.smod
+                parent_module_name_full = submodule_export_match.group(1).lower()
+                parent_module_name = parent_module_name_full.split(':')[0]
+                submodule_name = submodule_export_match.group(2).lower()
+                concat_name = f'{parent_module_name}:{submodule_name}'
+                self.sources_with_exports.append(fname)
+                self.provided_by[concat_name] = fname
+                self.exports[fname] = concat_name
+                # Fortran requires that the immediate parent module be built
+                # before the current one. Thus:
+                #
+                # submodule (parent) parent   <- requires parent.mod (really parent.smod, but they are created at the same time)
+                # submodule (a1:a2) a3        <- requires a1@a2.smod
+                #
+                # a3 does not depend on the a1 parent module directly, only transitively.
+                if fname in self.needs:
+                    self.needs[fname].append(parent_module_name_full)
+                else:
+                    self.needs[fname] = [parent_module_name_full]
+
+
+    def scan_cpp_file(self, fname: str) -> None:
+        fpath = pathlib.Path(fname)
+        for line in fpath.read_text(encoding='utf-8').split('\n'):
+            import_match = CPP_IMPORT_RE.match(line)
+            export_match = CPP_EXPORT_RE.match(line)
+            if import_match:
+                needed = import_match.group(1)
+                if fname in self.needs:
+                    self.needs[fname].append(needed)
+                else:
+                    self.needs[fname] = [needed]
+            if export_match:
+                exported_module = export_match.group(1)
+                if exported_module in self.provided_by:
+                    raise RuntimeError(f'Multiple files provide module {exported_module}.')
+                self.sources_with_exports.append(fname)
+                self.provided_by[exported_module] = fname
+                self.exports[fname] = exported_module
+
+    def objname_for(self, src: str) -> str:
+        objname = self.target_data.source2object[src]
+        assert(isinstance(objname, str))
+        return objname
+
+    def module_name_for(self, src: str) -> str:
+        suffix = os.path.splitext(src)[1][1:].lower()
+        if suffix in lang_suffixes['fortran']:
+            exported = self.exports[src]
+            # Module foo:bar goes to a file name foo@bar.smod
+            # Module Foo goes to a file name foo.mod
+            namebase = exported.replace(':', '@')
+            if ':' in exported:
+                extension = 'smod'
+            else:
+                extension = 'mod'
+            return os.path.join(self.target_data.private_dir, f'{namebase}.{extension}')
+        elif suffix in lang_suffixes['cpp']:
+            return '{}.ifc'.format(self.exports[src])
+        else:
+            raise RuntimeError('Unreachable code.')
+
+    def scan(self) -> int:
+        for s in self.sources:
+            self.scan_file(s)
+        with open(self.outfile, 'w', encoding='utf-8') as ofile:
+            ofile.write('ninja_dyndep_version = 1\n')
+            for src in self.sources:
+                objfilename = self.objname_for(src)
+                mods_and_submods_needed = []
+                module_files_generated = []
+                module_files_needed = []
+                if src in self.sources_with_exports:
+                    module_files_generated.append(self.module_name_for(src))
+                if src in self.needs:
+                    for modname in self.needs[src]:
+                        if modname not in self.provided_by:
+                            # Nothing provides this module, we assume that it
+                            # comes from a dependency library somewhere and is
+                            # already built by the time this compilation starts.
+                            pass
+                        else:
+                            mods_and_submods_needed.append(modname)
+
+                for modname in mods_and_submods_needed:
+                    provider_src = self.provided_by[modname]
+                    provider_modfile = self.module_name_for(provider_src)
+                    # Prune self-dependencies
+                    if provider_src != src:
+                        module_files_needed.append(provider_modfile)
+
+                quoted_objfilename = ninja_quote(objfilename, True)
+                quoted_module_files_generated = [ninja_quote(x, True) for x in module_files_generated]
+                quoted_module_files_needed = [ninja_quote(x, True) for x in module_files_needed]
+                if quoted_module_files_generated:
+                    mod_gen = '| ' + ' '.join(quoted_module_files_generated)
+                else:
+                    mod_gen = ''
+                if quoted_module_files_needed:
+                    mod_dep = '| '  + ' '.join(quoted_module_files_needed)
+                else:
+                    mod_dep = ''
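+                # Emit one ninja dyndep edge per object file; a generated line
+                # looks roughly like this (hypothetical file names):
+                #   build foo.f90.o | foo.mod: dyndep | bar.mod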
+                build_line = 'build {} {}: dyndep {}'.format(quoted_objfilename,
+                                                             mod_gen,
+                                                             mod_dep)
+                ofile.write(build_line + '\n')
+        return 0
+
+def run(args: T.List[str]) -> int:
+    pickle_file = args[0]
+    outfile = args[1]
+    sources = args[2:]
+    scanner = DependencyScanner(pickle_file, outfile, sources)
+    return scanner.scan()
diff --git a/meson/mesonbuild/scripts/dirchanger.py b/meson/mesonbuild/scripts/dirchanger.py
new file mode 100644
index 000000000..21632cd89
--- /dev/null
+++ b/meson/mesonbuild/scripts/dirchanger.py
@@ -0,0 +1,29 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''CD into dir given as first argument and execute
+the command given in the rest of the arguments.'''
+
+import os, subprocess, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+    dirname = args[0]
+    command = args[1:]
+
+    os.chdir(dirname)
+    return subprocess.call(command)
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/externalproject.py b/meson/mesonbuild/scripts/externalproject.py
new file mode 100644
index 000000000..a8e3bfe2f
--- /dev/null
+++ b/meson/mesonbuild/scripts/externalproject.py
@@ -0,0 +1,109 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import argparse
+import multiprocessing
+import subprocess
+from pathlib import Path
+import typing as T
+
+from ..mesonlib import Popen_safe
+
+class ExternalProject:
+    def __init__(self, options: argparse.Namespace):
+        self.name = options.name
+        self.src_dir = options.srcdir
+        self.build_dir = options.builddir
+        self.install_dir = options.installdir
+        self.log_dir = options.logdir
+        self.verbose = options.verbose
+        self.stampfile = options.stampfile
+        self.depfile = options.depfile
+        self.make = options.make
+
+    def write_depfile(self) -> None:
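+        # Write a Makefile-style depfile listing every non-hidden file in the
+        # source tree so that the stamp file is considered out of date
+        # whenever any of them changes, e.g. (hypothetical paths):
+        #   /path/to/stamp: \
+        #     src/main.c \
+        #     src/util.c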
+        with open(self.depfile, 'w', encoding='utf-8') as f:
+            f.write(f'{self.stampfile}: \\\n')
+            for dirpath, dirnames, filenames in os.walk(self.src_dir):
+                dirnames[:] = [d for d in dirnames if not d.startswith('.')]
+                for fname in filenames:
+                    if fname.startswith('.'):
+                        continue
+                    path = Path(dirpath, fname)
+                    f.write('  {} \\\n'.format(path.as_posix().replace(' ', '\\ ')))
+
+    def write_stampfile(self) -> None:
+        with open(self.stampfile, 'w', encoding='utf-8') as f:
+            pass
+
+    def gnu_make(self) -> bool:
+        p, o, e = Popen_safe([self.make, '--version'])
+        if p.returncode == 0 and 'GNU Make' in o:
+            return True
+        return False
+
+    def build(self) -> int:
+        make_cmd = [self.make]
+        if self.gnu_make():
+            make_cmd.append('-j' + str(multiprocessing.cpu_count()))
+
+        rc = self._run('build', make_cmd)
+        if rc != 0:
+            return rc
+
+        install_cmd = make_cmd + ['DESTDIR= ' + self.install_dir, 'install']
+        rc = self._run('install', install_cmd)
+        if rc != 0:
+            return rc
+
+        self.write_depfile()
+        self.write_stampfile()
+
+        return 0
+
+    def _run(self, step: str, command: T.List[str]) -> int:
+        m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
+        log_filename = Path(self.log_dir, f'{self.name}-{step}.log')
+        output = None
+        if not self.verbose:
+            output = open(log_filename, 'w', encoding='utf-8')
+            output.write(m + '\n')
+            output.flush()
+        else:
+            print(m)
+        p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output,
+                             cwd=self.build_dir)
+        if p.returncode != 0:
+            m = f'{step} step returned error code {p.returncode}.'
+            if not self.verbose:
+                m += '\nSee logs: ' + str(log_filename)
+            print(m)
+        return p.returncode
+
+def run(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--name')
+    parser.add_argument('--srcdir')
+    parser.add_argument('--builddir')
+    parser.add_argument('--installdir')
+    parser.add_argument('--logdir')
+    parser.add_argument('--make')
+    parser.add_argument('--verbose', action='store_true')
+    parser.add_argument('stampfile')
+    parser.add_argument('depfile')
+
+    options = parser.parse_args(args)
+    ep = ExternalProject(options)
+    return ep.build()
diff --git a/meson/mesonbuild/scripts/gettext.py b/meson/mesonbuild/scripts/gettext.py
new file mode 100644
index 000000000..b1ce6af1c
--- /dev/null
+++ b/meson/mesonbuild/scripts/gettext.py
@@ -0,0 +1,125 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import shutil
+import argparse
+import subprocess
+from . import destdir_join
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--pkgname', default='')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('--langs', default='')
+parser.add_argument('--localedir', default='')
+parser.add_argument('--subdir', default='')
+parser.add_argument('--extra-args', default='')
+
+def read_linguas(src_sub: str) -> T.List[str]:
+    # Syntax of this file is documented here:
+    # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
+    linguas = os.path.join(src_sub, 'LINGUAS')
+    try:
+        langs = []
+        with open(linguas, encoding='utf-8') as f:
+            for line in f:
+                line = line.strip()
+                if line and not line.startswith('#'):
+                    langs += line.split()
+        return langs
+    except (FileNotFoundError, PermissionError):
+        print(f'Could not find file LINGUAS in {src_sub}')
+        return []
+
+def run_potgen(src_sub: str, pkgname: str, datadirs: str, args: T.List[str]) -> int:
+    listfile = os.path.join(src_sub, 'POTFILES.in')
+    if not os.path.exists(listfile):
+        listfile = os.path.join(src_sub, 'POTFILES')
+        if not os.path.exists(listfile):
+            print('Could not find file POTFILES in %s' % src_sub)
+            return 1
+
+    child_env = os.environ.copy()
+    if datadirs:
+        child_env['GETTEXTDATADIRS'] = datadirs
+
+    ofile = os.path.join(src_sub, pkgname + '.pot')
+    return subprocess.call(['xgettext', '--package-name=' + pkgname, '-p', src_sub, '-f', listfile,
+                            '-D', os.environ['MESON_SOURCE_ROOT'], '-k_', '-o', ofile] + args,
+                           env=child_env)
+
+def gen_gmo(src_sub: str, bld_sub: str, langs: T.List[str]) -> int:
+    for l in langs:
+        subprocess.check_call(['msgfmt', os.path.join(src_sub, l + '.po'),
+                               '-o', os.path.join(bld_sub, l + '.gmo')])
+    return 0
+
+def update_po(src_sub: str, pkgname: str, langs: T.List[str]) -> int:
+    potfile = os.path.join(src_sub, pkgname + '.pot')
+    for l in langs:
+        pofile = os.path.join(src_sub, l + '.po')
+        if os.path.exists(pofile):
+            subprocess.check_call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
+        else:
+            subprocess.check_call(['msginit', '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator'])
+    return 0
+
+def do_install(src_sub: str, bld_sub: str, dest: str, pkgname: str, langs: T.List[str]) -> int:
+    for l in langs:
+        srcfile = os.path.join(bld_sub, l + '.gmo')
+        outfile = os.path.join(dest, l, 'LC_MESSAGES',
+                               pkgname + '.mo')
+        tempfile = outfile + '.tmp'
+        os.makedirs(os.path.dirname(outfile), exist_ok=True)
+        shutil.copy2(srcfile, tempfile)
+        os.replace(tempfile, outfile)
+        if not os.getenv('MESON_INSTALL_QUIET', False):
+            print(f'Installing {srcfile} to {outfile}')
+    return 0
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
+    subcmd = options.command
+    langs = options.langs.split('@@') if options.langs else None
+    extra_args = options.extra_args.split('@@') if options.extra_args else []
+    subdir = os.environ.get('MESON_SUBDIR', '')
+    if options.subdir:
+        subdir = options.subdir
+    src_sub = os.path.join(os.environ['MESON_SOURCE_ROOT'], subdir)
+    bld_sub = os.path.join(os.environ['MESON_BUILD_ROOT'], subdir)
+
+    if not langs:
+        langs = read_linguas(src_sub)
+
+    if subcmd == 'pot':
+        return run_potgen(src_sub, options.pkgname, options.datadirs, extra_args)
+    elif subcmd == 'gen_gmo':
+        return gen_gmo(src_sub, bld_sub, langs)
+    elif subcmd == 'update_po':
+        if run_potgen(src_sub, options.pkgname, options.datadirs, extra_args) != 0:
+            return 1
+        return update_po(src_sub, options.pkgname, langs)
+    elif subcmd == 'install':
+        destdir = os.environ.get('DESTDIR', '')
+        dest = destdir_join(destdir, os.path.join(os.environ['MESON_INSTALL_PREFIX'],
+                                                  options.localedir))
+        if gen_gmo(src_sub, bld_sub, langs) != 0:
+            return 1
+        do_install(src_sub, bld_sub, dest, options.pkgname, langs)
+    else:
+        print('Unknown subcommand.')
+        return 1
+    return 0
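+
+# Typical invocations (hypothetical values; the script expects the MESON_*
+# environment variables such as MESON_SOURCE_ROOT and MESON_BUILD_ROOT to be
+# set, which Meson does when it runs these commands itself):
+#   gettext.py pot --pkgname=foo --subdir=po
+#   gettext.py gen_gmo --langs=de@@fr --subdir=po
+#   gettext.py install --pkgname=foo --langs=de@@fr --localedir=share/locale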
diff --git a/meson/mesonbuild/scripts/gtkdochelper.py b/meson/mesonbuild/scripts/gtkdochelper.py
new file mode 100644
index 000000000..153c3d933
--- /dev/null
+++ b/meson/mesonbuild/scripts/gtkdochelper.py
@@ -0,0 +1,295 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import subprocess
+import shutil
+import argparse
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
+from . import destdir_join
+import typing as T
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--sourcedir', dest='sourcedir')
+parser.add_argument('--builddir', dest='builddir')
+parser.add_argument('--subdir', dest='subdir')
+parser.add_argument('--headerdirs', dest='headerdirs')
+parser.add_argument('--mainfile', dest='mainfile')
+parser.add_argument('--modulename', dest='modulename')
+parser.add_argument('--moduleversion', dest='moduleversion')
+parser.add_argument('--htmlargs', dest='htmlargs', default='')
+parser.add_argument('--scanargs', dest='scanargs', default='')
+parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='')
+parser.add_argument('--gobjects-types-file', dest='gobject_typesfile', default='')
+parser.add_argument('--fixxrefargs', dest='fixxrefargs', default='')
+parser.add_argument('--mkdbargs', dest='mkdbargs', default='')
+parser.add_argument('--ld', dest='ld', default='')
+parser.add_argument('--cc', dest='cc', default='')
+parser.add_argument('--ldflags', dest='ldflags', default='')
+parser.add_argument('--cflags', dest='cflags', default='')
+parser.add_argument('--content-files', dest='content_files', default='')
+parser.add_argument('--expand-content-files', dest='expand_content_files', default='')
+parser.add_argument('--html-assets', dest='html_assets', default='')
+parser.add_argument('--ignore-headers', dest='ignore_headers', default='')
+parser.add_argument('--namespace', dest='namespace', default='')
+parser.add_argument('--mode', dest='mode', default='')
+parser.add_argument('--installdir', dest='install_dir')
+parser.add_argument('--run', dest='run', default='')
+for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
+    program_name = 'gtkdoc-' + tool
+    parser.add_argument('--' + program_name, dest=program_name.replace('-', '_'))
+
+def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None:
+    if library_paths is None:
+        library_paths = []
+
+    env = dict(os.environ)
+    if is_windows() or is_cygwin():
+        if 'PATH' in env:
+            library_paths.extend(env['PATH'].split(os.pathsep))
+        env['PATH'] = os.pathsep.join(library_paths)
+    else:
+        if 'LD_LIBRARY_PATH' in env:
+            library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
+        env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
+
+    if is_windows():
+        cmd.insert(0, sys.executable)
+
+    # Put stderr into stdout since we want to print it out anyway.
+    # This preserves the order of messages.
+    p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
+    if p.returncode != 0:
+        err_msg = [f"{cmd!r} failed with status {p.returncode:d}"]
+        if out:
+            err_msg.append(out)
+        raise MesonException('\n'.join(err_msg))
+    elif out:
+        # Unfortunately Windows cmd.exe consoles may use a codepage that
+        # makes print() raise a UnicodeEncodeError, so as a compromise
+        # ignore such errors for now, since this is only console output.
+        try:
+            print(out)
+        except UnicodeEncodeError:
+            pass
+
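+# build_gtkdoc() drives the gtk-doc pipeline inside the build directory copy
+# of the documentation: gtkdoc-scan, optionally gtkdoc-scangobj (when a
+# GObject types file is available), then gtkdoc-mkdb, gtkdoc-mkhtml and
+# gtkdoc-fixxref.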
+def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str],
+                 main_file: str, module: str, module_version: str,
+                 html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str],
+                 gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str,
+                 html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str,
+                 expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None:
+    print("Building documentation for %s" % module)
+
+    src_dir_args = []
+    for src_dir in src_subdirs:
+        if not os.path.isabs(src_dir):
+            dirs = [os.path.join(source_root, src_dir),
+                    os.path.join(build_root, src_dir)]
+        else:
+            dirs = [src_dir]
+        src_dir_args += ['--source-dir=' + d for d in dirs]
+
+    doc_src = os.path.join(source_root, doc_subdir)
+    abs_out = os.path.join(build_root, doc_subdir)
+    htmldir = os.path.join(abs_out, 'html')
+
+    content_files += [main_file]
+    sections = os.path.join(doc_src, module + "-sections.txt")
+    if os.path.exists(sections):
+        content_files.append(sections)
+
+    overrides = os.path.join(doc_src, module + "-overrides.txt")
+    if os.path.exists(overrides):
+        content_files.append(overrides)
+
+    # Copy files to build directory
+    for f in content_files:
+        # FIXME: Use mesonlib.File objects so we don't need to do this
+        if not os.path.isabs(f):
+            f = os.path.join(doc_src, f)
+        elif os.path.commonpath([f, build_root]) == build_root:
+            continue
+        shutil.copyfile(f, os.path.join(abs_out, os.path.basename(f)))
+
+    shutil.rmtree(htmldir, ignore_errors=True)
+    try:
+        os.mkdir(htmldir)
+    except Exception:
+        pass
+
+    for f in html_assets:
+        f_abs = os.path.join(doc_src, f)
+        shutil.copyfile(f_abs, os.path.join(htmldir, os.path.basename(f_abs)))
+
+    scan_cmd = [options.gtkdoc_scan, '--module=' + module] + src_dir_args
+    if ignore_headers:
+        scan_cmd.append('--ignore-headers=' + ' '.join(ignore_headers))
+    # Add user-specified arguments
+    scan_cmd += scan_args
+    gtkdoc_run_check(scan_cmd, abs_out)
+
+    # Use the generated types file when available, otherwise gobject_typesfile
+    # would often be a path to source dir instead of build dir.
+    if '--rebuild-types' in scan_args:
+        gobject_typesfile = os.path.join(abs_out, module + '.types')
+
+    if gobject_typesfile:
+        scanobjs_cmd = [options.gtkdoc_scangobj] + scanobjs_args
+        scanobjs_cmd += ['--types=' + gobject_typesfile,
+                         '--module=' + module,
+                         '--run=' + run,
+                         '--cflags=' + cflags,
+                         '--ldflags=' + ldflags,
+                         '--cc=' + cc,
+                         '--ld=' + ld,
+                         '--output-dir=' + abs_out]
+
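+        # Collect -Wl,-rpath entries from the ldflags so gtkdoc-scangobj can
+        # locate the just-built libraries when it runs the scanner binary.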
+        library_paths = []
+        for ldflag in split_args(ldflags):
+            if ldflag.startswith('-Wl,-rpath,'):
+                library_paths.append(ldflag[11:])
+
+        gtkdoc_run_check(scanobjs_cmd, build_root, library_paths)
+
+    # Make docbook files
+    if mode == 'auto':
+        # Guessing is probably a poor idea but this keeps compat
+        # with previous behavior
+        if main_file.endswith('sgml'):
+            modeflag = '--sgml-mode'
+        else:
+            modeflag = '--xml-mode'
+    elif mode == 'xml':
+        modeflag = '--xml-mode'
+    elif mode == 'sgml':
+        modeflag = '--sgml-mode'
+    else: # none
+        modeflag = None
+
+    mkdb_cmd = [options.gtkdoc_mkdb,
+                '--module=' + module,
+                '--output-format=xml',
+                '--expand-content-files=' + ' '.join(expand_content_files),
+                ] + src_dir_args
+    if namespace:
+        mkdb_cmd.append('--name-space=' + namespace)
+    if modeflag:
+        mkdb_cmd.append(modeflag)
+    if main_file:
+        # Yes, this is the flag even if the file is in xml.
+        mkdb_cmd.append('--main-sgml-file=' + main_file)
+    # Add user-specified arguments
+    mkdb_cmd += mkdb_args
+    gtkdoc_run_check(mkdb_cmd, abs_out)
+
+    # Make HTML documentation
+    mkhtml_cmd = [options.gtkdoc_mkhtml,
+                  '--path=' + ':'.join((doc_src, abs_out)),
+                  module,
+                  ] + html_args
+    if main_file:
+        mkhtml_cmd.append('../' + main_file)
+    else:
+        mkhtml_cmd.append('%s-docs.xml' % module)
+    # html gen must be run in the HTML dir
+    gtkdoc_run_check(mkhtml_cmd, htmldir)
+
+    # Fix cross-references in HTML files
+    fixref_cmd = [options.gtkdoc_fixxref,
+                  '--module=' + module,
+                  '--module-dir=html'] + fixxref_args
+    gtkdoc_run_check(fixref_cmd, abs_out)
+
+    if module_version:
+        shutil.move(os.path.join(htmldir, f'{module}.devhelp2'),
+                    os.path.join(htmldir, f'{module}-{module_version}.devhelp2'))
+
+def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None:
+    source = os.path.join(build_root, doc_subdir, 'html')
+    final_destination = os.path.join(install_prefix, datadir, module)
+    shutil.rmtree(final_destination, ignore_errors=True)
+    shutil.copytree(source, final_destination)
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
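+    # List-valued arguments are passed in joined with '@@'; split them back
+    # into lists before handing them to build_gtkdoc().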
+    if options.htmlargs:
+        htmlargs = options.htmlargs.split('@@')
+    else:
+        htmlargs = []
+    if options.scanargs:
+        scanargs = options.scanargs.split('@@')
+    else:
+        scanargs = []
+    if options.scanobjsargs:
+        scanobjsargs = options.scanobjsargs.split('@@')
+    else:
+        scanobjsargs = []
+    if options.fixxrefargs:
+        fixxrefargs = options.fixxrefargs.split('@@')
+    else:
+        fixxrefargs = []
+    if options.mkdbargs:
+        mkdbargs = options.mkdbargs.split('@@')
+    else:
+        mkdbargs = []
+    build_gtkdoc(
+        options.sourcedir,
+        options.builddir,
+        options.subdir,
+        options.headerdirs.split('@@'),
+        options.mainfile,
+        options.modulename,
+        options.moduleversion,
+        htmlargs,
+        scanargs,
+        fixxrefargs,
+        mkdbargs,
+        options.gobject_typesfile,
+        scanobjsargs,
+        options.run,
+        options.ld,
+        options.cc,
+        options.ldflags,
+        options.cflags,
+        options.html_assets.split('@@') if options.html_assets else [],
+        options.content_files.split('@@') if options.content_files else [],
+        options.ignore_headers.split('@@') if options.ignore_headers else [],
+        options.namespace,
+        options.expand_content_files.split('@@') if options.expand_content_files else [],
+        options.mode,
+        options)
+
+    if 'MESON_INSTALL_PREFIX' in os.environ:
+        destdir = os.environ.get('DESTDIR', '')
+        install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX'])
+        if options.install_dir:
+            install_dir = options.install_dir
+        else:
+            install_dir = options.modulename
+            if options.moduleversion:
+                install_dir += '-' + options.moduleversion
+        if os.path.isabs(install_dir):
+            install_dir = destdir_join(destdir, install_dir)
+        install_gtkdoc(options.builddir,
+                       options.subdir,
+                       install_prefix,
+                       'share/gtk-doc/html',
+                       install_dir)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/hotdochelper.py b/meson/mesonbuild/scripts/hotdochelper.py
new file mode 100644
index 000000000..a96a34afa
--- /dev/null
+++ b/meson/mesonbuild/scripts/hotdochelper.py
@@ -0,0 +1,38 @@
+import os
+import shutil
+import subprocess
+
+from . import destdir_join
+
+import argparse
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--install')
+parser.add_argument('--extra-extension-path', action="append", default=[])
+parser.add_argument('--name')
+parser.add_argument('--builddir')
+parser.add_argument('--project-version')
+
+
+def run(argv: T.List[str]) -> int:
+    options, args = parser.parse_known_args(argv)
+    subenv = os.environ.copy()
+
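+    # Make any extra hotdoc extension paths importable by the subprocess.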
+    for ext_path in options.extra_extension_path:
+        subenv['PYTHONPATH'] = subenv.get('PYTHONPATH', '') + ':' + ext_path
+
+    res = subprocess.call(args, cwd=options.builddir, env=subenv)
+    if res != 0:
+        return res
+
+    if options.install:
+        source_dir = os.path.join(options.builddir, options.install)
+        destdir = os.environ.get('DESTDIR', '')
+        installdir = destdir_join(destdir,
+                                  os.path.join(os.environ['MESON_INSTALL_PREFIX'],
+                                               'share/doc/', options.name, "html"))
+
+        shutil.rmtree(installdir, ignore_errors=True)
+        shutil.copytree(source_dir, installdir)
+    return 0
diff --git a/meson/mesonbuild/scripts/meson_exe.py b/meson/mesonbuild/scripts/meson_exe.py
new file mode 100644
index 000000000..cd3534cb0
--- /dev/null
+++ b/meson/mesonbuild/scripts/meson_exe.py
@@ -0,0 +1,125 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import argparse
+import pickle
+import subprocess
+import typing as T
+import locale
+
+from .. import mesonlib
+from ..backend.backends import ExecutableSerialisation
+
+options = None
+
+def buildparser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. Do not run on your own, mmm\'kay?')
+    parser.add_argument('--unpickle')
+    parser.add_argument('--capture')
+    parser.add_argument('--feed')
+    return parser
+
+def run_exe(exe: ExecutableSerialisation, extra_env: T.Optional[dict] = None) -> int:
+    if exe.exe_runner:
+        if not exe.exe_runner.found():
+            raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found '
+                                 'wrapper {!r}'.format(exe.cmd_args[0], exe.exe_runner.get_path()))
+        cmd_args = exe.exe_runner.get_command() + exe.cmd_args
+    else:
+        cmd_args = exe.cmd_args
+    child_env = os.environ.copy()
+    if extra_env:
+        child_env.update(extra_env)
+    if exe.env:
+        child_env = exe.env.get_env(child_env)
+    if exe.extra_paths:
+        child_env['PATH'] = (os.pathsep.join(exe.extra_paths + ['']) +
+                             child_env['PATH'])
+        if exe.exe_runner and mesonlib.substring_is_in_list('wine', exe.exe_runner.get_command()):
+            child_env['WINEPATH'] = mesonlib.get_wine_shortpath(
+                exe.exe_runner.get_command(),
+                ['Z:' + p for p in exe.extra_paths] + child_env.get('WINEPATH', '').split(';')
+            )
+
+    stdin = None
+    if exe.feed:
+        stdin = open(exe.feed, 'rb')
+
+    pipe = subprocess.PIPE
+    if exe.verbose:
+        assert not exe.capture, 'Cannot capture and print to console at the same time'
+        pipe = None
+
+    p = subprocess.Popen(cmd_args, env=child_env, cwd=exe.workdir,
+                         close_fds=False, stdin=stdin, stdout=pipe, stderr=pipe)
+    stdout, stderr = p.communicate()
+
+    if stdin is not None:
+        stdin.close()
+
+    if p.returncode == 0xc0000135:
+        # STATUS_DLL_NOT_FOUND on Windows indicating a common problem that is otherwise hard to diagnose
+        raise FileNotFoundError('due to missing DLLs')
+
+    if p.returncode != 0:
+        if exe.pickled:
+            print(f'while executing {cmd_args!r}')
+        if exe.verbose:
+            return p.returncode
+        encoding = locale.getpreferredencoding()
+        if not exe.capture:
+            print('--- stdout ---')
+            print(stdout.decode(encoding=encoding, errors='replace'))
+        print('--- stderr ---')
+        print(stderr.decode(encoding=encoding, errors='replace'))
+        return p.returncode
+
+    if exe.capture:
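+        # Only rewrite the capture file when the output actually changed, so
+        # that targets depending on it are not rebuilt unnecessarily.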
+        skip_write = False
+        try:
+            with open(exe.capture, 'rb') as cur:
+                skip_write = cur.read() == stdout
+        except OSError:
+            pass
+        if not skip_write:
+            with open(exe.capture, 'wb') as output:
+                output.write(stdout)
+
+    return 0
+
+def run(args: T.List[str]) -> int:
+    global options
+    parser = buildparser()
+    options, cmd_args = parser.parse_known_args(args)
+    # argparse supports '--' to separate options from positional arguments,
+    # but it leaves the separator in the remaining arguments, so strip it
+    # here.
+    if cmd_args and cmd_args[0] == '--':
+        cmd_args = cmd_args[1:]
+    if not options.unpickle and not cmd_args:
+        parser.error('either --unpickle or executable and arguments are required')
+    if options.unpickle:
+        if cmd_args or options.capture or options.feed:
+            parser.error('no other arguments can be used with --unpickle')
+        with open(options.unpickle, 'rb') as f:
+            exe = pickle.load(f)
+            exe.pickled = True
+    else:
+        exe = ExecutableSerialisation(cmd_args, capture=options.capture, feed=options.feed)
+
+    return run_exe(exe)
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/msgfmthelper.py b/meson/mesonbuild/scripts/msgfmthelper.py
new file mode 100644
index 000000000..3ddc9e6a9
--- /dev/null
+++ b/meson/mesonbuild/scripts/msgfmthelper.py
@@ -0,0 +1,37 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import subprocess
+import os
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('input')
+parser.add_argument('output')
+parser.add_argument('type')
+parser.add_argument('podir')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*')
+
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
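+    # GETTEXTDATADIRS points msgfmt at extra gettext data directories (for
+    # example ITS rules) needed by the requested --type.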
+    env = None
+    if options.datadirs:
+        env = os.environ.copy()
+        env.update({'GETTEXTDATADIRS': options.datadirs})
+    return subprocess.call(['msgfmt', '--' + options.type, '-d', options.podir,
+                            '--template', options.input,  '-o', options.output] + options.args,
+                           env=env)
diff --git a/meson/mesonbuild/scripts/regen_checker.py b/meson/mesonbuild/scripts/regen_checker.py
new file mode 100644
index 000000000..c96bdc1e5
--- /dev/null
+++ b/meson/mesonbuild/scripts/regen_checker.py
@@ -0,0 +1,64 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import pickle, subprocess
+import typing as T
+from ..coredata import CoreData
+from ..backend.backends import RegenInfo
+from ..mesonlib import OptionKey
+
+# This could also be used for Xcode.
+
+def need_regen(regeninfo: RegenInfo, regen_timestamp: float) -> bool:
+    for i in regeninfo.depfiles:
+        curfile = os.path.join(regeninfo.build_dir, i)
+        curtime = os.stat(curfile).st_mtime
+        if curtime > regen_timestamp:
+            return True
+    # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.
+    # We must make sure to recreate it, even if we do not regenerate the solution.
+    # Otherwise, Visual Studio will always consider the REGEN project out of date.
+    print("Everything is up-to-date, regeneration of build files is not needed.")
+    from ..backend.vs2010backend import Vs2010Backend
+    Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
+    return False
+
+def regen(regeninfo: RegenInfo, meson_command: T.List[str], backend: str) -> None:
+    cmd = meson_command + ['--internal',
+                           'regenerate',
+                           regeninfo.build_dir,
+                           regeninfo.source_dir,
+                           '--backend=' + backend]
+    subprocess.check_call(cmd)
+
+def run(args: T.List[str]) -> int:
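+    # Load the pickled regeneration info and core data written at configure
+    # time, then regenerate the build files only if a dependency file is
+    # newer than the dump.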
+    private_dir = args[0]
+    dumpfile = os.path.join(private_dir, 'regeninfo.dump')
+    coredata_file = os.path.join(private_dir, 'coredata.dat')
+    with open(dumpfile, 'rb') as f:
+        regeninfo = pickle.load(f)
+        assert isinstance(regeninfo, RegenInfo)
+    with open(coredata_file, 'rb') as f:
+        coredata = pickle.load(f)
+        assert isinstance(coredata, CoreData)
+    backend = coredata.get_option(OptionKey('backend'))
+    assert isinstance(backend, str)
+    regen_timestamp = os.stat(dumpfile).st_mtime
+    if need_regen(regeninfo, regen_timestamp):
+        regen(regeninfo, coredata.meson_command, backend)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/scanbuild.py b/meson/mesonbuild/scripts/scanbuild.py
new file mode 100644
index 000000000..bb8e30ce6
--- /dev/null
+++ b/meson/mesonbuild/scripts/scanbuild.py
@@ -0,0 +1,65 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import shutil
+import tempfile
+from ..environment import detect_ninja, detect_scanbuild
+from ..coredata import get_cmd_line_file, CmdLineFileParser
+from ..mesonlib import windows_proof_rmtree
+from pathlib import Path
+import typing as T
+from ast import literal_eval
+import os
+
+def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int:
+    # In case of problems leave the temp directory around
+    # so it can be debugged.
+    scandir = tempfile.mkdtemp(dir=str(privdir))
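+    # Both the configure and the ninja build are run through scan-build so
+    # the analyzer wraps the compiler for every compile command.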
+    meson_cmd = exelist + args
+    build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir]
+    rc = subprocess.call(meson_cmd + [str(srcdir), scandir])
+    if rc != 0:
+        return rc
+    rc = subprocess.call(build_cmd)
+    if rc == 0:
+        windows_proof_rmtree(scandir)
+    return rc
+
+def run(args: T.List[str]) -> int:
+    srcdir = Path(args[0])
+    bldpath = Path(args[1])
+    blddir = args[1]
+    meson_cmd = args[2:]
+    privdir = bldpath / 'meson-private'
+    logdir = bldpath / 'meson-logs' / 'scanbuild'
+    shutil.rmtree(str(logdir), ignore_errors=True)
+
+    # if any cross or native files are specified we should use them
+    cmd = get_cmd_line_file(blddir)
+    data = CmdLineFileParser()
+    data.read(cmd)
+
+    if 'cross_file' in data['properties']:
+        meson_cmd.extend([f'--cross-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['cross_file'])])
+
+    if 'native_file' in data['properties']:
+        meson_cmd.extend([f'--native-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['native_file'])])
+
+    exelist = detect_scanbuild()
+    if not exelist:
+        print('Could not execute scan-build "%s"' % ' '.join(exelist))
+        return 1
+
+    return scanbuild(exelist, srcdir, bldpath, privdir, logdir, meson_cmd)
diff --git a/meson/mesonbuild/scripts/symbolextractor.py b/meson/mesonbuild/scripts/symbolextractor.py
new file mode 100644
index 000000000..17501e28b
--- /dev/null
+++ b/meson/mesonbuild/scripts/symbolextractor.py
@@ -0,0 +1,331 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script extracts the symbols of a given shared library
+# into a file. If the symbols have not changed, the file is not
+# touched. This information is used to skip link steps if the
+# ABI has not changed.
+
+# This file is basically a reimplementation of
+# http://cgit.freedesktop.org/libreoffice/core/commit/?id=3213cd54b76bc80a6f0516aac75a48ff3b2ad67c
+
+import typing as T
+import os, sys
+from .. import mesonlib
+from .. import mlog
+from ..mesonlib import Popen_safe
+import argparse
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--cross-host', default=None, dest='cross_host',
+                    help='cross compilation host platform')
+parser.add_argument('args', nargs='+')
+
+TOOL_WARNING_FILE = None
+RELINKING_WARNING = 'Relinking will always happen on source changes.'
+
+def dummy_syms(outfilename: str) -> None:
+    """Just touch it so relinking happens always."""
+    with open(outfilename, 'w', encoding='utf-8'):
+        pass
+
+def write_if_changed(text: str, outfilename: str) -> None:
+    try:
+        with open(outfilename, encoding='utf-8') as f:
+            oldtext = f.read()
+        if text == oldtext:
+            return
+    except FileNotFoundError:
+        pass
+    with open(outfilename, 'w', encoding='utf-8') as f:
+        f.write(text)
+
+def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
+    global TOOL_WARNING_FILE
+    if os.path.exists(TOOL_WARNING_FILE):
+        return
+    m = f'{tools!r} {msg}. {RELINKING_WARNING}'
+    if stderr:
+        m += '\n' + stderr
+    mlog.warning(m)
+    # Write it out so we don't warn again
+    with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+        pass
+
+def get_tool(name: str) -> T.List[str]:
+    evar = name.upper()
+    if evar in os.environ:
+        import shlex
+        return shlex.split(os.environ[evar])
+    return [name]
+
+def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> str:
+    tool = get_tool(name)
+    try:
+        p, output, e = Popen_safe(tool + args, **kwargs)
+    except FileNotFoundError:
+        print_tool_warning(tool, 'not found')
+        return None
+    except PermissionError:
+        print_tool_warning(tool, 'not usable')
+        return None
+    if p.returncode != 0:
+        print_tool_warning(tool, 'does not work', e)
+        return None
+    return output
+
+def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[str, str]:
+    try:
+        p, output, e = Popen_safe(tool, **kwargs)
+    except FileNotFoundError:
+        return None, '{!r} not found\n'.format(tool[0])
+    except PermissionError:
+        return None, '{!r} not usable\n'.format(tool[0])
+    if p.returncode != 0:
+        return None, e
+    return output, None
+
+def gnu_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [x for x in output.split('\n') if 'SONAME' in x]
+    assert(len(result) <= 1)
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    for line in output.split('\n'):
+        if not line:
+            continue
+        line_split = line.split()
+        entry = line_split[0:2]
+        # Store the size of symbols pointing to data objects so we relink
+        # when those change, which is needed because of copy relocations
+        # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
+        if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4:
+            entry += [line_split[3]]
+        result += [' '.join(entry)]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def solaris_syms(libfilename: str, outfilename: str) -> None:
+    # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+    origpath = os.environ['PATH']
+    try:
+        os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+        gnu_syms(libfilename, outfilename)
+    finally:
+        os.environ['PATH'] = origpath
+
+def osx_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('otool', ['-l', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    arr = output.split('\n')
+    for (i, val) in enumerate(arr):
+        if 'LC_ID_DYLIB' in val:
+            match = i
+            break
+    result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant.
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def openbsd_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [x for x in output.split('\n') if 'SONAME' in x]
+    assert(len(result) <= 1)
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['-D', '-P', '-g', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    # U = undefined (cope with the lack of --defined-only option)
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def freebsd_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [x for x in output.split('\n') if 'SONAME' in x]
+    assert(len(result) <= 1)
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def cygwin_syms(impfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('dlltool', ['-I', impfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [output]
+    # Get the list of all symbols exported
+    output = call_tool('nm', ['--extern-only', '--defined-only',
+                              '--format=posix', impfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    for line in output.split('\n'):
+        if ' T ' not in line:
+            continue
+        result.append(line.split(maxsplit=1)[0])
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def _get_implib_dllname(impfilename: str) -> T.Tuple[T.List[str], str]:
+    all_stderr = ''
+    # First try lib.exe, which is provided by MSVC. Then llvm-lib.exe, by LLVM
+    # for clang-cl.
+    #
+    # We cannot call get_tool on `lib` because it will look at the `LIB` env
+    # var which is the list of library paths MSVC will search for import
+    # libraries while linking.
+    for lib in (['lib'], get_tool('llvm-lib')):
+        output, e = call_tool_nowarn(lib + ['-list', impfilename])
+        if output:
+            # The output is a list of DLLs that each symbol exported by the import
+            # library is available in. We only build import libraries that point to
+            # a single DLL, so we can pick any of these. Pick the last one for
+            # simplicity. Also skip the last line, which is empty.
+            return output.split('\n')[-2:-1], None
+        all_stderr += e
+    # Next, try dlltool.exe which is provided by MinGW
+    output, e = call_tool_nowarn(get_tool('dlltool') + ['-I', impfilename])
+    if output:
+        return [output], None
+    all_stderr += e
+    return ([], all_stderr)
+
+def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]:
+    all_stderr = ''
+    # Force dumpbin.exe to use en-US so we can parse its output
+    env = os.environ.copy()
+    env['VSLANG'] = '1033'
+    output, e = call_tool_nowarn(get_tool('dumpbin') + ['-exports', impfilename], env=env)
+    if output:
+        lines = output.split('\n')
+        start = lines.index('File Type: LIBRARY')
+        end = lines.index('  Summary')
+        return lines[start:end], None
+    all_stderr += e
+    # Next, try llvm-nm.exe provided by LLVM, then nm.exe provided by MinGW
+    for nm in ('llvm-nm', 'nm'):
+        output, e = call_tool_nowarn(get_tool(nm) + ['--extern-only', '--defined-only',
+                                                     '--format=posix', impfilename])
+        if output:
+            result = []
+            for line in output.split('\n'):
+                if ' T ' not in line or line.startswith('.text'):
+                    continue
+                result.append(line.split(maxsplit=1)[0])
+            return result, None
+        all_stderr += e
+    return ([], all_stderr)
+
+def windows_syms(impfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    result, e = _get_implib_dllname(impfilename)
+    if not result:
+        print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e)
+        dummy_syms(outfilename)
+        return
+    # Get a list of all symbols exported
+    symbols, e = _get_implib_exports(impfilename)
+    if not symbols:
+        print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e)
+        dummy_syms(outfilename)
+        return
+    result += symbols
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None:
+    if cross_host is not None:
+        # In case of cross builds just always relink. In theory we could
+        # determine the correct toolset, but we would need to use the correct
+        # `nm`, `readelf`, etc, from the cross info which requires refactoring.
+        dummy_syms(outfilename)
+    elif mesonlib.is_linux() or mesonlib.is_hurd():
+        gnu_syms(libfilename, outfilename)
+    elif mesonlib.is_osx():
+        osx_syms(libfilename, outfilename)
+    elif mesonlib.is_openbsd():
+        openbsd_syms(libfilename, outfilename)
+    elif mesonlib.is_freebsd():
+        freebsd_syms(libfilename, outfilename)
+    elif mesonlib.is_windows():
+        if os.path.isfile(impfilename):
+            windows_syms(impfilename, outfilename)
+        else:
+            # No import library. Not sure how the DLL is being used, so just
+            # rebuild everything that links to it every time.
+            dummy_syms(outfilename)
+    elif mesonlib.is_cygwin():
+        if os.path.isfile(impfilename):
+            cygwin_syms(impfilename, outfilename)
+        else:
+            # No import library. Not sure how the DLL is being used, so just
+            # rebuild everything that links to it every time.
+            dummy_syms(outfilename)
+    elif mesonlib.is_sunos():
+        solaris_syms(libfilename, outfilename)
+    else:
+        if not os.path.exists(TOOL_WARNING_FILE):
+            mlog.warning('Symbol extracting has not been implemented for this '
+                         'platform. ' + RELINKING_WARNING)
+            # Write it out so we don't warn again
+            with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+                pass
+        dummy_syms(outfilename)
+
+def run(args: T.List[str]) -> int:
+    global TOOL_WARNING_FILE
+    options = parser.parse_args(args)
+    if len(options.args) != 4:
+        print('symbolextractor.py <shared library file> <import library file> <output file>')
+        sys.exit(1)
+    privdir = os.path.join(options.args[0], 'meson-private')
+    TOOL_WARNING_FILE = os.path.join(privdir, 'symbolextractor_tool_warning_printed')
+    libfile = options.args[1]
+    impfile = options.args[2] # Only used on Windows
+    outfile = options.args[3]
+    gen_symbols(libfile, impfile, outfile, options.cross_host)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/tags.py b/meson/mesonbuild/scripts/tags.py
new file mode 100644
index 000000000..9098efb39
--- /dev/null
+++ b/meson/mesonbuild/scripts/tags.py
@@ -0,0 +1,53 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+from pathlib import Path
+import typing as T
+
+def ls_as_bytestream() -> bytes:
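+    # Prefer git's tracked-file list inside a git checkout; otherwise walk the
+    # source tree, skipping hidden files and directories.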
+    if os.path.exists('.git'):
+        return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
+                              stdout=subprocess.PIPE).stdout
+
+    files = [str(p) for p in Path('.').glob('**/*')
+             if not p.is_dir() and
+             not next((x for x in p.parts if x.startswith('.')), None)]
+    return '\n'.join(files).encode()
+
+
+def cscope() -> int:
+    ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()])
+    return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode
+
+
+def ctags() -> int:
+    ls = ls_as_bytestream()
+    return subprocess.run(['ctags', '-L-'], input=ls).returncode
+
+
+def etags() -> int:
+    ls = ls_as_bytestream()
+    return subprocess.run(['etags', '-'], input=ls).returncode
+
+
+def run(args: T.List[str]) -> int:
+    tool_name = args[0]
+    srcdir_name = args[1]
+    os.chdir(srcdir_name)
+    assert tool_name in ['cscope', 'ctags', 'etags']
+    res = globals()[tool_name]()
+    assert isinstance(res, int)
+    return res
diff --git a/meson/mesonbuild/scripts/uninstall.py b/meson/mesonbuild/scripts/uninstall.py
new file mode 100644
index 000000000..f08490fbd
--- /dev/null
+++ b/meson/mesonbuild/scripts/uninstall.py
@@ -0,0 +1,50 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import typing as T
+
+logfile = 'meson-logs/install-log.txt'
+
+def do_uninstall(log: str) -> None:
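+    # Remove every path recorded in the install log; directories are removed
+    # with os.rmdir (so only if empty) and symlinks are unlinked rather than
+    # followed.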
+    failures = 0
+    successes = 0
+    for line in open(log, encoding='utf-8'):
+        if line.startswith('#'):
+            continue
+        fname = line.strip()
+        try:
+            if os.path.isdir(fname) and not os.path.islink(fname):
+                os.rmdir(fname)
+            else:
+                os.unlink(fname)
+            print('Deleted:', fname)
+            successes += 1
+        except Exception as e:
+            print(f'Could not delete {fname}: {e}.')
+            failures += 1
+    print('\nUninstall finished.\n')
+    print('Deleted:', successes)
+    print('Failed:', failures)
+    print('\nRemember that files created by custom scripts have not been removed.')
+
+def run(args: T.List[str]) -> int:
+    if args:
+        print('Weird error.')
+        return 1
+    if not os.path.exists(logfile):
+        print('Log file does not exist, no installation has been done.')
+        return 0
+    do_uninstall(logfile)
+    return 0
diff --git a/meson/mesonbuild/scripts/vcstagger.py b/meson/mesonbuild/scripts/vcstagger.py
new file mode 100644
index 000000000..18cf5f7f0
--- /dev/null
+++ b/meson/mesonbuild/scripts/vcstagger.py
@@ -0,0 +1,44 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os, subprocess, re
+import typing as T
+
+def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None:
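+    # Run the VCS command, extract the version with the regex and fall back to
+    # the static string on any failure. The output file is only rewritten when
+    # its contents would change, to avoid needless rebuilds.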
+    try:
+        output = subprocess.check_output(cmd, cwd=source_dir)
+        new_string = re.search(regex_selector, output.decode()).group(1).strip()
+    except Exception:
+        new_string = fallback
+
+    with open(infile, encoding='utf-8') as f:
+        new_data = f.read().replace(replace_string, new_string)
+    if os.path.exists(outfile):
+        with open(outfile, encoding='utf-8') as f:
+            needs_update = (f.read() != new_data)
+    else:
+        needs_update = True
+    if needs_update:
+        with open(outfile, 'w', encoding='utf-8') as f:
+            f.write(new_data)
+
+
+def run(args: T.List[str]) -> int:
+    infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
+    command = args[6:]
+    config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/yelphelper.py b/meson/mesonbuild/scripts/yelphelper.py
new file mode 100644
index 000000000..374104bea
--- /dev/null
+++ b/meson/mesonbuild/scripts/yelphelper.py
@@ -0,0 +1,133 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import shutil
+import argparse
+from .. import mlog
+from ..mesonlib import has_path_sep
+from . import destdir_join
+from .gettext import read_linguas
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--id', dest='project_id')
+parser.add_argument('--subdir', dest='subdir')
+parser.add_argument('--installdir', dest='install_dir')
+parser.add_argument('--sources', dest='sources')
+parser.add_argument('--media', dest='media', default='')
+parser.add_argument('--langs', dest='langs', default='')
+parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)
+
+def build_pot(srcdir: str, project_id: str, sources: T.List[str]) -> None:
+    # Must be relative paths
+    sources = [os.path.join('C', source) for source in sources]
+    outfile = os.path.join(srcdir, project_id + '.pot')
+    subprocess.call(['itstool', '-o', outfile] + sources)
+
+def update_po(srcdir: str, project_id: str, langs: T.List[str]) -> None:
+    potfile = os.path.join(srcdir, project_id + '.pot')
+    for lang in langs:
+        pofile = os.path.join(srcdir, lang, lang + '.po')
+        subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
+
+def build_translations(srcdir: str, blddir: str, langs: T.List[str]) -> None:
+    for lang in langs:
+        outdir = os.path.join(blddir, lang)
+        os.makedirs(outdir, exist_ok=True)
+        subprocess.call([
+            'msgfmt', os.path.join(srcdir, lang, lang + '.po'),
+            '-o', os.path.join(outdir, lang + '.gmo')
+        ])
+
+def merge_translations(blddir: str, sources: T.List[str], langs: T.List[str]) -> None:
+    for lang in langs:
+        subprocess.call([
+            'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'),
+            '-o', os.path.join(blddir, lang)
+        ] + sources)
+
+def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[str], langs: T.List[str], install_dir: str, destdir: str, project_id: str, symlinks: bool) -> None:
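+    # Install the built pages for each language plus the untranslated 'C'
+    # sources. Media files missing from a translation fall back to the 'C'
+    # version, either symlinked or copied depending on --symlinks.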
+    c_install_dir = os.path.join(install_dir, 'C', project_id)
+    for lang in langs + ['C']:
+        indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
+        os.makedirs(indir, exist_ok=True)
+        for source in sources:
+            infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source)
+            outfile = os.path.join(indir, source)
+            mlog.log(f'Installing {infile} to {outfile}')
+            shutil.copy2(infile, outfile)
+        for m in media:
+            infile = os.path.join(srcdir, lang, m)
+            outfile = os.path.join(indir, m)
+            c_infile = os.path.join(srcdir, 'C', m)
+            if not os.path.exists(infile):
+                if not os.path.exists(c_infile):
+                    mlog.warning('Media file "%s" did not exist in C directory' % m)
+                    continue
+                elif symlinks:
+                    srcfile = os.path.join(c_install_dir, m)
+                    mlog.log(f'Symlinking {outfile} to {srcfile}.')
+                    if has_path_sep(m):
+                        os.makedirs(os.path.dirname(outfile), exist_ok=True)
+                    try:
+                        try:
+                            os.symlink(srcfile, outfile)
+                        except FileExistsError:
+                            os.remove(outfile)
+                            os.symlink(srcfile, outfile)
+                        continue
+                    except (NotImplementedError, OSError):
+                        mlog.warning('Symlinking not supported, falling back to copying')
+                        infile = c_infile
+                else:
+                    # Lang doesn't have the media file, so copy the 'C' one instead
+                    infile = c_infile
+            mlog.log(f'Installing {infile} to {outfile}')
+            if has_path_sep(m):
+                os.makedirs(os.path.dirname(outfile), exist_ok=True)
+            shutil.copyfile(infile, outfile)
+            shutil.copystat(infile, outfile)
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
+    langs = options.langs.split('@@') if options.langs else []
+    media = options.media.split('@@') if options.media else []
+    sources = options.sources.split('@@')
+    destdir = os.environ.get('DESTDIR', '')
+    src_subdir = os.path.join(os.environ['MESON_SOURCE_ROOT'], options.subdir)
+    build_subdir = os.path.join(os.environ['MESON_BUILD_ROOT'], options.subdir)
+    abs_sources = [os.path.join(src_subdir, 'C', source) for source in sources]
+
+    if not langs:
+        langs = read_linguas(src_subdir)
+
+    if options.command == 'pot':
+        build_pot(src_subdir, options.project_id, sources)
+    elif options.command == 'update-po':
+        build_pot(src_subdir, options.project_id, sources)
+        update_po(src_subdir, options.project_id, langs)
+    elif options.command == 'build':
+        if langs:
+            build_translations(src_subdir, build_subdir, langs)
+    elif options.command == 'install':
+        install_dir = os.path.join(os.environ['MESON_INSTALL_PREFIX'], options.install_dir)
+        if langs:
+            build_translations(src_subdir, build_subdir, langs)
+            merge_translations(build_subdir, abs_sources, langs)
+        install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
+                     destdir, options.project_id, options.symlinks)
+    return 0
diff --git a/meson/mesonbuild/templates/__init__.py b/meson/mesonbuild/templates/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/meson/mesonbuild/templates/cpptemplates.py b/meson/mesonbuild/templates/cpptemplates.py
new file mode 100644
index 000000000..61c273745
--- /dev/null
+++ b/meson/mesonbuild/templates/cpptemplates.py
@@ -0,0 +1,185 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cpp_template = '''#include <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << ".\\n";
+    return 0;
+}}
+'''
+
+hello_cpp_meson_template = '''project('{project_name}', 'cpp',
+  version : '{version}',
+  default_options : ['warning_level=3',
+                     'cpp_std=c++14'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_hpp_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+namespace {namespace} {{
+
+class {utoken}_PUBLIC {class_name} {{
+
+public:
+  {class_name}();
+  int get_number() const;
+
+private:
+
+  int number;
+
+}};
+
+}}
+
+'''
+
+lib_cpp_template = '''#include <{header_file}>
+
+namespace {namespace} {{
+
+{class_name}::{class_name}() {{
+    number = 6;
+}}
+
+int {class_name}::get_number() const {{
+  return number;
+}}
+
+}}
+'''
+
+lib_cpp_test_template = '''#include <{header_file}>
+#include <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    {namespace}::{class_name} c;
+    return c.get_number() != 6;
+}}
+'''
+
+lib_cpp_meson_template = '''project('{project_name}', 'cpp',
+  version : '{version}',
+  default_options : ['warning_level=3', 'cpp_std=c++14'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  cpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+
+class CppProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.cpp'
+        open(source_name, 'w', encoding='utf-8').write(hello_cpp_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_cpp_meson_template.format(project_name=self.name,
+                                            exe_name=lowercase_token,
+                                            source_name=source_name,
+                                            version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        test_exe_name = lowercase_token + '_test'
+        namespace = lowercase_token
+        lib_hpp_name = lowercase_token + '.hpp'
+        lib_cpp_name = lowercase_token + '.cpp'
+        test_cpp_name = lowercase_token + '_test.cpp'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'class_name': class_name,
+                  'namespace': namespace,
+                  'header_file': lib_hpp_name,
+                  'source_file': lib_cpp_name,
+                  'test_source_file': test_cpp_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_hpp_name, 'w', encoding='utf-8').write(lib_hpp_template.format(**kwargs))
+        open(lib_cpp_name, 'w', encoding='utf-8').write(lib_cpp_template.format(**kwargs))
+        open(test_cpp_name, 'w', encoding='utf-8').write(lib_cpp_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_cpp_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/cstemplates.py b/meson/mesonbuild/templates/cstemplates.py
new file mode 100644
index 000000000..bad7984e9
--- /dev/null
+++ b/meson/mesonbuild/templates/cstemplates.py
@@ -0,0 +1,134 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cs_template = '''using System;
+
+public class {class_name} {{
+    const String PROJECT_NAME = "{project_name}";
+
+    static int Main(String[] args) {{
+      if (args.Length > 0) {{
+          System.Console.WriteLine(String.Format("{project_name} takes no arguments."));
+          return 1;
+      }}
+      Console.WriteLine(String.Format("This is project {{0}}.", PROJECT_NAME));
+      return 0;
+    }}
+}}
+
+'''
+
+hello_cs_meson_template = '''project('{project_name}', 'cs',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_cs_template = '''
+public class {class_name} {{
+    private const int number = 6;
+
+    public int get_number() {{
+      return number;
+    }}
+}}
+
+'''
+
+lib_cs_test_template = '''using System;
+
+public class {class_test} {{
+    static int Main(String[] args) {{
+      if (args.Length > 0) {{
+          System.Console.WriteLine("{project_name} takes no arguments..");
+          return 1;
+      }}
+      {class_name} c = new {class_name}();
+      Boolean result = true;
+      return result.CompareTo(c.get_number() != 6);
+    }}
+}}
+
+'''
+
+lib_cs_meson_template = '''project('{project_name}', 'cs',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+stlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : stlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : stlib)
+
+'''
+
+
+class CSharpProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        source_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+        open(source_name, 'w', encoding='utf-8').write(
+            hello_cs_template.format(project_name=self.name,
+                                     class_name=class_name))
+        open('meson.build', 'w', encoding='utf-8').write(
+          hello_cs_meson_template.format(project_name=self.name,
+                                         exe_name=self.name,
+                                         source_name=source_name,
+                                         version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+        project_test = lowercase_token + '_test'
+        lib_cs_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+        test_cs_name = uppercase_token[0] + lowercase_token[1:] + '_test.cs'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'class_test': class_test,
+                  'class_name': class_name,
+                  'source_file': lib_cs_name,
+                  'test_source_file': test_cs_name,
+                  'test_exe_name': project_test,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_cs_name, 'w', encoding='utf-8').write(lib_cs_template.format(**kwargs))
+        open(test_cs_name, 'w', encoding='utf-8').write(lib_cs_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_cs_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/ctemplates.py b/meson/mesonbuild/templates/ctemplates.py
new file mode 100644
index 000000000..9b651bc8b
--- /dev/null
+++ b/meson/mesonbuild/templates/ctemplates.py
@@ -0,0 +1,166 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_c_template = '''#include <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_c_test_template = '''#include <{header_file}>
+#include <stdio.h>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_c_meson_template = '''project('{project_name}', 'c',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  c_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_c_template = '''#include <stdio.h>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    printf("This is project %s.\\n", PROJECT_NAME);
+    return 0;
+}}
+'''
+
+hello_c_meson_template = '''project('{project_name}', 'c',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class CProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.c'
+        open(source_name, 'w', encoding='utf-8').write(hello_c_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_c_meson_template.format(project_name=self.name,
+                                          exe_name=lowercase_token,
+                                          source_name=source_name,
+                                          version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_c_name = lowercase_token + '.c'
+        test_c_name = lowercase_token + '_test.c'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_c_name,
+                  'test_source_file': test_c_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_c_name, 'w', encoding='utf-8').write(lib_c_template.format(**kwargs))
+        open(test_c_name, 'w', encoding='utf-8').write(lib_c_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_c_meson_template.format(**kwargs))
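For illustration, the short sketch below renders lib_h_template for a hypothetical project named 'foo', showing how the {utoken} and {function_name} placeholders end up in the generated header. The names 'foo' and 'foo_func' are invented for the example and are not part of this patch.

# Hypothetical rendering of the C library header template; example values only.
import re
from mesonbuild.templates.ctemplates import lib_h_template

name = 'foo'
lowercase_token = re.sub(r'[^a-z0-9]', '_', name.lower())   # 'foo'
uppercase_token = lowercase_token.upper()                   # 'FOO'
header = lib_h_template.format(utoken=uppercase_token,
                               function_name=lowercase_token[:3] + '_func')
# The result declares 'int FOO_PUBLIC foo_func();' behind the BUILDING_FOO
# export macro, which pairs with the -DBUILDING_FOO argument that
# lib_c_meson_template passes to shared_library().
print(header)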
diff --git a/meson/mesonbuild/templates/cudatemplates.py b/meson/mesonbuild/templates/cudatemplates.py
new file mode 100644
index 000000000..919db21be
--- /dev/null
+++ b/meson/mesonbuild/templates/cudatemplates.py
@@ -0,0 +1,185 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cuda_template = '''#include <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << ".\\n";
+    return 0;
+}}
+'''
+
+hello_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+  version : '{version}',
+  default_options : ['warning_level=3',
+                     'cpp_std=c++14'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+namespace {namespace} {{
+
+class {utoken}_PUBLIC {class_name} {{
+
+public:
+  {class_name}();
+  int get_number() const;
+
+private:
+
+  int number;
+
+}};
+
+}}
+
+'''
+
+lib_cuda_template = '''#include <{header_file}>
+
+namespace {namespace} {{
+
+{class_name}::{class_name}() {{
+    number = 6;
+}}
+
+int {class_name}::get_number() const {{
+  return number;
+}}
+
+}}
+'''
+
+lib_cuda_test_template = '''#include <{header_file}>
+#include <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    {namespace}::{class_name} c;
+    return c.get_number() != 6;
+}}
+'''
+
+lib_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  cpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+
+class CudaProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.cu'
+        open(source_name, 'w', encoding='utf-8').write(hello_cuda_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_cuda_meson_template.format(project_name=self.name,
+                                             exe_name=lowercase_token,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        test_exe_name = lowercase_token + '_test'
+        namespace = lowercase_token
+        lib_h_name = lowercase_token + '.h'
+        lib_cuda_name = lowercase_token + '.cu'
+        test_cuda_name = lowercase_token + '_test.cu'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'class_name': class_name,
+                  'namespace': namespace,
+                  'header_file': lib_h_name,
+                  'source_file': lib_cuda_name,
+                  'test_source_file': test_cuda_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_cuda_name, 'w', encoding='utf-8').write(lib_cuda_template.format(**kwargs))
+        open(test_cuda_name, 'w', encoding='utf-8').write(lib_cuda_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_cuda_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/dlangtemplates.py b/meson/mesonbuild/templates/dlangtemplates.py
new file mode 100644
index 000000000..3d939d885
--- /dev/null
+++ b/meson/mesonbuild/templates/dlangtemplates.py
@@ -0,0 +1,143 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_d_template = '''module main;
+import std.stdio;
+
+enum PROJECT_NAME = "{project_name}";
+
+int main(string[] args) {{
+    if (args.length != 1){{
+        writefln("%s takes no arguments.\\n", args[0]);
+        return 1;
+    }}
+    writefln("This is project %s.\\n", PROJECT_NAME);
+    return 0;
+}}
+'''
+
+hello_d_meson_template = '''project('{project_name}', 'd',
+    version : '{version}',
+    default_options: ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_d_template = '''module {module_file};
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_d_test_template = '''module {module_file}_test;
+import std.stdio;
+import {module_file};
+
+
+int main(string[] args) {{
+    if (args.length != 1){{
+        writefln("%s takes no arguments.\\n", args[0]);
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_d_meson_template = '''project('{project_name}', 'd',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+stlib = static_library('{lib_name}', '{source_file}',
+  install : true,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : stlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : stlib)
+
+# Make this library usable from the Dlang
+# build system.
+dlang_mod = import('dlang')
+if find_program('dub', required: false).found()
+  dlang_mod.generate_dub_file(meson.project_name().to_lower(), meson.source_root(),
+    name : meson.project_name(),
+    license: meson.project_license(),
+    sourceFiles : '{source_file}',
+    description : 'Meson sample project.',
+    version : '{version}',
+  )
+endif
+'''
+
+
+class DlangProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.d'
+        open(source_name, 'w', encoding='utf-8').write(hello_d_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_d_meson_template.format(project_name=self.name,
+                                          exe_name=lowercase_token,
+                                          source_name=source_name,
+                                          version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_m_name = lowercase_token
+        lib_d_name = lowercase_token + '.d'
+        test_d_name = lowercase_token + '_test.d'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'module_file': lib_m_name,
+                  'source_file': lib_d_name,
+                  'test_source_file': test_d_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_d_name, 'w', encoding='utf-8').write(lib_d_template.format(**kwargs))
+        open(test_d_name, 'w', encoding='utf-8').write(lib_d_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_d_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/fortrantemplates.py b/meson/mesonbuild/templates/fortrantemplates.py
new file mode 100644
index 000000000..8fc1bca35
--- /dev/null
+++ b/meson/mesonbuild/templates/fortrantemplates.py
@@ -0,0 +1,140 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+lib_fortran_template = '''
+! This procedure will not be exported and is not
+! directly callable by users of this library.
+
+module modfoo
+
+implicit none
+private
+public :: {function_name}
+
+contains
+
+integer function internal_function()
+    internal_function = 0
+end function internal_function
+
+integer function {function_name}()
+    {function_name} = internal_function()
+end function {function_name}
+
+end module modfoo
+'''
+
+lib_fortran_test_template = '''
+use modfoo
+
+print *,{function_name}()
+
+end program
+'''
+
+lib_fortran_meson_template = '''project('{project_name}', 'fortran',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  fortran_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_fortran_template = '''
+implicit none
+
+character(len=*), parameter :: PROJECT_NAME = "{project_name}"
+
+print *,"This is project ", PROJECT_NAME
+
+end program
+'''
+
+hello_fortran_meson_template = '''project('{project_name}', 'fortran',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class FortranProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.f90'
+        open(source_name, 'w', encoding='utf-8').write(hello_fortran_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_fortran_meson_template.format(project_name=self.name,
+                                                exe_name=lowercase_token,
+                                                source_name=source_name,
+                                                version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_fortran_name = lowercase_token + '.f90'
+        test_fortran_name = lowercase_token + '_test.f90'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'source_file': lib_fortran_name,
+                  'test_source_file': test_fortran_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_fortran_name, 'w', encoding='utf-8').write(lib_fortran_template.format(**kwargs))
+        open(test_fortran_name, 'w', encoding='utf-8').write(lib_fortran_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_fortran_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/javatemplates.py b/meson/mesonbuild/templates/javatemplates.py
new file mode 100644
index 000000000..e4329611d
--- /dev/null
+++ b/meson/mesonbuild/templates/javatemplates.py
@@ -0,0 +1,136 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_java_template = '''
+
+public class {class_name} {{
+    final static String PROJECT_NAME = "{project_name}";
+
+    public static void main (String args[]) {{
+        if(args.length != 0) {{
+            System.out.println(args + " takes no arguments.");
+            System.exit(1);
+        }}
+        System.out.println("This is project " + PROJECT_NAME + ".");
+        System.exit(0);
+    }}
+}}
+
+'''
+
+hello_java_meson_template = '''project('{project_name}', 'java',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = jar('{exe_name}', '{source_name}',
+  main_class : '{exe_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_java_template = '''
+
+public class {class_name} {{
+    final static int number = 6;
+
+    public final int get_number() {{
+      return number;
+    }}
+}}
+
+'''
+
+lib_java_test_template = '''
+
+public class {class_test} {{
+    public static void main (String args[]) {{
+        if(args.length != 0) {{
+            System.out.println(args + " takes no arguments.");
+            System.exit(1);
+        }}
+
+        {class_name} c = new {class_name}();
+        Boolean result = true;
+        System.exit(result.compareTo(c.get_number() != 6));
+    }}
+}}
+
+'''
+
+lib_java_meson_template = '''project('{project_name}', 'java',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+jarlib = jar('{class_name}', '{source_file}',
+  main_class : '{class_name}',
+  install : true,
+)
+
+test_jar = jar('{class_test}', '{test_source_file}',
+  main_class : '{class_test}',
+  link_with : jarlib)
+test('{test_name}', test_jar)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : jarlib)
+'''
+
+
+class JavaProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        source_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+        open(source_name, 'w', encoding='utf-8').write(
+            hello_java_template.format(project_name=self.name,
+                                       class_name=class_name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_java_meson_template.format(project_name=self.name,
+                                             exe_name=class_name,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+        lib_java_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+        test_java_name = uppercase_token[0] + lowercase_token[1:] + '_test.java'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'class_test': class_test,
+                  'class_name': class_name,
+                  'source_file': lib_java_name,
+                  'test_source_file': test_java_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_java_name, 'w', encoding='utf-8').write(lib_java_template.format(**kwargs))
+        open(test_java_name, 'w', encoding='utf-8').write(lib_java_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_java_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/mesontemplates.py b/meson/mesonbuild/templates/mesontemplates.py
new file mode 100644
index 000000000..a29ac6fdc
--- /dev/null
+++ b/meson/mesonbuild/templates/mesontemplates.py
@@ -0,0 +1,75 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+meson_executable_template = '''project('{project_name}', '{language}',
+  version : '{version}',
+  default_options : [{default_options}])
+
+executable('{executable}',
+           {sourcespec},{depspec}
+           install : true)
+'''
+
+
+meson_jar_template = '''project('{project_name}', '{language}',
+  version : '{version}',
+  default_options : [{default_options}])
+
+jar('{executable}',
+    {sourcespec},{depspec}
+    main_class: '{main_class}',
+    install : true)
+'''
+
+
+def create_meson_build(options: argparse.Namespace) -> None:
+    if options.type != 'executable':
+        raise SystemExit('\nGenerating a meson.build file from existing sources is\n'
+                         'supported only for project type "executable".\n'
+                         'Run meson init in an empty directory to create a sample project.')
+    default_options = ['warning_level=3']
+    if options.language == 'cpp':
+        # This shows how to set this very common option.
+        default_options += ['cpp_std=c++14']
+    # If we get a meson.build autoformatter one day, this code could
+    # be simplified quite a bit.
+    formatted_default_options = ', '.join(f"'{x}'" for x in default_options)
+    sourcespec = ',\n           '.join(f"'{x}'" for x in options.srcfiles)
+    depspec = ''
+    if options.deps:
+        depspec = '\n           dependencies : [\n              '
+        depspec += ',\n              '.join(f"dependency('{x}')"
+                                            for x in options.deps.split(','))
+        depspec += '],'
+    if options.language != 'java':
+        content = meson_executable_template.format(project_name=options.name,
+                                                   language=options.language,
+                                                   version=options.version,
+                                                   executable=options.executable,
+                                                   sourcespec=sourcespec,
+                                                   depspec=depspec,
+                                                   default_options=formatted_default_options)
+    else:
+        content = meson_jar_template.format(project_name=options.name,
+                                            language=options.language,
+                                            version=options.version,
+                                            executable=options.executable,
+                                            main_class=options.name,
+                                            sourcespec=sourcespec,
+                                            depspec=depspec,
+                                            default_options=formatted_default_options)
+    open('meson.build', 'w', encoding='utf-8').write(content)
+    print('Generated meson.build file:\n\n' + content)
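As a rough usage sketch (not part of the patch), create_meson_build() only needs an argparse.Namespace carrying the fields read above; the values below are invented examples, and the call writes meson.build into the current directory.

# Example of driving create_meson_build() by hand; the Namespace would
# normally come from the 'meson init' argument parser.
import argparse
from mesonbuild.templates.mesontemplates import create_meson_build

opts = argparse.Namespace(type='executable', language='c',
                          name='demo', version='0.1',
                          executable='demo',
                          srcfiles=['main.c', 'util.c'],
                          deps='')          # or e.g. 'zlib,threads'
create_meson_build(opts)   # writes and prints the generated meson.build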
diff --git a/meson/mesonbuild/templates/objcpptemplates.py b/meson/mesonbuild/templates/objcpptemplates.py
new file mode 100644
index 000000000..4f61d6c60
--- /dev/null
+++ b/meson/mesonbuild/templates/objcpptemplates.py
@@ -0,0 +1,167 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_objcpp_template = '''#import <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_objcpp_test_template = '''#import <{header_file}>
+#import <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments." << std::endl;
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  objcpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_objcpp_template = '''#import <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments." << std::endl;
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << "." << std::endl;
+    return 0;
+}}
+'''
+
+hello_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class ObjCppProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.mm'
+        open(source_name, 'w', encoding='utf-8').write(hello_objcpp_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_objcpp_meson_template.format(project_name=self.name,
+                                               exe_name=lowercase_token,
+                                               source_name=source_name,
+                                               version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_objcpp_name = lowercase_token + '.mm'
+        test_objcpp_name = lowercase_token + '_test.mm'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_objcpp_name,
+                  'test_source_file': test_objcpp_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_objcpp_name, 'w', encoding='utf-8').write(lib_objcpp_template.format(**kwargs))
+        open(test_objcpp_name, 'w', encoding='utf-8').write(lib_objcpp_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_objcpp_meson_template.format(**kwargs))
+
diff --git a/meson/mesonbuild/templates/objctemplates.py b/meson/mesonbuild/templates/objctemplates.py
new file mode 100644
index 000000000..dac638d14
--- /dev/null
+++ b/meson/mesonbuild/templates/objctemplates.py
@@ -0,0 +1,166 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_objc_template = '''#import <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_objc_test_template = '''#import <{header_file}>
+#import <stdio.h>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_objc_meson_template = '''project('{project_name}', 'objc',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  objc_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_objc_template = '''#import <stdio.h>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    printf("This is project %s.\\n", PROJECT_NAME);
+    return 0;
+}}
+'''
+
+hello_objc_meson_template = '''project('{project_name}', 'objc',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class ObjCProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.m'
+        open(source_name, 'w', encoding='utf-8').write(hello_objc_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_objc_meson_template.format(project_name=self.name,
+                                             exe_name=lowercase_token,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_objc_name = lowercase_token + '.m'
+        test_objc_name = lowercase_token + '_test.m'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_objc_name,
+                  'test_source_file': test_objc_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_objc_name, 'w', encoding='utf-8').write(lib_objc_template.format(**kwargs))
+        open(test_objc_name, 'w', encoding='utf-8').write(lib_objc_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_objc_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/rusttemplates.py b/meson/mesonbuild/templates/rusttemplates.py
new file mode 100644
index 000000000..95a937cca
--- /dev/null
+++ b/meson/mesonbuild/templates/rusttemplates.py
@@ -0,0 +1,113 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_rust_template = '''#![crate_name = "{crate_file}"]
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+fn internal_function() -> i32 {{
+    return 0;
+}}
+
+pub fn {function_name}() -> i32 {{
+    return internal_function();
+}}
+'''
+
+lib_rust_test_template = '''extern crate {crate_file};
+
+fn main() {{
+    println!("printing: {{}}", {crate_file}::{function_name}());
+}}
+'''
+
+
+lib_rust_meson_template = '''project('{project_name}', 'rust',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+shlib = static_library('{lib_name}', '{source_file}', install : true)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+'''
+
+hello_rust_template = '''
+fn main() {{
+    let project_name = "{project_name}";
+    println!("This is project {{}}.\\n", project_name);
+}}
+'''
+
+hello_rust_meson_template = '''project('{project_name}', 'rust',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class RustProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.rs'
+        open(source_name, 'w', encoding='utf-8').write(hello_rust_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_rust_meson_template.format(project_name=self.name,
+                                             exe_name=lowercase_token,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_crate_name = lowercase_token
+        lib_rs_name = lowercase_token + '.rs'
+        test_rs_name = lowercase_token + '_test.rs'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'crate_file': lib_crate_name,
+                  'source_file': lib_rs_name,
+                  'test_source_file': test_rs_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_rs_name, 'w', encoding='utf-8').write(lib_rust_template.format(**kwargs))
+        open(test_rs_name, 'w', encoding='utf-8').write(lib_rust_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_rust_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/samplefactory.py b/meson/mesonbuild/templates/samplefactory.py
new file mode 100644
index 000000000..1192e138a
--- /dev/null
+++ b/meson/mesonbuild/templates/samplefactory.py
@@ -0,0 +1,40 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.fortrantemplates import FortranProject
+from mesonbuild.templates.objcpptemplates import ObjCppProject
+from mesonbuild.templates.dlangtemplates import DlangProject
+from mesonbuild.templates.rusttemplates import RustProject
+from mesonbuild.templates.javatemplates import JavaProject
+from mesonbuild.templates.cudatemplates import CudaProject
+from mesonbuild.templates.objctemplates import ObjCProject
+from mesonbuild.templates.cpptemplates import CppProject
+from mesonbuild.templates.cstemplates import CSharpProject
+from mesonbuild.templates.ctemplates import CProject
+from mesonbuild.templates.sampleimpl import SampleImpl
+
+import argparse
+
+def sameple_generator(options: argparse.Namespace) -> SampleImpl:
+    return {
+        'c': CProject,
+        'cpp': CppProject,
+        'cs': CSharpProject,
+        'cuda': CudaProject,
+        'objc': ObjCProject,
+        'objcpp': ObjCppProject,
+        'java': JavaProject,
+        'd': DlangProject,
+        'rust': RustProject,
+        'fortran': FortranProject
+    }[options.language](options)
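A brief sketch of how this factory is meant to be used (the option values are examples, not part of the patch):

# Hypothetical call into the sample factory; 'demo' and '0.1' are examples.
import argparse
from mesonbuild.templates.samplefactory import sameple_generator

opts = argparse.Namespace(language='c', name='demo', version='0.1')
impl = sameple_generator(opts)   # -> CProject instance
impl.create_executable()         # writes demo.c and meson.build in the cwd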
diff --git a/meson/mesonbuild/templates/sampleimpl.py b/meson/mesonbuild/templates/sampleimpl.py
new file mode 100644
index 000000000..2d1498b83
--- /dev/null
+++ b/meson/mesonbuild/templates/sampleimpl.py
@@ -0,0 +1,21 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class SampleImpl:
+    def create_executable(self) -> None:
+        raise NotImplementedError('Sample implementation for "executable" not implemented!')
+
+    def create_library(self) -> None:
+        raise NotImplementedError('Sample implementation for "library" not implemented!')
diff --git a/meson/mesonbuild/wrap/__init__.py b/meson/mesonbuild/wrap/__init__.py
new file mode 100644
index 000000000..653f42ab9
--- /dev/null
+++ b/meson/mesonbuild/wrap/__init__.py
@@ -0,0 +1,59 @@
+from enum import Enum
+
+# Used for the --wrap-mode command-line argument
+#
+# Special wrap modes:
+#   nofallback: Don't download wraps for dependency() fallbacks
+#   nodownload: Don't download wraps for all subproject() calls
+#
+# subprojects are used for two purposes:
+# 1. To download and build dependencies by using .wrap
+#    files if they are not provided by the system. This is
+#    usually expressed via dependency(..., fallback: ...).
+# 2. To download and build 'copylibs' which are meant to be
+#    used by copying into your project. This is always done
+#    with an explicit subproject() call.
+#
+# --wrap-mode=nofallback will never do (1)
+# --wrap-mode=nodownload will do neither (1) nor (2)
+#
+# If you are building from a release tarball, you should be
+# able to safely use 'nodownload' since upstream is
+# expected to ship all required sources with the tarball.
+#
+# If you are building from a git repository, you will want
+# to use 'nofallback' so that any 'copylib' wraps will be
+# downloaded as subprojects.
+#
+# --wrap-mode=forcefallback will ignore external dependencies,
+# even if they match the version requirements, and automatically
+# use the fallback if one was provided. This is useful for example
+# to make sure a project builds when using the fallbacks.
+#
+# Note that these options do not affect subprojects that
+# are git submodules since those are only usable in git
+# repositories, and you almost always want to download them.
+
+# This did _not_ work when inside the WrapMode class.
+# I don't know why. If you can fix this, patches welcome.
+string_to_value = {'default': 1,
+                   'nofallback': 2,
+                   'nodownload': 3,
+                   'forcefallback': 4,
+                   'nopromote': 5,
+                   }
+
+class WrapMode(Enum):
+    default = 1
+    nofallback = 2
+    nodownload = 3
+    forcefallback = 4
+    nopromote = 5
+
+    def __str__(self) -> str:
+        return self.name
+
+    @staticmethod
+    def from_string(mode_name: str) -> 'WrapMode':
+        g = string_to_value[mode_name]
+        return WrapMode(g)
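A minimal sketch of the round trip described in the comment block above; the mode string would normally come from the --wrap-mode command-line option.

# Example only: convert a --wrap-mode string to the enum and back.
from mesonbuild.wrap import WrapMode

mode = WrapMode.from_string('nofallback')
print(mode)                          # 'nofallback', via __str__
print(mode is WrapMode.nofallback)   # True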
diff --git a/meson/mesonbuild/wrap/wrap.py b/meson/mesonbuild/wrap/wrap.py
new file mode 100644
index 000000000..6c145ab6a
--- /dev/null
+++ b/meson/mesonbuild/wrap/wrap.py
@@ -0,0 +1,607 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mlog
+import contextlib
+import urllib.request
+import urllib.error
+import urllib.parse
+import os
+import hashlib
+import shutil
+import tempfile
+import stat
+import subprocess
+import sys
+import configparser
+import typing as T
+import textwrap
+
+from pathlib import Path
+from . import WrapMode
+from .. import coredata
+from ..mesonlib import quiet_git, GIT, ProgressBar, MesonException
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+    import http.client
+
+try:
+    # Importing is just done to check if SSL exists, so all warnings
+    # regarding 'imported but unused' can be safely ignored
+    import ssl  # noqa
+    has_ssl = True
+except ImportError:
+    has_ssl = False
+
+REQ_TIMEOUT = 600.0
+SSL_WARNING_PRINTED = False
+WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com'
+
+ALL_TYPES = ['file', 'git', 'hg', 'svn']
+
+def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
+    """ raises WrapException if not whitelisted subdomain """
+    url = urllib.parse.urlparse(urlstr)
+    if not url.hostname:
+        raise WrapException(f'{urlstr} is not a valid URL')
+    if not url.hostname.endswith(WHITELIST_SUBDOMAIN):
+        raise WrapException(f'{urlstr} is not a whitelisted WrapDB URL')
+    if has_ssl and not url.scheme == 'https':
+        raise WrapException(f'WrapDB did not have expected SSL https url, instead got {urlstr}')
+    return url
+
+def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse':
+    global SSL_WARNING_PRINTED
+
+    url = whitelist_wrapdb(urlstring)
+    if has_ssl:
+        try:
+            return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT))
+        except urllib.error.URLError as excp:
+            raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}')
+
+    # following code is only for those without Python SSL
+    nossl_url = url._replace(scheme='http')
+    if not SSL_WARNING_PRINTED:
+        mlog.warning(f'SSL module not available in {sys.executable}: WrapDB traffic not authenticated.')
+        SSL_WARNING_PRINTED = True
+    try:
+        return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT))
+    except urllib.error.URLError as excp:
+        raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}')
+
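To illustrate the whitelist check above (both URLs are invented examples, not real wrap entries):

# Example only: whitelist_wrapdb() accepts wrapdb.mesonbuild.com URLs and
# rejects everything else with a WrapException.
from mesonbuild.wrap.wrap import WrapException, whitelist_wrapdb

print(whitelist_wrapdb('https://wrapdb.mesonbuild.com/v2/example/example.wrap'))
try:
    whitelist_wrapdb('https://example.com/not-wrapdb.wrap')
except WrapException as e:
    print(e)   # '... is not a whitelisted WrapDB URL'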
+
+class WrapException(MesonException):
+    pass
+
+class WrapNotFoundException(WrapException):
+    pass
+
+class PackageDefinition:
+    def __init__(self, fname: str):
+        self.filename = fname
+        self.type = None  # type: T.Optional[str]
+        self.values = {} # type: T.Dict[str, str]
+        self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
+        self.provided_programs = [] # type: T.List[str]
+        self.basename = os.path.basename(fname)
+        self.has_wrap = self.basename.endswith('.wrap')
+        self.name = self.basename[:-5] if self.has_wrap else self.basename
+        self.directory = self.name
+        self.provided_deps[self.name] = None
+        self.original_filename = fname
+        self.redirected = False
+        if self.has_wrap:
+            self.parse_wrap()
+        self.directory = self.values.get('directory', self.name)
+        if os.path.dirname(self.directory):
+            raise WrapException('Directory key must be a name and not a path')
+        if self.type and self.type not in ALL_TYPES:
+            raise WrapException(f'Unknown wrap type {self.type!r}')
+        self.filesdir = os.path.join(os.path.dirname(self.filename), 'packagefiles')
+        # What the original file name was before redirection
+
+    def parse_wrap(self) -> None:
+        try:
+            config = configparser.ConfigParser(interpolation=None)
+            config.read(self.filename)
+        except configparser.Error as e:
+            raise WrapException('Failed to parse {}: {}'.format(self.basename, str(e)))
+        self.parse_wrap_section(config)
+        if self.type == 'redirect':
+            # [wrap-redirect] have a `filename` value pointing to the real wrap
+            # file we should parse instead. It must be relative to the current
+            # wrap file location and must be in the form foo/subprojects/bar.wrap.
+            dirname = Path(self.filename).parent
+            fname = Path(self.values['filename'])
+            for i, p in enumerate(fname.parts):
+                if i % 2 == 0:
+                    if p == '..':
+                        raise WrapException('wrap-redirect filename cannot contain ".."')
+                else:
+                    if p != 'subprojects':
+                        raise WrapException('wrap-redirect filename must be in the form foo/subprojects/bar.wrap')
+            if fname.suffix != '.wrap':
+                raise WrapException('wrap-redirect filename must be a .wrap file')
+            fname = dirname / fname
+            if not fname.is_file():
+                raise WrapException(f'wrap-redirect filename {fname} does not exist')
+            self.filename = str(fname)
+            self.parse_wrap()
+            self.redirected = True
+            return
+        self.parse_provide_section(config)
+
+    def parse_wrap_section(self, config: configparser.ConfigParser) -> None:
+        if len(config.sections()) < 1:
+            raise WrapException(f'Missing sections in {self.basename}')
+        self.wrap_section = config.sections()[0]
+        if not self.wrap_section.startswith('wrap-'):
+            m = '{!r} is not a valid first section in {}'
+            raise WrapException(m.format(self.wrap_section, self.basename))
+        self.type = self.wrap_section[5:]
+        self.values = dict(config[self.wrap_section])
+
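+    # Illustrative [provide] section handled below (names are hypothetical):
+    #   [provide]
+    #   dependency_names = foo, bar
+    #   program_names = foo-tool
+    #   libfoo = foo_dep
+    # The last form maps a dependency name to the variable that holds it in
+    # the subproject.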
+    def parse_provide_section(self, config: configparser.ConfigParser) -> None:
+        if config.has_section('provide'):
+            for k, v in config['provide'].items():
+                if k == 'dependency_names':
+                    # A comma-separated list of dependency names that do not
+                    # need a variable name.
+                    names_dict = {n.strip(): None for n in v.split(',')}
+                    self.provided_deps.update(names_dict)
+                    continue
+                if k == 'program_names':
+                    # A comma-separated list of program names.
+                    names_list = [n.strip() for n in v.split(',')]
+                    self.provided_programs += names_list
+                    continue
+                if not v:
+                    m = ('Empty dependency variable name for {!r} in {}. '
+                         'If the subproject uses meson.override_dependency() '
+                         'it can be added in the "dependency_names" special key.')
+                    raise WrapException(m.format(k, self.basename))
+                self.provided_deps[k] = v
+
+    def get(self, key: str) -> str:
+        try:
+            return self.values[key]
+        except KeyError:
+            m = 'Missing key {!r} in {}'
+            raise WrapException(m.format(key, self.basename))
+
+def get_directory(subdir_root: str, packagename: str) -> str:
+    fname = os.path.join(subdir_root, packagename + '.wrap')
+    if os.path.isfile(fname):
+        wrap = PackageDefinition(fname)
+        return wrap.directory
+    return packagename
+
+def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
+    '''
+    Wrapper to convert GitException to WrapException caught in interpreter.
+    '''
+    try:
+        return mesonlib.verbose_git(cmd, workingdir, check=check)
+    except mesonlib.GitException as e:
+        raise WrapException(str(e))
+
+class Resolver:
+    def __init__(self, source_dir: str, subdir: str, wrap_mode: WrapMode = WrapMode.default) -> None:
+        self.source_dir = source_dir
+        self.subdir = subdir
+        self.wrap_mode = wrap_mode
+        self.subdir_root = os.path.join(source_dir, subdir)
+        self.cachedir = os.path.join(self.subdir_root, 'packagecache')
+        self.wraps = {} # type: T.Dict[str, PackageDefinition]
+        self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
+        self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+        self.load_wraps()
+
+    def load_wraps(self) -> None:
+        if not os.path.isdir(self.subdir_root):
+            return
+        root, dirs, files = next(os.walk(self.subdir_root))
+        for i in files:
+            if not i.endswith('.wrap'):
+                continue
+            fname = os.path.join(self.subdir_root, i)
+            wrap = PackageDefinition(fname)
+            self.wraps[wrap.name] = wrap
+            if wrap.directory in dirs:
+                dirs.remove(wrap.directory)
+        # Add dummy package definition for directories not associated with a wrap file.
+        for i in dirs:
+            if i in ['packagecache', 'packagefiles']:
+                continue
+            fname = os.path.join(self.subdir_root, i)
+            wrap = PackageDefinition(fname)
+            self.wraps[wrap.name] = wrap
+
+        for wrap in self.wraps.values():
+            for k in wrap.provided_deps.keys():
+                if k in self.provided_deps:
+                    prev_wrap = self.provided_deps[k]
+                    m = 'Multiple wrap files provide {!r} dependency: {} and {}'
+                    raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+                self.provided_deps[k] = wrap
+            for k in wrap.provided_programs:
+                if k in self.provided_programs:
+                    prev_wrap = self.provided_programs[k]
+                    m = 'Multiple wrap files provide {!r} program: {} and {}'
+                    raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+                self.provided_programs[k] = wrap
+
+    def merge_wraps(self, other_resolver: 'Resolver') -> None:
+        for k, v in other_resolver.wraps.items():
+            self.wraps.setdefault(k, v)
+        for k, v in other_resolver.provided_deps.items():
+            self.provided_deps.setdefault(k, v)
+        for k, v in other_resolver.provided_programs.items():
+            self.provided_programs.setdefault(k, v)
+
+    def find_dep_provider(self, packagename: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
+        # Python's ini parser converts all key values to lowercase.
+        # Thus the query name must also be in lower case.
+        packagename = packagename.lower()
+        wrap = self.provided_deps.get(packagename)
+        if wrap:
+            dep_var = wrap.provided_deps.get(packagename)
+            return wrap.name, dep_var
+        return None, None
+
+    def get_varname(self, subp_name: str, depname: str) -> T.Optional[str]:
+        wrap = self.wraps.get(subp_name)
+        return wrap.provided_deps.get(depname) if wrap else None
+
+    def find_program_provider(self, names: T.List[str]) -> T.Optional[str]:
+        for name in names:
+            wrap = self.provided_programs.get(name)
+            if wrap:
+                return wrap.name
+        return None
+
+    def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str:
+        self.current_subproject = current_subproject
+        self.packagename = packagename
+        self.directory = packagename
+        self.wrap = self.wraps.get(packagename)
+        if not self.wrap:
+            m = 'Neither a subproject directory nor a {}.wrap file was found.'
+            raise WrapNotFoundException(m.format(self.packagename))
+        self.directory = self.wrap.directory
+
+        if self.wrap.has_wrap:
+            # We have a .wrap file, source code will be placed into main
+            # project's subproject_dir even if the wrap file comes from another
+            # subproject.
+            self.dirname = os.path.join(self.subdir_root, self.directory)
+            # Check if the wrap comes from the main project.
+            main_fname = os.path.join(self.subdir_root, self.wrap.basename)
+            if self.wrap.filename != main_fname:
+                rel = os.path.relpath(self.wrap.filename, self.source_dir)
+                mlog.log('Using', mlog.bold(rel))
+                # Write a dummy wrap file in the main project that redirects to
+                # the wrap we picked.
+                with open(main_fname, 'w', encoding='utf-8') as f:
+                    f.write(textwrap.dedent('''\
+                        [wrap-redirect]
+                        filename = {}
+                        '''.format(os.path.relpath(self.wrap.filename, self.subdir_root))))
+        else:
+            # No wrap file, it's a dummy package definition for an existing
+            # directory. Use the source code in place.
+            self.dirname = self.wrap.filename
+        rel_path = os.path.relpath(self.dirname, self.source_dir)
+
+        meson_file = os.path.join(self.dirname, 'meson.build')
+        cmake_file = os.path.join(self.dirname, 'CMakeLists.txt')
+
+        if method not in ['meson', 'cmake']:
+            raise WrapException('Only the methods "meson" and "cmake" are supported')
+
+        # The directory is there and has meson.build? Great, use it.
+        if method == 'meson' and os.path.exists(meson_file):
+            return rel_path
+        if method == 'cmake' and os.path.exists(cmake_file):
+            return rel_path
+
+        # Check if the subproject is a git submodule
+        self.resolve_git_submodule()
+
+        if os.path.exists(self.dirname):
+            if not os.path.isdir(self.dirname):
+                raise WrapException('Path already exists but is not a directory')
+        else:
+            if self.wrap.type == 'file':
+                self.get_file()
+            else:
+                self.check_can_download()
+                if self.wrap.type == 'git':
+                    self.get_git()
+                elif self.wrap.type == "hg":
+                    self.get_hg()
+                elif self.wrap.type == "svn":
+                    self.get_svn()
+                else:
+                    raise WrapException(f'Unknown wrap type {self.wrap.type!r}')
+            self.apply_patch()
+
+        # A meson.build or CMakeLists.txt file is required in the directory
+        if method == 'meson' and not os.path.exists(meson_file):
+            raise WrapException('Subproject exists but has no meson.build file')
+        if method == 'cmake' and not os.path.exists(cmake_file):
+            raise WrapException('Subproject exists but has no CMakeLists.txt file')
+
+        return rel_path
+
+    def check_can_download(self) -> None:
+        # Don't download subproject data based on wrap file if requested.
+        # Git submodules are ok (see above)!
+        if self.wrap_mode is WrapMode.nodownload:
+            m = 'Automatic wrap-based subproject downloading is disabled'
+            raise WrapException(m)
+
+    def resolve_git_submodule(self) -> bool:
+        # Is git installed? If not, we're probably not in a git repository and
+        # definitely cannot try to conveniently set up a submodule.
+        if not GIT:
+            return False
+        # Are we in a git repository?
+        ret, out = quiet_git(['rev-parse'], self.subdir_root)
+        if not ret:
+            return False
+        # Is `dirname` a submodule?
+        ret, out = quiet_git(['submodule', 'status', self.dirname], self.subdir_root)
+        if not ret:
+            return False
+        # Submodule has not been added, add it
+        if out.startswith('+'):
+            mlog.warning('git submodule might be out of date')
+            return True
+        elif out.startswith('U'):
+            raise WrapException('git submodule has merge conflicts')
+        # Submodule exists, but is deinitialized or wasn't initialized
+        elif out.startswith('-'):
+            if verbose_git(['submodule', 'update', '--init', self.dirname], self.subdir_root):
+                return True
+            raise WrapException('git submodule failed to init')
+        # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout.
+        elif out.startswith(' '):
+            verbose_git(['checkout', '.'], self.dirname)
+            # Even if checkout failed, try building it anyway and let the user
+            # handle any problems manually.
+            return True
+        elif out == '':
+            # It is not a submodule, just a folder that exists in the main repository.
+            return False
+        m = 'Unknown git submodule output: {!r}'
+        raise WrapException(m.format(out))
+
+    def get_file(self) -> None:
+        path = self.get_file_internal('source')
+        extract_dir = self.subdir_root
+        # Some upstreams ship packages that do not have a leading directory.
+        # Create one for them.
+        if 'lead_directory_missing' in self.wrap.values:
+            os.mkdir(self.dirname)
+            extract_dir = self.dirname
+        shutil.unpack_archive(path, extract_dir)
+
+    def get_git(self) -> None:
+        if not GIT:
+            raise WrapException('Git program not found.')
+        revno = self.wrap.get('revision')
+        is_shallow = False
+        depth_option = []    # type: T.List[str]
+        if self.wrap.values.get('depth', '') != '':
+            is_shallow = True
+            depth_option = ['--depth', self.wrap.values.get('depth')]
+        # For some reason git only allows commit ids to be fetched shallowly with fetch, not with clone.
+        if is_shallow and self.is_git_full_commit_id(revno):
+            # git doesn't support directly cloning shallowly for commits,
+            # so we follow https://stackoverflow.com/a/43136160
+            verbose_git(['init', self.directory], self.subdir_root, check=True)
+            verbose_git(['remote', 'add', 'origin', self.wrap.get('url')], self.dirname, check=True)
+            revno = self.wrap.get('revision')
+            verbose_git(['fetch', *depth_option, 'origin', revno], self.dirname, check=True)
+            verbose_git(['checkout', revno, '--'], self.dirname, check=True)
+            if self.wrap.values.get('clone-recursive', '').lower() == 'true':
+                verbose_git(['submodule', 'update', '--init', '--checkout',
+                             '--recursive', *depth_option], self.dirname, check=True)
+            push_url = self.wrap.values.get('push-url')
+            if push_url:
+                verbose_git(['remote', 'set-url', '--push', 'origin', push_url], self.dirname, check=True)
+        else:
+            if not is_shallow:
+                verbose_git(['clone', self.wrap.get('url'), self.directory], self.subdir_root, check=True)
+                if revno.lower() != 'head':
+                    if not verbose_git(['checkout', revno, '--'], self.dirname):
+                        verbose_git(['fetch', self.wrap.get('url'), revno], self.dirname, check=True)
+                        verbose_git(['checkout', revno, '--'], self.dirname, check=True)
+            else:
+                verbose_git(['clone', *depth_option, '--branch', revno, self.wrap.get('url'),
+                             self.directory], self.subdir_root, check=True)
+            if self.wrap.values.get('clone-recursive', '').lower() == 'true':
+                verbose_git(['submodule', 'update', '--init', '--checkout', '--recursive', *depth_option],
+                            self.dirname, check=True)
+            push_url = self.wrap.values.get('push-url')
+            if push_url:
+                verbose_git(['remote', 'set-url', '--push', 'origin', push_url], self.dirname, check=True)
+
+    def is_git_full_commit_id(self, revno: str) -> bool:
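+        # Illustrative: a 40-character hex string such as
+        # 'baadf00dbaadf00dbaadf00dbaadf00dbaadf00d' counts as a full commit
+        # id; short ids, branch and tag names do not.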
+        result = False
+        if len(revno) in (40, 64): # 40 for sha1, 64 for upcoming sha256
+            result = all(ch in '0123456789AaBbCcDdEeFf' for ch in revno)
+        return result
+
+    def get_hg(self) -> None:
+        revno = self.wrap.get('revision')
+        hg = shutil.which('hg')
+        if not hg:
+            raise WrapException('Mercurial program not found.')
+        subprocess.check_call([hg, 'clone', self.wrap.get('url'),
+                               self.directory], cwd=self.subdir_root)
+        if revno.lower() != 'tip':
+            subprocess.check_call([hg, 'checkout', revno],
+                                  cwd=self.dirname)
+
+    def get_svn(self) -> None:
+        revno = self.wrap.get('revision')
+        svn = shutil.which('svn')
+        if not svn:
+            raise WrapException('SVN program not found.')
+        subprocess.check_call([svn, 'checkout', '-r', revno, self.wrap.get('url'),
+                               self.directory], cwd=self.subdir_root)
+
+    def get_data(self, urlstring: str) -> T.Tuple[str, str]:
+        blocksize = 10 * 1024
+        h = hashlib.sha256()
+        tmpfile = tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False)
+        url = urllib.parse.urlparse(urlstring)
+        if url.hostname and url.hostname.endswith(WHITELIST_SUBDOMAIN):
+            resp = open_wrapdburl(urlstring)
+        elif WHITELIST_SUBDOMAIN in urlstring:
+            raise WrapException(f'{urlstring} may be a WrapDB-impersonating URL')
+        else:
+            try:
+                req = urllib.request.Request(urlstring, headers={'User-Agent': f'mesonbuild/{coredata.version}'})
+                resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
+            except urllib.error.URLError as e:
+                mlog.log(str(e))
+                raise WrapException(f'could not get {urlstring}; is the internet available?')
+        with contextlib.closing(resp) as resp:
+            try:
+                dlsize = int(resp.info()['Content-Length'])
+            except TypeError:
+                dlsize = None
+            if dlsize is None:
+                print('Downloading file of unknown size.')
+                while True:
+                    block = resp.read(blocksize)
+                    if block == b'':
+                        break
+                    h.update(block)
+                    tmpfile.write(block)
+                hashvalue = h.hexdigest()
+                return hashvalue, tmpfile.name
+            sys.stdout.flush()
+            progress_bar = ProgressBar(bar_type='download', total=dlsize,
+                                       desc='Downloading')
+            while True:
+                block = resp.read(blocksize)
+                if block == b'':
+                    break
+                h.update(block)
+                tmpfile.write(block)
+                progress_bar.update(len(block))
+            progress_bar.close()
+            hashvalue = h.hexdigest()
+        return hashvalue, tmpfile.name
+
+    def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
+        if what + '_hash' not in self.wrap.values and not hash_required:
+            return
+        expected = self.wrap.get(what + '_hash').lower()
+        h = hashlib.sha256()
+        with open(path, 'rb') as f:
+            h.update(f.read())
+        dhash = h.hexdigest()
+        if dhash != expected:
+            raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')
+
+    def download(self, what: str, ofname: str, fallback: bool = False) -> None:
+        self.check_can_download()
+        srcurl = self.wrap.get(what + ('_fallback_url' if fallback else '_url'))
+        mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl))
+        try:
+            dhash, tmpfile = self.get_data(srcurl)
+            expected = self.wrap.get(what + '_hash').lower()
+            if dhash != expected:
+                os.remove(tmpfile)
+                raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')
+        except WrapException:
+            if not fallback:
+                if what + '_fallback_url' in self.wrap.values:
+                    return self.download(what, ofname, fallback=True)
+                mlog.log('A fallback URL could be specified using',
+                         mlog.bold(what + '_fallback_url'), 'key in the wrap file')
+            raise
+        os.rename(tmpfile, ofname)
+
+    def get_file_internal(self, what: str) -> str:
+        filename = self.wrap.get(what + '_filename')
+        if what + '_url' in self.wrap.values:
+            cache_path = os.path.join(self.cachedir, filename)
+
+            if os.path.exists(cache_path):
+                self.check_hash(what, cache_path)
+                mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+                return cache_path
+
+            if not os.path.isdir(self.cachedir):
+                os.mkdir(self.cachedir)
+            self.download(what, cache_path)
+            return cache_path
+        else:
+            from ..interpreterbase import FeatureNew
+            FeatureNew(f'Local wrap patch files without {what}_url', '0.55.0').use(self.current_subproject)
+            path = Path(self.wrap.filesdir) / filename
+
+            if not path.exists():
+                raise WrapException(f'File "{path}" does not exist')
+            self.check_hash(what, path.as_posix(), hash_required=False)
+
+            return path.as_posix()
+
+    def apply_patch(self) -> None:
+        if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values:
+            m = 'Wrap file {!r} must not have both "patch_filename" and "patch_directory"'
+            raise WrapException(m.format(self.wrap.basename))
+        if 'patch_filename' in self.wrap.values:
+            path = self.get_file_internal('patch')
+            try:
+                shutil.unpack_archive(path, self.subdir_root)
+            except Exception:
+                with tempfile.TemporaryDirectory() as workdir:
+                    shutil.unpack_archive(path, workdir)
+                    self.copy_tree(workdir, self.subdir_root)
+        elif 'patch_directory' in self.wrap.values:
+            from ..interpreterbase import FeatureNew
+            FeatureNew('patch_directory', '0.55.0').use(self.current_subproject)
+            patch_dir = self.wrap.values['patch_directory']
+            src_dir = os.path.join(self.wrap.filesdir, patch_dir)
+            if not os.path.isdir(src_dir):
+                raise WrapException(f'patch directory does not exist: {patch_dir}')
+            self.copy_tree(src_dir, self.dirname)
+
+    def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
+        """
+        Copy a directory tree, overwriting read-only files as needed.
+        """
+        for src_dir, _, files in os.walk(root_src_dir):
+            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
+            if not os.path.exists(dst_dir):
+                os.makedirs(dst_dir)
+            for file_ in files:
+                src_file = os.path.join(src_dir, file_)
+                dst_file = os.path.join(dst_dir, file_)
+                if os.path.exists(dst_file):
+                    try:
+                        os.remove(dst_file)
+                    except PermissionError:
+                        os.chmod(dst_file, stat.S_IWUSR)
+                        os.remove(dst_file)
+                shutil.copy2(src_file, dst_dir)
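+
+# Minimal usage sketch (hypothetical paths; in practice the interpreter
+# drives this):
+#   resolver = Resolver('/path/to/source', 'subprojects')
+#   subproject_relpath = resolver.resolve('foo', 'meson')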
diff --git a/meson/mesonbuild/wrap/wraptool.py b/meson/mesonbuild/wrap/wraptool.py
new file mode 100644
index 000000000..222996d46
--- /dev/null
+++ b/meson/mesonbuild/wrap/wraptool.py
@@ -0,0 +1,220 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import sys, os
+import configparser
+import shutil
+import typing as T
+
+from glob import glob
+from urllib.parse import urlparse
+from urllib.request import urlopen
+from .wrap import WrapException
+
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+    import argparse
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    subparsers = parser.add_subparsers(title='Commands', dest='command')
+    subparsers.required = True
+
+    p = subparsers.add_parser('list', help='show all available projects')
+    p.set_defaults(wrap_func=list_projects)
+
+    p = subparsers.add_parser('search', help='search the db by name')
+    p.add_argument('name')
+    p.set_defaults(wrap_func=search)
+
+    p = subparsers.add_parser('install', help='install the specified project')
+    p.add_argument('name')
+    p.set_defaults(wrap_func=install)
+
+    p = subparsers.add_parser('update', help='update the project to its newest available release')
+    p.add_argument('name')
+    p.set_defaults(wrap_func=update)
+
+    p = subparsers.add_parser('info', help='show available versions of a project')
+    p.add_argument('name')
+    p.set_defaults(wrap_func=info)
+
+    p = subparsers.add_parser('status', help='show installed and available versions of your projects')
+    p.set_defaults(wrap_func=status)
+
+    p = subparsers.add_parser('promote', help='bring a subsubproject up to the master project')
+    p.add_argument('project_path')
+    p.set_defaults(wrap_func=promote)
+
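+# releases.json maps project names to their metadata; only the 'versions'
+# list is used below, e.g. (illustrative): {"zlib": {"versions": ["1.2.11-5"]}}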
+def get_releases() -> T.Dict[str, T.Any]:
+    url = urlopen('https://wrapdb.mesonbuild.com/v2/releases.json')
+    return T.cast(T.Dict[str, T.Any], json.loads(url.read().decode()))
+
+def list_projects(options: 'argparse.Namespace') -> None:
+    releases = get_releases()
+    for p in releases.keys():
+        print(p)
+
+def search(options: 'argparse.Namespace') -> None:
+    name = options.name
+    releases = get_releases()
+    for p in releases.keys():
+        if p.startswith(name):
+            print(p)
+
+def get_latest_version(name: str) -> T.Tuple[str, str]:
+    releases = get_releases()
+    info = releases.get(name)
+    if not info:
+        raise WrapException(f'Wrap {name} not found in wrapdb')
+    latest_version = info['versions'][0]
+    version, revision = latest_version.rsplit('-', 1)
+    return version, revision
+
+def install(options: 'argparse.Namespace') -> None:
+    name = options.name
+    if not os.path.isdir('subprojects'):
+        raise SystemExit('Subprojects dir not found. Run this script in your source root directory.')
+    if os.path.isdir(os.path.join('subprojects', name)):
+        raise SystemExit('Subproject directory for this project already exists.')
+    wrapfile = os.path.join('subprojects', name + '.wrap')
+    if os.path.exists(wrapfile):
+        raise SystemExit('Wrap file already exists.')
+    (version, revision) = get_latest_version(name)
+    url = urlopen(f'https://wrapdb.mesonbuild.com/v2/{name}_{version}-{revision}/{name}.wrap')
+    with open(wrapfile, 'wb') as f:
+        f.write(url.read())
+    print(f'Installed {name} version {version} revision {revision}')
+
+def parse_patch_url(patch_url: str) -> T.Tuple[str, str]:
+    u = urlparse(patch_url)
+    if u.netloc != 'wrapdb.mesonbuild.com':
+        raise WrapException(f'URL {patch_url} does not seem to be a wrapdb patch')
+    arr = u.path.strip('/').split('/')
+    if arr[0] == 'v1':
+        # e.g. https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/5/get_zip
+        return arr[-3], arr[-2]
+    elif arr[0] == 'v2':
+        # e.g. https://wrapdb.mesonbuild.com/v2/zlib_1.2.11-5/get_patch
+        tag = arr[-2]
+        name, version = tag.rsplit('_', 1)
+        version, revision = version.rsplit('-', 1)
+        return version, revision
+    else:
+        raise WrapException(f'Invalid wrapdb URL {patch_url}')
+
+def get_current_version(wrapfile: str) -> T.Tuple[str, str, str, str, str]:
+    cp = configparser.ConfigParser(interpolation=None)
+    cp.read(wrapfile)
+    wrap_data = cp['wrap-file']
+    patch_url = wrap_data['patch_url']
+    branch, revision = parse_patch_url(patch_url)
+    return branch, revision, wrap_data['directory'], wrap_data['source_filename'], wrap_data['patch_filename']
+
+def update_wrap_file(wrapfile: str, name: str, new_version: str, new_revision: str) -> None:
+    url = urlopen(f'https://wrapdb.mesonbuild.com/v2/{name}_{new_version}-{new_revision}/{name}.wrap')
+    with open(wrapfile, 'wb') as f:
+        f.write(url.read())
+
+def update(options: 'argparse.Namespace') -> None:
+    name = options.name
+    if not os.path.isdir('subprojects'):
+        raise SystemExit('Subprojects dir not found. Run this command in your source root directory.')
+    wrapfile = os.path.join('subprojects', name + '.wrap')
+    if not os.path.exists(wrapfile):
+        raise SystemExit('Project ' + name + ' is not in use.')
+    (branch, revision, subdir, src_file, patch_file) = get_current_version(wrapfile)
+    (new_branch, new_revision) = get_latest_version(name)
+    if new_branch == branch and new_revision == revision:
+        print('Project ' + name + ' is already up to date.')
+        raise SystemExit
+    update_wrap_file(wrapfile, name, new_branch, new_revision)
+    shutil.rmtree(os.path.join('subprojects', subdir), ignore_errors=True)
+    try:
+        os.unlink(os.path.join('subprojects/packagecache', src_file))
+    except FileNotFoundError:
+        pass
+    try:
+        os.unlink(os.path.join('subprojects/packagecache', patch_file))
+    except FileNotFoundError:
+        pass
+    print(f'Updated {name} version {new_branch} revision {new_revision}')
+
+def info(options: 'argparse.Namespace') -> None:
+    name = options.name
+    releases = get_releases()
+    info = releases.get(name)
+    if not info:
+        raise WrapException(f'Wrap {name} not found in wrapdb')
+    print(f'Available versions of {name}:')
+    for v in info['versions']:
+        print(' ', v)
+
+def do_promotion(from_path: str, spdir_name: str) -> None:
+    if os.path.isfile(from_path):
+        assert from_path.endswith('.wrap')
+        shutil.copy(from_path, spdir_name)
+    elif os.path.isdir(from_path):
+        sproj_name = os.path.basename(from_path)
+        outputdir = os.path.join(spdir_name, sproj_name)
+        if os.path.exists(outputdir):
+            raise SystemExit(f'Output dir {outputdir} already exists. Will not overwrite.')
+        shutil.copytree(from_path, outputdir, ignore=shutil.ignore_patterns('subprojects'))
+
+def promote(options: 'argparse.Namespace') -> None:
+    argument = options.project_path
+    spdir_name = 'subprojects'
+    sprojs = mesonlib.detect_subprojects(spdir_name)
+
+    # check if the argument is a full path to a subproject directory or wrap file
+    system_native_path_argument = argument.replace('/', os.sep)
+    for matches in sprojs.values():
+        if system_native_path_argument in matches:
+            do_promotion(system_native_path_argument, spdir_name)
+            return
+
+    # otherwise the argument is just a subproject basename which must be unambiguous
+    if argument not in sprojs:
+        raise SystemExit(f'Subproject {argument} not found in directory tree.')
+    matches = sprojs[argument]
+    if len(matches) > 1:
+        print(f'There is more than one version of {argument} in tree. Please specify which one to promote:\n', file=sys.stderr)
+        for s in matches:
+            print(s, file=sys.stderr)
+        raise SystemExit(1)
+    do_promotion(matches[0], spdir_name)
+
+def status(options: 'argparse.Namespace') -> None:
+    print('Subproject status')
+    for w in glob('subprojects/*.wrap'):
+        name = os.path.basename(w)[:-5]
+        try:
+            (latest_branch, latest_revision) = get_latest_version(name)
+        except Exception:
+            print('', name, 'not available in wrapdb.', file=sys.stderr)
+            continue
+        try:
+            (current_branch, current_revision, _, _, _) = get_current_version(w)
+        except Exception:
+            print('Wrap file not from wrapdb.', file=sys.stderr)
+            continue
+        if current_branch == latest_branch and current_revision == latest_revision:
+            print('', name, f'up to date. Branch {current_branch}, revision {current_revision}.')
+        else:
+            print('', name, f'not up to date. Have {current_branch} {current_revision}, but {latest_branch} {latest_revision} is available.')
+
+def run(options: 'argparse.Namespace') -> int:
+    options.wrap_func(options)
+    return 0
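+
+# Typical invocations, assuming these subcommands are wired up as `meson wrap`
+# (illustrative session):
+#   meson wrap list
+#   meson wrap install zlib
+#   meson wrap status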
diff --git a/meson/packaging/License.rtf b/meson/packaging/License.rtf
new file mode 100644
index 000000000..b3945baa7
--- /dev/null
+++ b/meson/packaging/License.rtf
@@ -0,0 +1,73 @@
+{\rtf1\ansi\ansicpg1252\deff0{\fonttbl{\f0\fswiss\fprq2\fcharset0 Arial;}}
+{\colortbl ;\red0\green0\blue255;}
+{\*\generator Msftedit 5.41.21.2510;}\viewkind4\uc1\pard\qc\lang1033\b\f0\fs18 Apache License\par
+Version 2.0, January 2004\par
+{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/"}}{\fldrslt{\ul\cf1 http://www.apache.org/licenses/}}}\f0\fs18\par
+\b0\par
+\pard TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\par
+\par
+\pard\fi-180\li180 1. Definitions.\par
+\par
+\pard\li180 "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.\par
+\par
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.\par
+\par
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.\par
+\par
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.\par
+\par
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.\par
+\par
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.\par
+\par
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).\par
+\par
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.\par
+\par
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."\par
+\par
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.\par
+\pard\par
+\pard\fi-180\li180 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.\par
+\par
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.\par
+\par
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:\par
+\pard\par
+\pard\fi-270\li450 (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and\par
+\par
+(b) You must cause any modified files to carry prominent notices stating that You changed the files; and\par
+\par
+(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and\par
+\par
+(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.\par
+\pard\par
+\pard\li180 You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or  for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.\par
+\pard\par
+\pard\fi-180\li180 5. Submission of Contributions. Unless You explicitly state otherwise,  any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.\par
+\par
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.\par
+\par
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.\par
+\par
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.\par
+\par
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.\par
+\pard\par
+END OF TERMS AND CONDITIONS\par
+\par
+APPENDIX: How to apply the Apache License to your work.\par
+\par
+To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!)  The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.\par
+\par
+\pard\li180 Copyright [yyyy] [name of copyright owner]\par
+\par
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.  You may obtain a copy of the License at\par
+\par
+\pard\li360{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt{\ul\cf1 http://www.apache.org/licenses/LICENSE-2.0}}}\f0\fs18\par
+\pard\li180\par
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.\par
+\pard\par
+\par
+}
+
\ No newline at end of file
diff --git a/meson/packaging/create_zipapp.py b/meson/packaging/create_zipapp.py
new file mode 100755
index 000000000..4e018bf51
--- /dev/null
+++ b/meson/packaging/create_zipapp.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+
+import argparse
+from pathlib import Path
+import shutil
+import sys
+import tempfile
+import zipapp
+
+parser = argparse.ArgumentParser()
+parser.add_argument('source', nargs='?', default='.', help='Source directory')
+parser.add_argument('--outfile', default='meson.pyz', help='Output file for the zipapp')
+parser.add_argument('--interpreter', default='/usr/bin/env python3', help='The name of the Python interpreter to use')
+
+options = parser.parse_args(sys.argv[1:])
+
+source = Path(options.source).resolve()
+
+with tempfile.TemporaryDirectory() as d:
+    shutil.copy2(source / 'meson.py', Path(d, '__main__.py'))
+    shutil.copytree(source / 'mesonbuild', Path(d, 'mesonbuild'))
+    zipapp.create_archive(d, interpreter=options.interpreter, target=options.outfile)
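+
+# Example invocation from the Meson source root (options as defined above):
+#   ./packaging/create_zipapp.py . --outfile meson.pyz --interpreter '/usr/bin/env python3'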
diff --git a/meson/packaging/createmsi.py b/meson/packaging/createmsi.py
new file mode 100755
index 000000000..c55688a0d
--- /dev/null
+++ b/meson/packaging/createmsi.py
@@ -0,0 +1,372 @@
+#!/usr/bin/env python3
+
+# Copyright 2017-2021 The Meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+This script is for generating MSI packages
+for Windows users.
+'''
+
+import subprocess
+import shutil
+import uuid
+import sys
+import os
+from glob import glob
+import xml.etree.ElementTree as ET
+
+sys.path.append(os.getcwd())
+from mesonbuild import coredata
+
+# ElementTree does not support CDATA, so a placeholder of equal length is
+# written first and swapped for the real CDATA expression after serialization.
+WINVER_CHECK = '<![CDATA[Installed OR (VersionNT64 > 602)]]>'
+
+def gen_guid():
+    '''
+       Generate guid
+    '''
+    return str(uuid.uuid4()).upper()
+
+def get_all_modules_from_dir(dirname):
+    '''
+    Get all modules required for Meson build MSI package
+    from directories.
+    '''
+    modname = os.path.basename(dirname)
+    modules = [os.path.splitext(os.path.split(x)[1])[0] for x in glob(os.path.join(dirname, '*'))]
+    modules = ['mesonbuild.' + modname + '.' + x for x in modules if not x.startswith('_')]
+    return modules
+
+def get_more_modules():
+    '''
+        Getter for missing Modules.
+        Python packagers try to be minimal and only copy the modules they can
+        see being used, so implicitly imported ones must be listed by hand here.
+    '''
+    return ['distutils.archive_util',
+            'distutils.cmd',
+            'distutils.config',
+            'distutils.core',
+            'distutils.debug',
+            'distutils.dep_util',
+            'distutils.dir_util',
+            'distutils.dist',
+            'distutils.errors',
+            'distutils.extension',
+            'distutils.fancy_getopt',
+            'distutils.file_util',
+            'distutils.spawn',
+            'distutils.util',
+            'distutils.version',
+            'distutils.command.build_ext',
+            'distutils.command.build',
+            'filecmp',
+            ]
+
+def get_modules():
+    modules = get_all_modules_from_dir('mesonbuild/modules')
+    modules += get_all_modules_from_dir('mesonbuild/scripts')
+    modules += get_more_modules()
+    return modules
+
+class Node:
+    '''
+       Node to hold path and directory values
+    '''
+
+    def __init__(self, dirs, files):
+        self.check_dirs(dirs)
+        self.check_files(files)
+        self.dirs = dirs
+        self.files = files
+
+    @staticmethod
+    def check_dirs(dirs):
+        '''
+           Check to see if directory is instance of list
+        '''
+        assert isinstance(dirs, list)
+
+    @staticmethod
+    def check_files(files):
+        '''
+           Check to see if files is instance of list
+        '''
+        assert isinstance(files, list)
+
+
+class PackageGenerator:
+    '''
+       Package generator for MSI packages
+    '''
+
+    def __init__(self):
+        self.product_name = 'Meson Build System'
+        self.manufacturer = 'The Meson Development Team'
+        self.version = coredata.version.replace('dev', '')
+        self.root = None
+        self.guid = '*'
+        self.update_guid = '141527EE-E28A-4D14-97A4-92E6075D28B2'
+        self.main_xml = 'meson.wxs'
+        self.main_o = 'meson.wixobj'
+        self.final_output = f'meson-{self.version}-64.msi'
+        self.staging_dirs = ['dist', 'dist2']
+        self.progfile_dir = 'ProgramFiles64Folder'
+        redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\v*\\MergeModules\\Microsoft_VC142_CRT_x64.msm'
+        trials = glob(redist_glob)
+        if len(trials) != 1:
+            sys.exit('Could not find a unique MSM setup:\n' + '\n'.join(trials))
+        self.redist_path = trials[0]
+        self.component_num = 0
+        self.feature_properties = {
+            self.staging_dirs[0]: {
+                'Id': 'MainProgram',
+                'Title': 'Meson',
+                'Description': 'Meson executables',
+                'Level': '1',
+                'Absent': 'disallow',
+            },
+            self.staging_dirs[1]: {
+                'Id': 'NinjaProgram',
+                'Title': 'Ninja',
+                'Description': 'Ninja build tool',
+                'Level': '1',
+            }
+        }
+        self.feature_components = {}
+        for s_d in self.staging_dirs:
+            self.feature_components[s_d] = []
+
+    def build_dist(self):
+        '''
+           Build dist file from PyInstaller info
+        '''
+        for sdir in self.staging_dirs:
+            if os.path.exists(sdir):
+                shutil.rmtree(sdir)
+        main_stage, ninja_stage = self.staging_dirs
+        modules = get_modules()
+
+        pyinstaller = shutil.which('pyinstaller')
+        if not pyinstaller:
+            print("ERROR: This script requires pyinstaller.")
+            sys.exit(1)
+
+        pyinstaller_tmpdir = 'pyinst-tmp'
+        if os.path.exists(pyinstaller_tmpdir):
+            shutil.rmtree(pyinstaller_tmpdir)
+        pyinst_cmd = [pyinstaller,
+                      '--clean',
+                      '--distpath',
+                      pyinstaller_tmpdir]
+        for m in modules:
+            pyinst_cmd += ['--hidden-import', m]
+        pyinst_cmd += ['meson.py']
+        subprocess.check_call(pyinst_cmd)
+        shutil.move(pyinstaller_tmpdir + '/meson', main_stage)
+        self.del_infodirs(main_stage)
+        if not os.path.exists(os.path.join(main_stage, 'meson.exe')):
+            sys.exit('Meson exe missing from staging dir.')
+        os.mkdir(ninja_stage)
+        shutil.copy(shutil.which('ninja'), ninja_stage)
+        if not os.path.exists(os.path.join(ninja_stage, 'ninja.exe')):
+            sys.exit('Ninja exe missing from staging dir.')
+
+    def del_infodirs(self, dirname):
+        # Starting with 3.9.something there are some
+        # extra metadatadirs that have a hyphen in their
+        # file names. This is a forbidden character in WiX
+        # filenames so delete them.
+        for d in glob(os.path.join(dirname, '*-info')):
+            shutil.rmtree(d)
+
+    def generate_files(self):
+        '''
+           Generate package files for MSI installer package
+        '''
+        self.root = ET.Element('Wix', {'xmlns': 'http://schemas.microsoft.com/wix/2006/wi'})
+        product = ET.SubElement(self.root, 'Product', {
+            'Name': self.product_name,
+            'Manufacturer': 'The Meson Development Team',
+            'Id': self.guid,
+            'UpgradeCode': self.update_guid,
+            'Language': '1033',
+            'Codepage':  '1252',
+            'Version': self.version,
+        })
+
+        package = ET.SubElement(product, 'Package', {
+            'Id': '*',
+            'Keywords': 'Installer',
+            'Description': f'Meson {self.version} installer',
+            'Comments': 'Meson is a high performance build system',
+            'Manufacturer': 'The Meson Development Team',
+            'InstallerVersion': '500',
+            'Languages': '1033',
+            'Compressed': 'yes',
+            'SummaryCodepage': '1252',
+        })
+
+        condition = ET.SubElement(product, 'Condition', {'Message': 'This application is only supported on Windows 10 or higher.'})
+
+        condition.text = 'X'*len(WINVER_CHECK)
+        ET.SubElement(product, 'MajorUpgrade',
+                      {'DowngradeErrorMessage': 'A newer version of Meson is already installed.'})
+
+        package.set('Platform', 'x64')
+        ET.SubElement(product, 'Media', {
+            'Id': '1',
+            'Cabinet': 'meson.cab',
+            'EmbedCab': 'yes',
+        })
+        targetdir = ET.SubElement(product, 'Directory', {
+            'Id': 'TARGETDIR',
+            'Name': 'SourceDir',
+        })
+        progfiledir = ET.SubElement(targetdir, 'Directory', {
+            'Id': self.progfile_dir,
+        })
+        installdir = ET.SubElement(progfiledir, 'Directory', {
+            'Id': 'INSTALLDIR',
+            'Name': 'Meson',
+        })
+        ET.SubElement(installdir, 'Merge', {
+            'Id': 'VCRedist',
+            'SourceFile': self.redist_path,
+            'DiskId': '1',
+            'Language': '0',
+        })
+
+        ET.SubElement(product, 'Property', {
+            'Id': 'WIXUI_INSTALLDIR',
+            'Value': 'INSTALLDIR',
+        })
+        ET.SubElement(product, 'UIRef', {
+            'Id': 'WixUI_FeatureTree',
+        })
+        for s_d in self.staging_dirs:
+            assert os.path.isdir(s_d)
+        top_feature = ET.SubElement(product, 'Feature', {
+            'Id': 'Complete',
+            'Title': 'Meson ' + self.version,
+            'Description': 'The complete package',
+            'Display': 'expand',
+            'Level': '1',
+            'ConfigurableDirectory': 'INSTALLDIR',
+        })
+        for s_d in self.staging_dirs:
+            nodes = {}
+            for root, dirs, files in os.walk(s_d):
+                cur_node = Node(dirs, files)
+                nodes[root] = cur_node
+            self.create_xml(nodes, s_d, installdir, s_d)
+            self.build_features(top_feature, s_d)
+        vcredist_feature = ET.SubElement(top_feature, 'Feature', {
+            'Id': 'VCRedist',
+            'Title': 'Visual C++ runtime',
+            'AllowAdvertise': 'no',
+            'Display': 'hidden',
+            'Level': '1',
+        })
+        ET.SubElement(vcredist_feature, 'MergeRef', {'Id': 'VCRedist'})
+        ET.ElementTree(self.root).write(self.main_xml, encoding='utf-8', xml_declaration=True)
+        # ElementTree cannot do pretty-printing, so do it manually.
+        import xml.dom.minidom
+        doc = xml.dom.minidom.parse(self.main_xml)
+        with open(self.main_xml, 'w') as open_file:
+            open_file.write(doc.toprettyxml())
+        # One last fix, add CDATA.
+        with open(self.main_xml) as open_file:
+            data = open_file.read()
+        data = data.replace('X'*len(WINVER_CHECK), WINVER_CHECK)
+        with open(self.main_xml, 'w') as open_file:
+            open_file.write(data)
+
+    def build_features(self, top_feature, staging_dir):
+        '''
+           Generate build features
+        '''
+        feature = ET.SubElement(top_feature, 'Feature', self.feature_properties[staging_dir])
+        for component_id in self.feature_components[staging_dir]:
+            ET.SubElement(feature, 'ComponentRef', {
+                'Id': component_id,
+            })
+
+    def create_xml(self, nodes, current_dir, parent_xml_node, staging_dir):
+        '''
+           Create XML file
+        '''
+        cur_node = nodes[current_dir]
+        if cur_node.files:
+            component_id = f'ApplicationFiles{self.component_num}'
+            comp_xml_node = ET.SubElement(parent_xml_node, 'Component', {
+                'Id': component_id,
+                'Guid': gen_guid(),
+            })
+            self.feature_components[staging_dir].append(component_id)
+            comp_xml_node.set('Win64', 'yes')
+            if self.component_num == 0:
+                ET.SubElement(comp_xml_node, 'Environment', {
+                    'Id': 'Environment',
+                    'Name': 'PATH',
+                    'Part': 'last',
+                    'System': 'yes',
+                    'Action': 'set',
+                    'Value': '[INSTALLDIR]',
+                })
+            self.component_num += 1
+            for f_node in cur_node.files:
+                file_id = os.path.join(current_dir, f_node).replace('\\', '_').replace('#', '_').replace('-', '_')
+                ET.SubElement(comp_xml_node, 'File', {
+                    'Id': file_id,
+                    'Name': f_node,
+                    'Source': os.path.join(current_dir, f_node),
+                })
+
+        for dirname in cur_node.dirs:
+            dir_id = os.path.join(current_dir, dirname).replace('\\', '_').replace('/', '_')
+            dir_node = ET.SubElement(parent_xml_node, 'Directory', {
+                'Id': dir_id,
+                'Name': dirname,
+            })
+            self.create_xml(nodes, os.path.join(current_dir, dirname), dir_node, staging_dir)
+
+    def build_package(self):
+        '''
+           Generate the Meson build MSI package.
+        '''
+        wixdir = 'c:\\Program Files\\Wix Toolset v3.11\\bin'
+        if not os.path.isdir(wixdir):
+            wixdir = 'c:\\Program Files (x86)\\Wix Toolset v3.11\\bin'
+        if not os.path.isdir(wixdir):
+            print("ERROR: This script requires the WiX Toolset v3.11.")
+            sys.exit(1)
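+        # Two-step WiX build: candle compiles the generated .wxs source into a
+        # .wixobj, then light links it (with the UI extension and license RTF)
+        # into the final MSI.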
+        subprocess.check_call([os.path.join(wixdir, 'candle'), self.main_xml])
+        subprocess.check_call([os.path.join(wixdir, 'light'),
+                               '-ext', 'WixUIExtension',
+                               '-cultures:en-us',
+                               '-dWixUILicenseRtf=packaging\\License.rtf',
+                               '-out', self.final_output,
+                               self.main_o])
+
+if __name__ == '__main__':
+    if not os.path.exists('meson.py'):
+        sys.exit('Run me in the top level source dir.')
+    subprocess.check_call(['pip', 'install', '--upgrade', 'pyinstaller'])
+
+    p = PackageGenerator()
+    p.build_dist()
+    p.generate_files()
+    p.build_package()
diff --git a/meson/packaging/createpkg.py b/meson/packaging/createpkg.py
new file mode 100755
index 000000000..391e730d7
--- /dev/null
+++ b/meson/packaging/createpkg.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python3
+
+# Copyright 2017-2021 The Meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import shutil, sys, os
+
+import xml.etree.ElementTree as ET
+
+sys.path.append(os.getcwd())
+from mesonbuild import coredata
+
+from createmsi import get_modules
+
+class PkgGenerator:
+
+    def __init__(self):
+        self.pkg_dir = 'macpkg'
+        self.sharedir = os.path.join(self.pkg_dir, 'usr/local/share')
+        self.bindir = os.path.join(self.pkg_dir, 'usr/local/bin')
+        self.product_name = 'Meson Build System'
+        self.identifier = 'com.mesonbuild.meson'
+        self.version = coredata.version.replace('dev', '')
+        self.mesonstashdir = os.path.join(self.sharedir, f'meson-{self.version}')
+        self.pkgname = 'meson.pkg'
+        self.productname = f'meson-{self.version}.pkg'
+        self.distribution_file = 'meson-distribution.xml'
+        self.resourcedir = 'packaging/macpages'
+
+    def build_dist(self):
+        if os.path.exists(self.pkg_dir):
+            shutil.rmtree(self.pkg_dir)
+        os.mkdir(self.pkg_dir)
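+        # Hard-coded path: pyinstaller is installed below with
+        # `pip3 install --user`, so it ends up in this per-user bin directory
+        # on the release machine.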
+        pyinstaller_bin = '/Users/jpakkane/Library/Python/3.8/bin/pyinstaller'
+        pyinst_cmd = [pyinstaller_bin,
+                      '--clean',
+                      '--distpath',
+                      self.pkg_dir]
+        for m in get_modules():
+            pyinst_cmd += ['--hidden-import', m]
+        pyinst_cmd += ['meson.py']
+        subprocess.check_call(pyinst_cmd)
+        tmpdir = os.path.join(self.pkg_dir, 'meson')
+        shutil.move(tmpdir, self.mesonstashdir)
+        os.makedirs(self.bindir)
+        ln_base = os.path.relpath(self.mesonstashdir, self.bindir)
+        ninja_bin = shutil.which('ninja')
+        assert ninja_bin
+        shutil.copy(ninja_bin, self.bindir)
+        subprocess.check_call(['strip', os.path.join(self.bindir, 'ninja')])
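+        # Symlink bin/meson to the stashed bundle using a path relative to
+        # bindir, so the link resolves both inside the package root and after
+        # installation under /usr/local.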
+        os.symlink(os.path.join(ln_base, 'meson'), os.path.join(self.bindir, 'meson'))
+
+    def build_package(self):
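+        # pkgbuild turns the staged root into a component package, then
+        # productbuild wraps it together with the distribution XML and the
+        # HTML resource pages into the final installer product.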
+        subprocess.check_call(['pkgbuild',
+                                '--root',
+                                self.pkg_dir,
+                                '--identifier',
+                                self.identifier,
+                                self.pkgname])
+        self.generate_distribution()
+        subprocess.check_call(['productbuild',
+                               '--distribution',
+                               self.distribution_file,
+                               '--resources',
+                               self.resourcedir,
+                               self.productname])
+
+    def generate_distribution(self):
+        root = ET.Element('installer-gui-script', {'minSpecVersion': '1'})
+        ET.SubElement(root, 'welcome', {'file': 'welcome.html',
+                                        'mime-type': 'text/html'})
+        ET.SubElement(root, 'license', {'file': 'license.html',
+                                        'mime-type': 'text/html'})
+        ET.SubElement(root, 'conclusion', {'file': 'conclusion.html',
+                                           'mime-type': 'text/html'})
+        ET.SubElement(root, 'pkg-ref', {'id': self.identifier})
+        ET.SubElement(root, 'options', {'customize': 'never',
+                                        'require-scripts': 'false',
+                                        'hostArchitectures': 'x86_64,arm64'})
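+        # customize='never' suppresses the package-selection pane and
+        # hostArchitectures limits the installer to x86_64 and arm64 machines.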
+        choices_outline = ET.SubElement(root, 'choices-outline')
+        line = ET.SubElement(choices_outline, 'line', {'choice': 'default'})
+        ET.SubElement(line, 'line', {'choice': self.identifier})
+        ET.SubElement(root, 'choice', {'id': 'default'})
+        choice = ET.SubElement(root, 'choice', {'id': self.identifier, 'visible': 'false'})
+        ET.SubElement(choice, 'pkg-ref', {'id': self.identifier})
+        ET.SubElement(root, 'pkg-ref', {'id': self.identifier,
+                                        'version': '0',  # self.version,
+                                        'onConclusion': 'none'}).text = self.pkgname
+        ET.ElementTree(root).write(self.distribution_file, encoding='utf-8', xml_declaration=True)
+        # ElementTree cannot do pretty-printing, so do it manually with minidom.
+        import xml.dom.minidom
+        doc = xml.dom.minidom.parse(self.distribution_file)
+        with open(self.distribution_file, 'w') as open_file:
+            open_file.write(doc.toprettyxml())
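+        # The resulting meson-distribution.xml looks roughly like:
+        #   <installer-gui-script minSpecVersion="1">
+        #     <welcome file="welcome.html" mime-type="text/html"/>
+        #     ...
+        #     <choices-outline><line choice="default">...</line></choices-outline>
+        #     <pkg-ref id="com.mesonbuild.meson" version="0">meson.pkg</pkg-ref>
+        #   </installer-gui-script>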
+
+
+if __name__ == '__main__':
+    if not os.path.exists('meson.py'):
+        sys.exit('Run me in the top level source dir.')
+    subprocess.check_call(['pip3', 'install', '--user', '--upgrade', 'pyinstaller'])
+
+    pg = PkgGenerator()
+    pg.build_dist()
+    pg.build_package()
+
diff --git a/meson/packaging/macpages/English.lproj/conclusion.html b/meson/packaging/macpages/English.lproj/conclusion.html
new file mode 100644
index 000000000..052245598
--- /dev/null
+++ b/meson/packaging/macpages/English.lproj/conclusion.html
@@ -0,0 +1,24 @@
+<html>
+  <head>
+  </head>
+  <body>
+    <h1>Install finished</h1>
+
+    <p>
+      The Meson build system is now installed. Note that Meson does not
+      provide any GUI applications, it is only usable from the command
+      line. You can verify if your installation of Meson is working by
+      running the following command in a terminal
+    </p>
+
+    <pre>
+      $ meson --version
+    </pre>
+
+    <p>
+      If the system reports that the program could not be found
+      you might need to edit your configuration files so
+      that /usr/local/bin is in your path.
+    </p>
+  </body>
+</html>
diff --git a/meson/packaging/macpages/English.lproj/license.html b/meson/packaging/macpages/English.lproj/license.html
new file mode 100644
index 000000000..235e5829a
--- /dev/null
+++ b/meson/packaging/macpages/English.lproj/license.html
@@ -0,0 +1,209 @@
+<html>
+  <head>
+  </head>
+  <body>
+    <pre>
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+    </pre>
+  </body>
+</html>
diff --git a/meson/packaging/macpages/English.lproj/welcome.html b/meson/packaging/macpages/English.lproj/welcome.html
new file mode 100644
index 000000000..3832223e5
--- /dev/null
+++ b/meson/packaging/macpages/English.lproj/welcome.html
@@ -0,0 +1,12 @@
+<html>
+  <head>
+  </head>
+  <body>
+    <h1>Meson build system installer</h1>
+
+    <p>
+      This package will install the command line tools of Meson to
+      this computer.
+    </p>
+  </body>
+</html>
+ + diff --git a/meson/pyproject.toml b/meson/pyproject.toml new file mode 100644 index 000000000..d1e6ae6e5 --- /dev/null +++ b/meson/pyproject.toml @@ -0,0 +1,2 @@ +[build-system] +requires = ["setuptools", "wheel"] diff --git a/meson/run_cross_test.py b/meson/run_cross_test.py new file mode 100755 index 000000000..bafdbb67d --- /dev/null +++ b/meson/run_cross_test.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +# Copyright 2013-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''Runs the basic test suite through a cross compiler. + +This is now just a wrapper around run_project_tests.py with specific arguments +''' + +import argparse +import subprocess +from mesonbuild import mesonlib +from mesonbuild.coredata import version as meson_version +from pathlib import Path +import json +import os + + +def runtests(cross_file, failfast, cross_only, test_list, env=None): + tests = ['--only'] + test_list + if not cross_only: + tests.append('native') + cmd = mesonlib.python_command + ['run_project_tests.py', '--backend', 'ninja'] + if failfast: + cmd += ['--failfast'] + cmd += tests + cmd += ['--cross-file', cross_file] + if cross_only: + cmd += ['--native-file', 'cross/none.txt'] + return subprocess.call(cmd, env=env) + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--failfast', action='store_true') + parser.add_argument('--cross-only', action='store_true') + parser.add_argument('cross_file') + options = parser.parse_args() + cf_path = Path(options.cross_file) + try: + data = json.loads(cf_path.read_text(encoding='utf-8')) + real_cf = cf_path.resolve().parent / data['file'] + assert real_cf.exists() + env = os.environ.copy() + env.update(data['env']) + return runtests(real_cf.as_posix(), options.failfast, options.cross_only, data['tests'], env=env) + except Exception: + return runtests(options.cross_file, options.failfast, options.cross_only, ['common']) + +if __name__ == '__main__': + print('Meson build system', meson_version, 'Cross Tests') + raise SystemExit(main()) diff --git a/meson/run_custom_lint.py b/meson/run_custom_lint.py new file mode 100755 index 000000000..89de9506e --- /dev/null +++ b/meson/run_custom_lint.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 + +from pathlib import Path +import typing as T + +root = Path(__file__).absolute().parent +mesonbuild = root / 'mesonbuild' + +whitelist = ['mesonbuild/', 'run_', 'ci/', 'tools/', 'docs/'] + +def check_missing_encoding(lines: T.List[str], path: str) -> int: + errors = 0 + functions = ['read_text', 'write_text', 'open'] + for num, line in enumerate(lines): + for func in functions: + l = line + + # Skip ignored lines + if '[ignore encoding]' in l: + continue + + # Do we have a match? 
+ loc = l.find(func + '(') + if loc < 0: + continue + if loc > 0 and ord(l[loc-1].lower()) in [*range(ord('a'), ord('z')), *range(ord('0'), ord('9')), '_']: + continue + loc += len(func) + 1 + # Some preprocessign to make parsing easier + l = l[loc:] + l = l.replace(' ', '') + l = l.replace('\t', '') + l = l.replace('\n', '') + l = l.replace('\'', '"') + + # Parameter begin + args = '' + b_open = 1 + while l: + c = l[0] + l = l[1:] + if c == ')': + b_open -= 1 + if b_open == 0: + break + elif b_open == 1: + args += c + if c == '(': + b_open += 1 + + binary_modes = ['rb', 'br', 'r+b', 'wb', 'bw', 'ab', 'ba'] + is_binary = any([f'"{x}"' in args for x in binary_modes]) + if 'encoding=' not in args and not (func == 'open' and is_binary): + location = f'\x1b[33;1m[\x1b[0;1m{path}:{num+1}\x1b[33m]\x1b[0m' + #print(f'{location:<64}: \x1b[31;1mERROR:\x1b[0m Missing `encoding=` parameter in "{line.strip()}"') + print(f'{location:<72}: \x1b[31;1mERROR:\x1b[0m Missing `encoding=` parameter in `{func}` call') + errors += 1 + return errors + +def main() -> int: + print('Scanning mesonbuild...') + errors = 0 + for i in sorted(root.glob('**/*.py')): + raw = i.read_text(encoding='utf-8') + lines = raw.splitlines() + filename = i.relative_to(root).as_posix() + + if not any([filename.startswith(x) for x in whitelist]): + continue + + errors += check_missing_encoding(lines, filename) + print(f'Found {errors} errors while scanning mesonbuild') + return 0 if errors == 0 else 1 + +if __name__ == '__main__': + raise SystemExit(main()) diff --git a/meson/run_format_tests.py b/meson/run_format_tests.py new file mode 100644 index 000000000..1f41f3d10 --- /dev/null +++ b/meson/run_format_tests.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 + +# Copyright 2012-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# some simple checks on the file format of: +# - python code +# - code samples in tests +# - markdown documentation +# +# checks are: +# - no use of tabs +# - no use of DOS line endings + +import os +import re +from pathlib import Path + +def check_file(file: Path) -> None: + lines = file.read_bytes().split(b'\n') + tabdetector = re.compile(br' *\t') + for i, line in enumerate(lines): + if re.match(tabdetector, line): + raise SystemExit("File {} contains a tab indent on line {:d}. Only spaces are permitted.".format(file, i + 1)) + if line.endswith(b'\r'): + raise SystemExit("File {} contains DOS line ending on line {:d}. Only unix-style line endings are permitted.".format(file, i + 1)) + +def check_format() -> None: + check_suffixes = {'.c', + '.cpp', + '.cxx', + '.cc', + '.rs', + '.f90', + '.vala', + '.d', + '.s', + '.m', + '.mm', + '.asm', + '.java', + '.txt', + '.py', + '.swift', + '.build', + '.md', + } + skip_dirs = { + '.dub', # external deps are here + '.pytest_cache', + 'meson-logs', 'meson-private', + 'work area', + '.eggs', '_cache', # e.g. 
.mypy_cache + 'venv', # virtualenvs have DOS line endings + } + for (root, _, filenames) in os.walk('.'): + if any([x in root for x in skip_dirs]): + continue + for fname in filenames: + file = Path(fname) + if file.suffix.lower() in check_suffixes: + if file.name in ('sitemap.txt', 'meson-test-run.txt'): + continue + check_file(root / file) + + +if __name__ == '__main__': + script_dir = os.path.split(__file__)[0] + if script_dir != '': + os.chdir(script_dir) + check_format() diff --git a/meson/run_meson_command_tests.py b/meson/run_meson_command_tests.py new file mode 100755 index 000000000..45096c607 --- /dev/null +++ b/meson/run_meson_command_tests.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python3 + +# Copyright 2018 The Meson development team +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import tempfile +import unittest +import subprocess +import zipapp +from pathlib import Path + +from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows +from mesonbuild.coredata import version as meson_version + + +def get_pypath(): + import sysconfig + pypath = sysconfig.get_path('purelib', vars={'base': ''}) + # Ensure that / is the path separator and not \, then strip / + return Path(pypath).as_posix().strip('/') + +def get_pybindir(): + import sysconfig + # 'Scripts' on Windows and 'bin' on other platforms including MSYS + return sysconfig.get_path('scripts', vars={'base': ''}).strip('\\/') + +class CommandTests(unittest.TestCase): + ''' + Test that running meson in various ways works as expected by checking the + value of mesonlib.meson_command that was set during configuration. + ''' + + def setUp(self): + super().setUp() + self.orig_env = os.environ.copy() + self.orig_dir = os.getcwd() + os.environ['MESON_COMMAND_TESTS'] = '1' + self.tmpdir = Path(tempfile.mkdtemp()).resolve() + self.src_root = Path(__file__).resolve().parent + self.testdir = str(self.src_root / 'test cases/common/1 trivial') + self.meson_args = ['--backend=ninja'] + + def tearDown(self): + try: + windows_proof_rmtree(str(self.tmpdir)) + except FileNotFoundError: + pass + os.environ.clear() + os.environ.update(self.orig_env) + os.chdir(str(self.orig_dir)) + super().tearDown() + + def _run(self, command, workdir=None): + ''' + Run a command while printing the stdout, and also return a copy of it + ''' + # If this call hangs CI will just abort. It is very hard to distinguish + # between CI issue and test bug in that case. Set timeout and fail loud + # instead. 
+ p = subprocess.run(command, stdout=subprocess.PIPE, + env=os.environ.copy(), universal_newlines=True, + cwd=workdir, timeout=60 * 5) + print(p.stdout) + if p.returncode != 0: + raise subprocess.CalledProcessError(p.returncode, command) + return p.stdout + + def assertMesonCommandIs(self, line, cmd): + self.assertTrue(line.startswith('meson_command '), msg=line) + self.assertEqual(line, f'meson_command is {cmd!r}') + + def test_meson_uninstalled(self): + # This is what the meson command must be for all these cases + resolved_meson_command = python_command + [str(self.src_root / 'meson.py')] + # Absolute path to meson.py + os.chdir('/') + builddir = str(self.tmpdir / 'build1') + meson_py = str(self.src_root / 'meson.py') + meson_setup = [meson_py, 'setup'] + meson_command = python_command + meson_setup + self.meson_args + stdo = self._run(meson_command + [self.testdir, builddir]) + self.assertMesonCommandIs(stdo.split('\n')[0], resolved_meson_command) + # ./meson.py + os.chdir(str(self.src_root)) + builddir = str(self.tmpdir / 'build2') + meson_py = './meson.py' + meson_setup = [meson_py, 'setup'] + meson_command = python_command + meson_setup + self.meson_args + stdo = self._run(meson_command + [self.testdir, builddir]) + self.assertMesonCommandIs(stdo.split('\n')[0], resolved_meson_command) + # Symlink to meson.py + if is_windows(): + # Symlinks require admin perms + return + os.chdir(str(self.src_root)) + builddir = str(self.tmpdir / 'build3') + # Create a symlink to meson.py in bindir, and add it to PATH + bindir = (self.tmpdir / 'bin') + bindir.mkdir() + (bindir / 'meson').symlink_to(self.src_root / 'meson.py') + os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH'] + # See if it works! + meson_py = 'meson' + meson_setup = [meson_py, 'setup'] + meson_command = meson_setup + self.meson_args + stdo = self._run(meson_command + [self.testdir, builddir]) + self.assertMesonCommandIs(stdo.split('\n')[0], resolved_meson_command) + + def test_meson_installed(self): + # Install meson + prefix = self.tmpdir / 'prefix' + pylibdir = prefix / get_pypath() + bindir = prefix / get_pybindir() + pylibdir.mkdir(parents=True) + # XXX: join with empty name so it always ends with os.sep otherwise + # distutils complains that prefix isn't contained in PYTHONPATH + os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '') + os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH'] + self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)]) + # Fix importlib-metadata by appending all dirs in pylibdir + PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir()] + PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS] + os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS) + # Check that all the files were installed correctly + self.assertTrue(bindir.is_dir()) + self.assertTrue(pylibdir.is_dir()) + # Run `meson` + os.chdir('/') + resolved_meson_command = [str(bindir / 'meson')] + builddir = str(self.tmpdir / 'build1') + meson_setup = ['meson', 'setup'] + meson_command = meson_setup + self.meson_args + stdo = self._run(meson_command + [self.testdir, builddir]) + self.assertMesonCommandIs(stdo.split('\n')[0], resolved_meson_command) + # Run `/path/to/meson` + builddir = str(self.tmpdir / 'build2') + meson_setup = [str(bindir / 'meson'), 'setup'] + meson_command = meson_setup + self.meson_args + stdo = self._run(meson_command + [self.testdir, builddir]) + self.assertMesonCommandIs(stdo.split('\n')[0], resolved_meson_command) + # Run `python3 -m 
mesonbuild.mesonmain` + resolved_meson_command = python_command + ['-m', 'mesonbuild.mesonmain'] + builddir = str(self.tmpdir / 'build3') + meson_setup = ['-m', 'mesonbuild.mesonmain', 'setup'] + meson_command = python_command + meson_setup + self.meson_args + stdo = self._run(meson_command + [self.testdir, builddir]) + self.assertMesonCommandIs(stdo.split('\n')[0], resolved_meson_command) + if is_windows(): + # Next part requires a shell + return + # `meson` is a wrapper to `meson.real` + resolved_meson_command = [str(bindir / 'meson.real')] + builddir = str(self.tmpdir / 'build4') + (bindir / 'meson').rename(bindir / 'meson.real') + wrapper = (bindir / 'meson') + wrapper.write_text('#!/bin/sh\n\nmeson.real "$@"', encoding='utf-8') + wrapper.chmod(0o755) + meson_setup = [str(wrapper), 'setup'] + meson_command = meson_setup + self.meson_args + stdo = self._run(meson_command + [self.testdir, builddir]) + self.assertMesonCommandIs(stdo.split('\n')[0], resolved_meson_command) + + def test_meson_exe_windows(self): + raise unittest.SkipTest('NOT IMPLEMENTED') + + def test_meson_zipapp(self): + if is_windows(): + raise unittest.SkipTest('NOT IMPLEMENTED') + source = Path(__file__).resolve().parent + target = self.tmpdir / 'meson.pyz' + script = source / 'packaging' / 'create_zipapp.py' + self._run([script.as_posix(), source, '--outfile', target, '--interpreter', python_command[0]]) + self._run([target.as_posix(), '--help']) + + +if __name__ == '__main__': + print('Meson build system', meson_version, 'Command Tests') + raise SystemExit(unittest.main(buffer=True)) diff --git a/meson/run_mypy.py b/meson/run_mypy.py new file mode 100755 index 000000000..906bfe39b --- /dev/null +++ b/meson/run_mypy.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 + +from pathlib import Path +import argparse +import os +import subprocess +import sys +import typing as T + +from mesonbuild.mesonlib import version_compare + +modules = [ + # fully typed submodules + # 'mesonbuild/ast', + 'mesonbuild/cmake', + 'mesonbuild/compilers', + 'mesonbuild/dependencies', + 'mesonbuild/interpreterbase', + 'mesonbuild/linkers', + 'mesonbuild/scripts', + 'mesonbuild/wrap', + + # specific files + 'mesonbuild/arglist.py', + # 'mesonbuild/coredata.py', + 'mesonbuild/envconfig.py', + 'mesonbuild/interpreter/interpreterobjects.py', + 'mesonbuild/mcompile.py', + 'mesonbuild/mdevenv.py', + 'mesonbuild/mesonlib/platform.py', + 'mesonbuild/mesonlib/universal.py', + 'mesonbuild/minit.py', + 'mesonbuild/minstall.py', + 'mesonbuild/mintro.py', + 'mesonbuild/mlog.py', + 'mesonbuild/modules/fs.py', + 'mesonbuild/modules/unstable_rust.py', + 'mesonbuild/modules/qt.py', + 'mesonbuild/mparser.py', + 'mesonbuild/msetup.py', + 'mesonbuild/mtest.py', + 'mesonbuild/optinterpreter.py', + 'mesonbuild/programs.py', + + 'run_custom_lint.py', + 'run_mypy.py', + 'run_project_tests.py', + 'run_single_test.py', + 'tools' +] + +if os.name == 'posix': + modules.append('mesonbuild/mesonlib/posix.py') +elif os.name == 'nt': + modules.append('mesonbuild/mesonlib/win32.py') + +def check_mypy() -> None: + try: + import mypy + except ImportError: + print('Failed import mypy') + sys.exit(1) + from mypy.version import __version__ as mypy_version + if not version_compare(mypy_version, '>=0.812'): + print('mypy >=0.812 is required, older versions report spurious errors') + sys.exit(1) + +def main() -> int: + check_mypy() + + root = Path(__file__).absolute().parent + args = [] # type: T.List[str] + + parser = argparse.ArgumentParser(description='Process some integers.') + 
parser.add_argument('-p', '--pretty', action='store_true', help='pretty print mypy errors') + parser.add_argument('-C', '--clear', action='store_true', help='clear the terminal before running mypy') + + opts = parser.parse_args() + if opts.pretty: + args.append('--pretty') + + if opts.clear: + print('\x1bc', end='', flush=True) + + print('Running mypy (this can take some time) ...') + p = subprocess.run( + [sys.executable, '-m', 'mypy'] + args + modules, + cwd=root, + ) + return p.returncode + +if __name__ == '__main__': + sys.exit(main()) diff --git a/meson/run_project_tests.py b/meson/run_project_tests.py new file mode 100755 index 000000000..289aef931 --- /dev/null +++ b/meson/run_project_tests.py @@ -0,0 +1,1593 @@ +#!/usr/bin/env python3 + +# Copyright 2012-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Work around some pathlib bugs... +from mesonbuild import _pathlib +import sys +sys.modules['pathlib'] = _pathlib + +from concurrent.futures import ProcessPoolExecutor, CancelledError +from enum import Enum +from io import StringIO +from pathlib import Path, PurePath +import argparse +import functools +import itertools +import json +import multiprocessing +import os +import re +import shlex +import shutil +import signal +import subprocess +import tempfile +import time +import typing as T +import xml.etree.ElementTree as ET +import collections + +from mesonbuild import build +from mesonbuild import environment +from mesonbuild import compilers +from mesonbuild import mesonlib +from mesonbuild import mlog +from mesonbuild import mtest +from mesonbuild.compilers import compiler_from_language, detect_objc_compiler, detect_objcpp_compiler +from mesonbuild.build import ConfigurationData +from mesonbuild.mesonlib import MachineChoice, Popen_safe, TemporaryDirectoryWinProof +from mesonbuild.mlog import blue, bold, cyan, green, red, yellow, normal_green +from mesonbuild.coredata import backendlist, version as meson_version +from mesonbuild.mesonmain import setup_vsenv +from mesonbuild.modules.python import PythonExternalProgram +from run_tests import get_fake_options, run_configure, get_meson_script +from run_tests import get_backend_commands, get_backend_args_for_dir, Backend +from run_tests import ensure_backend_detects_changes +from run_tests import guess_backend + +if T.TYPE_CHECKING: + from types import FrameType + from mesonbuild.environment import Environment + from mesonbuild._typing import Protocol + from concurrent.futures import Future + + class CompilerArgumentType(Protocol): + cross_file: str + native_file: str + use_tmpdir: bool + + + class ArgumentType(CompilerArgumentType): + + """Typing information for command line arguments.""" + + extra_args: T.List[str] + backend: str + num_workers: int + failfast: bool + no_unittests: bool + only: T.List[str] + +ALL_TESTS = ['cmake', 'common', 'native', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test', + 'keyval', 'platform-osx', 'platform-windows', 'platform-linux', + 'java', 'C#', 'vala', 
'cython', 'rust', 'd', 'objective c', 'objective c++', + 'fortran', 'swift', 'cuda', 'python3', 'python', 'fpga', 'frameworks', 'nasm', 'wasm', + ] + + +class BuildStep(Enum): + configure = 1 + build = 2 + test = 3 + install = 4 + clean = 5 + validate = 6 + + +class TestResult(BaseException): + def __init__(self, cicmds: T.List[str]) -> None: + self.msg = '' # empty msg indicates test success + self.stdo = '' + self.stde = '' + self.mlog = '' + self.cicmds = cicmds + self.conftime: float = 0 + self.buildtime: float = 0 + self.testtime: float = 0 + + def add_step(self, step: BuildStep, stdo: str, stde: str, mlog: str = '', time: float = 0) -> None: + self.step = step + self.stdo += stdo + self.stde += stde + self.mlog += mlog + if step == BuildStep.configure: + self.conftime = time + elif step == BuildStep.build: + self.buildtime = time + elif step == BuildStep.test: + self.testtime = time + + def fail(self, msg: str) -> None: + self.msg = msg + +python = PythonExternalProgram(sys.executable) +python.sanity() + +class InstalledFile: + def __init__(self, raw: T.Dict[str, str]): + self.path = raw['file'] + self.typ = raw['type'] + self.platform = raw.get('platform', None) + self.language = raw.get('language', 'c') # type: str + + version = raw.get('version', '') # type: str + if version: + self.version = version.split('.') # type: T.List[str] + else: + # split on '' will return [''], we want an empty list though + self.version = [] + + def get_path(self, compiler: str, env: environment.Environment) -> T.Optional[Path]: + p = Path(self.path) + canonical_compiler = compiler + if ((compiler in ['clang-cl', 'intel-cl']) or + (env.machines.host.is_windows() and compiler in {'pgi', 'dmd', 'ldc'})): + canonical_compiler = 'msvc' + + python_suffix = python.info['suffix'] + + has_pdb = False + if self.language in {'c', 'cpp'}: + has_pdb = canonical_compiler == 'msvc' + elif self.language == 'd': + # dmd's optlink does not genearte pdb iles + has_pdb = env.coredata.compilers.host['d'].linker.id in {'link', 'lld-link'} + + # Abort if the platform does not match + matches = { + 'msvc': canonical_compiler == 'msvc', + 'gcc': canonical_compiler != 'msvc', + 'cygwin': env.machines.host.is_cygwin(), + '!cygwin': not env.machines.host.is_cygwin(), + }.get(self.platform or '', True) + if not matches: + return None + + # Handle the different types + if self.typ in {'py_implib', 'python_lib', 'python_file'}: + val = p.as_posix() + val = val.replace('@PYTHON_PLATLIB@', python.platlib) + val = val.replace('@PYTHON_PURELIB@', python.purelib) + p = Path(val) + if self.typ == 'python_file': + return p + if self.typ == 'python_lib': + return p.with_suffix(python_suffix) + if self.typ in ['file', 'dir']: + return p + elif self.typ == 'shared_lib': + if env.machines.host.is_windows() or env.machines.host.is_cygwin(): + # Windows only has foo.dll and foo-X.dll + if len(self.version) > 1: + return None + if self.version: + p = p.with_name('{}-{}'.format(p.name, self.version[0])) + return p.with_suffix('.dll') + + p = p.with_name(f'lib{p.name}') + if env.machines.host.is_darwin(): + # MacOS only has libfoo.dylib and libfoo.X.dylib + if len(self.version) > 1: + return None + + # pathlib.Path.with_suffix replaces, not appends + suffix = '.dylib' + if self.version: + suffix = '.{}{}'.format(self.version[0], suffix) + else: + # pathlib.Path.with_suffix replaces, not appends + suffix = '.so' + if self.version: + suffix = '{}.{}'.format(suffix, '.'.join(self.version)) + return p.with_suffix(suffix) + elif self.typ == 'exe': + if 
env.machines.host.is_windows() or env.machines.host.is_cygwin(): + return p.with_suffix('.exe') + elif self.typ == 'pdb': + if self.version: + p = p.with_name('{}-{}'.format(p.name, self.version[0])) + return p.with_suffix('.pdb') if has_pdb else None + elif self.typ in {'implib', 'implibempty', 'py_implib'}: + if env.machines.host.is_windows() and canonical_compiler == 'msvc': + # only MSVC doesn't generate empty implibs + if self.typ == 'implibempty' and compiler == 'msvc': + return None + return p.parent / (re.sub(r'^lib', '', p.name) + '.lib') + elif env.machines.host.is_windows() or env.machines.host.is_cygwin(): + if self.typ == 'py_implib': + p = p.with_suffix(python_suffix) + return p.with_suffix('.dll.a') + else: + return None + elif self.typ == 'expr': + return Path(platform_fix_name(p.as_posix(), canonical_compiler, env)) + else: + raise RuntimeError(f'Invalid installed file type {self.typ}') + + return p + + def get_paths(self, compiler: str, env: environment.Environment, installdir: Path) -> T.List[Path]: + p = self.get_path(compiler, env) + if not p: + return [] + if self.typ == 'dir': + abs_p = installdir / p + if not abs_p.exists(): + raise RuntimeError(f'{p} does not exist') + if not abs_p.is_dir(): + raise RuntimeError(f'{p} is not a directory') + return [x.relative_to(installdir) for x in abs_p.rglob('*') if x.is_file() or x.is_symlink()] + else: + return [p] + +@functools.total_ordering +class TestDef: + def __init__(self, path: Path, name: T.Optional[str], args: T.List[str], skip: bool = False): + self.category = path.parts[1] + self.path = path + self.name = name + self.args = args + self.skip = skip + self.env = os.environ.copy() + self.installed_files = [] # type: T.List[InstalledFile] + self.do_not_set_opts = [] # type: T.List[str] + self.stdout = [] # type: T.List[T.Dict[str, str]] + self.skip_expected = False + + # Always print a stack trace for Meson exceptions + self.env['MESON_FORCE_BACKTRACE'] = '1' + + def __repr__(self) -> str: + return '<{}: {:<48} [{}: {}] -- {}>'.format(type(self).__name__, str(self.path), self.name, self.args, self.skip) + + def display_name(self) -> mlog.TV_LoggableList: + # Remove the redundant 'test cases' part + section, id = self.path.parts[1:3] + res: mlog.TV_LoggableList = [f'{section}:', bold(id)] + if self.name: + res += [f' ({self.name})'] + return res + + def __lt__(self, other: object) -> bool: + if isinstance(other, TestDef): + # None is not sortable, so replace it with an empty string + s_id = int(self.path.name.split(' ')[0]) + o_id = int(other.path.name.split(' ')[0]) + return (s_id, self.path, self.name or '') < (o_id, other.path, other.name or '') + return NotImplemented + +failing_logs: T.List[str] = [] +print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ +under_ci = 'CI' in os.environ +ci_jobname = os.environ.get('MESON_CI_JOBNAME', None) +do_debug = under_ci or print_debug +no_meson_log_msg = 'No meson-log.txt found.' 
+ +host_c_compiler: T.Optional[str] = None +compiler_id_map: T.Dict[str, str] = {} +tool_vers_map: T.Dict[str, str] = {} + +compile_commands: T.List[str] +clean_commands: T.List[str] +test_commands: T.List[str] +install_commands: T.List[str] +uninstall_commands: T.List[str] + +backend: 'Backend' +backend_flags: T.List[str] + +stop: bool = False +is_worker_process: bool = False + +# Let's have colors in our CI output +if under_ci: + def _ci_colorize_console() -> bool: + return not is_worker_process + + mlog.colorize_console = _ci_colorize_console + +class StopException(Exception): + def __init__(self) -> None: + super().__init__('Stopped by user') + +def stop_handler(signal: signal.Signals, frame: T.Optional['FrameType']) -> None: + global stop + stop = True +signal.signal(signal.SIGINT, stop_handler) +signal.signal(signal.SIGTERM, stop_handler) + +def setup_commands(optbackend: str) -> None: + global do_debug, backend, backend_flags + global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands + backend, backend_flags = guess_backend(optbackend, shutil.which('msbuild')) + compile_commands, clean_commands, test_commands, install_commands, \ + uninstall_commands = get_backend_commands(backend, do_debug) + +# TODO try to eliminate or at least reduce this function +def platform_fix_name(fname: str, canonical_compiler: str, env: environment.Environment) -> str: + if '?lib' in fname: + if env.machines.host.is_windows() and canonical_compiler == 'msvc': + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/\1.', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) + elif env.machines.host.is_windows(): + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/lib\1.', fname) + fname = re.sub(r'\?lib(.*)\.dll$', r'lib\1.dll', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) + elif env.machines.host.is_cygwin(): + fname = re.sub(r'lib/\?lib(.*)\.so$', r'bin/cyg\1.dll', fname) + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/cyg\1.', fname) + fname = re.sub(r'\?lib(.*)\.dll$', r'cyg\1.dll', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) + else: + fname = re.sub(r'\?lib', 'lib', fname) + + if fname.endswith('?so'): + if env.machines.host.is_windows() and canonical_compiler == 'msvc': + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/(?:lib|)([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif env.machines.host.is_windows(): + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif env.machines.host.is_cygwin(): + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/lib([^/]*?)\?so$', r'/cyg\1.dll', fname) + fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif env.machines.host.is_darwin(): + return fname[:-3] + '.dylib' + else: + return fname[:-3] + '.so' + + return fname + +def validate_install(test: TestDef, installdir: Path, env: environment.Environment) -> str: + ret_msg = '' + expected_raw = [] # type: T.List[Path] + for i in test.installed_files: + try: + expected_raw += i.get_paths(host_c_compiler, env, installdir) + except RuntimeError as err: + ret_msg += f'Expected path error: {err}\n' + expected = {x: False for x in expected_raw} + found = [x.relative_to(installdir) for x in installdir.rglob('*') if x.is_file() or x.is_symlink()] + # Mark all found files as found and detect unexpected files + for fname in found: + if fname not in expected: + ret_msg += f'Extra file {fname} found.\n' + continue + expected[fname] = True + # 
Check if expected files were found + for p, f in expected.items(): + if not f: + ret_msg += f'Expected file {p} missing.\n' + # List dir content on error + if ret_msg != '': + ret_msg += '\nInstall dir contents:\n' + for p in found: + ret_msg += f' - {p}\n' + return ret_msg + +def log_text_file(logfile: T.TextIO, testdir: Path, result: TestResult) -> None: + logfile.write('%s\nstdout\n\n---\n' % testdir.as_posix()) + logfile.write(result.stdo) + logfile.write('\n\n---\n\nstderr\n\n---\n') + logfile.write(result.stde) + logfile.write('\n\n---\n\n') + if print_debug: + try: + print(result.stdo) + except UnicodeError: + sanitized_out = result.stdo.encode('ascii', errors='replace').decode() + print(sanitized_out) + try: + print(result.stde, file=sys.stderr) + except UnicodeError: + sanitized_err = result.stde.encode('ascii', errors='replace').decode() + print(sanitized_err, file=sys.stderr) + + +def _run_ci_include(args: T.List[str]) -> str: + if not args: + return 'At least one parameter required' + try: + data = Path(args[0]).read_text(errors='ignore', encoding='utf-8') + return 'Included file {}:\n{}\n'.format(args[0], data) + except Exception: + return 'Failed to open {}'.format(args[0]) + +ci_commands = { + 'ci_include': _run_ci_include +} + +def run_ci_commands(raw_log: str) -> T.List[str]: + res = [] + for l in raw_log.splitlines(): + if not l.startswith('!meson_ci!/'): + continue + cmd = shlex.split(l[11:]) + if not cmd or cmd[0] not in ci_commands: + continue + res += ['CI COMMAND {}:\n{}\n'.format(cmd[0], ci_commands[cmd[0]](cmd[1:]))] + return res + +class OutputMatch: + def __init__(self, how: str, expected: str, count: int) -> None: + self.how = how + self.expected = expected + self.count = count + + def match(self, actual: str) -> bool: + if self.how == "re": + return bool(re.match(self.expected, actual)) + return self.expected == actual + +def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) -> str: + if expected: + matches: T.List[OutputMatch] = [] + nomatches: T.List[OutputMatch] = [] + for item in expected: + how = item.get('match', 'literal') + expected_line = item.get('line') + count = int(item.get('count', -1)) + + # Simple heuristic to automatically convert path separators for + # Windows: + # + # Any '/' appearing before 'WARNING' or 'ERROR' (i.e. a path in a + # filename part of a location) is replaced with '\' (in a re: '\\' + # which matches a literal '\') + # + # (There should probably be a way to turn this off for more complex + # cases which don't fit this) + if mesonlib.is_windows(): + if how != "re": + sub = r'\\' + else: + sub = r'\\\\' + expected_line = re.sub(r'/(?=.*(WARNING|ERROR))', sub, expected_line) + + m = OutputMatch(how, expected_line, count) + if count == 0: + nomatches.append(m) + else: + matches.append(m) + + + i = 0 + for actual in output.splitlines(): + # Verify this line does not match any unexpected lines (item.count == 0) + for match in nomatches: + if match.match(actual): + return f'unexpected "{match.expected}" found in {desc}' + # If we matched all expected lines, continue to verify there are + # no unexpected line. If nomatches is empty then we are done already. + if i >= len(matches): + if not nomatches: + break + continue + # Check if this line match current expected line + match = matches[i] + if match.match(actual): + if match.count < 0: + # count was not specified, continue with next expected line, + # it does not matter if this line will be matched again or + # not. 
+ i += 1 + else: + # count was specified (must be >0), continue expecting this + # same line. If count reached 0 we continue with next + # expected line but remember that this one must not match + # anymore. + match.count -= 1 + if match.count == 0: + nomatches.append(match) + i += 1 + + if i < len(matches): + # reached the end of output without finding expected + return f'expected "{matches[i].expected}" not found in {desc}' + + return '' + +def validate_output(test: TestDef, stdo: str, stde: str) -> str: + return _compare_output(test.stdout, stdo, 'stdout') + +# There are some class variables and such that cahce +# information. Clear all of these. The better solution +# would be to change the code so that no state is persisted +# but that would be a lot of work given that Meson was originally +# coded to run as a batch process. +def clear_internal_caches() -> None: + import mesonbuild.interpreterbase + from mesonbuild.dependencies import CMakeDependency + from mesonbuild.mesonlib import PerMachine + mesonbuild.interpreterbase.FeatureNew.feature_registry = {} + CMakeDependency.class_cmakeinfo = PerMachine(None, None) + +def run_test_inprocess(testdir: str) -> T.Tuple[int, str, str, str]: + old_stdout = sys.stdout + sys.stdout = mystdout = StringIO() + old_stderr = sys.stderr + sys.stderr = mystderr = StringIO() + old_cwd = os.getcwd() + os.chdir(testdir) + test_log_fname = Path('meson-logs', 'testlog.txt') + try: + returncode_test = mtest.run_with_args(['--no-rebuild']) + if test_log_fname.exists(): + test_log = test_log_fname.open(encoding='utf-8', errors='ignore').read() + else: + test_log = '' + returncode_benchmark = mtest.run_with_args(['--no-rebuild', '--benchmark', '--logbase', 'benchmarklog']) + finally: + sys.stdout = old_stdout + sys.stderr = old_stderr + os.chdir(old_cwd) + return max(returncode_test, returncode_benchmark), mystdout.getvalue(), mystderr.getvalue(), test_log + +# Build directory name must be the same so Ccache works over +# consecutive invocations. +def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str: + import hashlib + src_dir = test.path.as_posix() + if test.name: + src_dir += test.name + rel_dirname = 'b ' + hashlib.sha256(src_dir.encode(errors='ignore')).hexdigest()[0:10] + abs_pathname = os.path.join(tempfile.gettempdir() if use_tmpdir else os.getcwd(), rel_dirname) + if os.path.exists(abs_pathname): + mesonlib.windows_proof_rmtree(abs_pathname) + os.mkdir(abs_pathname) + return abs_pathname + +def format_parameter_file(file_basename: str, test: TestDef, test_build_dir: str) -> Path: + confdata = ConfigurationData() + confdata.values = {'MESON_TEST_ROOT': (str(test.path.absolute()), 'base directory of current test')} + + template = test.path / (file_basename + '.in') + destination = Path(test_build_dir) / file_basename + mesonlib.do_conf_file(str(template), str(destination), confdata, 'meson') + + return destination + +def detect_parameter_files(test: TestDef, test_build_dir: str) -> T.Tuple[Path, Path]: + nativefile = test.path / 'nativefile.ini' + crossfile = test.path / 'crossfile.ini' + + if os.path.exists(str(test.path / 'nativefile.ini.in')): + nativefile = format_parameter_file('nativefile.ini', test, test_build_dir) + + if os.path.exists(str(test.path / 'crossfile.ini.in')): + crossfile = format_parameter_file('crossfile.ini', test, test_build_dir) + + return nativefile, crossfile + +# In previous python versions the global variables are lost in ProcessPoolExecutor. 
+# So, we use this tuple to restore some of them +class GlobalState(T.NamedTuple): + compile_commands: T.List[str] + clean_commands: T.List[str] + test_commands: T.List[str] + install_commands: T.List[str] + uninstall_commands: T.List[str] + + backend: 'Backend' + backend_flags: T.List[str] + + host_c_compiler: T.Optional[str] + +def run_test(test: TestDef, + extra_args: T.List[str], + should_fail: str, + use_tmp: bool, + state: T.Optional[GlobalState] = None) -> T.Optional[TestResult]: + # Unpack the global state + global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands, backend, backend_flags, host_c_compiler + if state is not None: + compile_commands, clean_commands, test_commands, install_commands, uninstall_commands, backend, backend_flags, host_c_compiler = state + # Store that this is a worker process + global is_worker_process + is_worker_process = True + # Setup the test environment + assert not test.skip, 'Skipped thest should not be run' + build_dir = create_deterministic_builddir(test, use_tmp) + try: + with TemporaryDirectoryWinProof(prefix='i ', dir=None if use_tmp else os.getcwd()) as install_dir: + try: + return _run_test(test, build_dir, install_dir, extra_args, should_fail) + except TestResult as r: + return r + finally: + mlog.shutdown() # Close the log file because otherwise Windows wets itself. + finally: + mesonlib.windows_proof_rmtree(build_dir) + +def _run_test(test: TestDef, + test_build_dir: str, + install_dir: str, + extra_args: T.List[str], + should_fail: str) -> TestResult: + gen_start = time.time() + # Configure in-process + gen_args = [] # type: T.List[str] + if 'prefix' not in test.do_not_set_opts: + gen_args += ['--prefix', 'x:/usr'] if mesonlib.is_windows() else ['--prefix', '/usr'] + if 'libdir' not in test.do_not_set_opts: + gen_args += ['--libdir', 'lib'] + gen_args += [test.path.as_posix(), test_build_dir] + backend_flags + extra_args + + nativefile, crossfile = detect_parameter_files(test, test_build_dir) + + if nativefile.exists(): + gen_args.extend(['--native-file', nativefile.as_posix()]) + if crossfile.exists(): + gen_args.extend(['--cross-file', crossfile.as_posix()]) + (returncode, stdo, stde) = run_configure(gen_args, env=test.env, catch_exception=True) + try: + logfile = Path(test_build_dir, 'meson-logs', 'meson-log.txt') + mesonlog = logfile.open(errors='ignore', encoding='utf-8').read() + except Exception: + mesonlog = no_meson_log_msg + cicmds = run_ci_commands(mesonlog) + testresult = TestResult(cicmds) + testresult.add_step(BuildStep.configure, stdo, stde, mesonlog, time.time() - gen_start) + output_msg = validate_output(test, stdo, stde) + testresult.mlog += output_msg + if output_msg: + testresult.fail('Unexpected output while configuring.') + return testresult + if should_fail == 'meson': + if returncode == 1: + return testresult + elif returncode != 0: + testresult.fail(f'Test exited with unexpected status {returncode}.') + return testresult + else: + testresult.fail('Test that should have failed succeeded.') + return testresult + if returncode != 0: + testresult.fail('Generating the build system failed.') + return testresult + builddata = build.load(test_build_dir) + dir_args = get_backend_args_for_dir(backend, test_build_dir) + + # Build with subprocess + def build_step() -> None: + build_start = time.time() + pc, o, e = Popen_safe(compile_commands + dir_args, cwd=test_build_dir) + testresult.add_step(BuildStep.build, o, e, '', time.time() - build_start) + if should_fail == 'build': + if 
pc.returncode != 0: + raise testresult + testresult.fail('Test that should have failed to build succeeded.') + raise testresult + if pc.returncode != 0: + testresult.fail('Compiling source code failed.') + raise testresult + + # Touch the meson.build file to force a regenerate + def force_regenerate() -> None: + ensure_backend_detects_changes(backend) + os.utime(str(test.path / 'meson.build')) + + # just test building + build_step() + + # test that regeneration works for build step + force_regenerate() + build_step() # TBD: assert nothing gets built after the regenerate? + + # test that regeneration works for test step + force_regenerate() + + # Test in-process + clear_internal_caches() + test_start = time.time() + (returncode, tstdo, tstde, test_log) = run_test_inprocess(test_build_dir) + testresult.add_step(BuildStep.test, tstdo, tstde, test_log, time.time() - test_start) + if should_fail == 'test': + if returncode != 0: + return testresult + testresult.fail('Test that should have failed to run unit tests succeeded.') + return testresult + if returncode != 0: + testresult.fail('Running unit tests failed.') + return testresult + + # Do installation, if the backend supports it + if install_commands: + env = test.env.copy() + env['DESTDIR'] = install_dir + # Install with subprocess + pi, o, e = Popen_safe(install_commands, cwd=test_build_dir, env=env) + testresult.add_step(BuildStep.install, o, e) + if pi.returncode != 0: + testresult.fail('Running install failed.') + return testresult + + # Clean with subprocess + env = test.env.copy() + pi, o, e = Popen_safe(clean_commands + dir_args, cwd=test_build_dir, env=env) + testresult.add_step(BuildStep.clean, o, e) + if pi.returncode != 0: + testresult.fail('Running clean failed.') + return testresult + + # Validate installed files + testresult.add_step(BuildStep.install, '', '') + if not install_commands: + return testresult + install_msg = validate_install(test, Path(install_dir), builddata.environment) + if install_msg: + testresult.fail('\n' + install_msg) + return testresult + + return testresult + + +# processing of test.json 'skip_*' keys, which can appear at top level, or in +# matrix: +def _skip_keys(test_def: T.Dict) -> T.Tuple[bool, bool]: + skip_expected = False + + # Test is expected to skip if MESON_CI_JOBNAME contains any of the list of + # substrings + if ('skip_on_jobname' in test_def) and (ci_jobname is not None): + skip_expected = any(s in ci_jobname for s in test_def['skip_on_jobname']) + + # Test is expected to skip if os matches + if 'skip_on_os' in test_def: + mesonenv = environment.Environment(None, None, get_fake_options('/')) + for skip_os in test_def['skip_on_os']: + if skip_os.startswith('!'): + if mesonenv.machines.host.system != skip_os[1:]: + skip_expected = True + else: + if mesonenv.machines.host.system == skip_os: + skip_expected = True + + # Skip if environment variable is present + skip = False + if 'skip_on_env' in test_def: + for skip_env_var in test_def['skip_on_env']: + if skip_env_var in os.environ: + skip = True + + return (skip, skip_expected) + + +def load_test_json(t: TestDef, stdout_mandatory: bool) -> T.List[TestDef]: + all_tests: T.List[TestDef] = [] + test_def = {} + test_def_file = t.path / 'test.json' + if test_def_file.is_file(): + test_def = json.loads(test_def_file.read_text(encoding='utf-8')) + + # Handle additional environment variables + env = {} # type: T.Dict[str, str] + if 'env' in test_def: + assert isinstance(test_def['env'], dict) + env = test_def['env'] + for key, val in env.items(): 
+ val = val.replace('@ROOT@', t.path.resolve().as_posix()) + val = val.replace('@PATH@', t.env.get('PATH', '')) + env[key] = val + + # Handle installed files + installed = [] # type: T.List[InstalledFile] + if 'installed' in test_def: + installed = [InstalledFile(x) for x in test_def['installed']] + + # Handle expected output + stdout = test_def.get('stdout', []) + if stdout_mandatory and not stdout: + raise RuntimeError(f"{test_def_file} must contain a non-empty stdout key") + + # Handle the do_not_set_opts list + do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str] + + (t.skip, t.skip_expected) = _skip_keys(test_def) + + # Skip tests if the tool requirements are not met + if 'tools' in test_def: + assert isinstance(test_def['tools'], dict) + for tool, vers_req in test_def['tools'].items(): + if tool not in tool_vers_map: + t.skip = True + elif not mesonlib.version_compare(tool_vers_map[tool], vers_req): + t.skip = True + + # Skip the matrix code and just update the existing test + if 'matrix' not in test_def: + t.env.update(env) + t.installed_files = installed + t.do_not_set_opts = do_not_set_opts + t.stdout = stdout + return [t] + + new_opt_list: T.List[T.List[T.Tuple[str, bool, bool]]] + + # 'matrix' entry is present, so build multiple tests from matrix definition + opt_list = [] # type: T.List[T.List[T.Tuple[str, bool, bool]]] + matrix = test_def['matrix'] + assert "options" in matrix + for key, val in matrix["options"].items(): + assert isinstance(val, list) + tmp_opts = [] # type: T.List[T.Tuple[str, bool, bool]] + for i in val: + assert isinstance(i, dict) + assert "val" in i + + (skip, skip_expected) = _skip_keys(i) + + # Only run the test if all compiler IDs match + if 'compilers' in i: + for lang, id_list in i['compilers'].items(): + if lang not in compiler_id_map or compiler_id_map[lang] not in id_list: + skip = True + break + + # Add an empty matrix entry + if i['val'] is None: + tmp_opts += [(None, skip, skip_expected)] + continue + + tmp_opts += [('{}={}'.format(key, i['val']), skip, skip_expected)] + + if opt_list: + new_opt_list = [] + for i in opt_list: + for j in tmp_opts: + new_opt_list += [[*i, j]] + opt_list = new_opt_list + else: + opt_list = [[x] for x in tmp_opts] + + # Exclude specific configurations + if 'exclude' in matrix: + assert isinstance(matrix['exclude'], list) + new_opt_list = [] + for i in opt_list: + exclude = False + opt_names = [x[0] for x in i] + for j in matrix['exclude']: + ex_list = [f'{k}={v}' for k, v in j.items()] + if all([x in opt_names for x in ex_list]): + exclude = True + break + + if not exclude: + new_opt_list += [i] + + opt_list = new_opt_list + + for i in opt_list: + name = ' '.join([x[0] for x in i if x[0] is not None]) + opts = ['-D' + x[0] for x in i if x[0] is not None] + skip = any([x[1] for x in i]) + skip_expected = any([x[2] for x in i]) + test = TestDef(t.path, name, opts, skip or t.skip) + test.env.update(env) + test.installed_files = installed + test.do_not_set_opts = do_not_set_opts + test.stdout = stdout + test.skip_expected = skip_expected or t.skip_expected + all_tests.append(test) + + return all_tests + + +def gather_tests(testdir: Path, stdout_mandatory: bool, only: T.List[str]) -> T.List[TestDef]: + all_tests: T.List[TestDef] = [] + for t in testdir.iterdir(): + # Filter out non-test files (dot files, etc.) + if not t.is_dir() or t.name.startswith('.'): + continue + if only and not any(t.name.startswith(prefix) for prefix in only): + continue + test_def = TestDef(t, None, []) + 
all_tests.extend(load_test_json(test_def, stdout_mandatory)) + return sorted(all_tests) + + +def have_d_compiler() -> bool: + if shutil.which("ldc2"): + return True + elif shutil.which("ldc"): + return True + elif shutil.which("gdc"): + return True + elif shutil.which("dmd"): + # The Windows installer sometimes produces a DMD install + # that exists but segfaults every time the compiler is run. + # Don't know why. Don't know how to fix. Skip in this case. + cp = subprocess.run(['dmd', '--version'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + if cp.stdout == b'': + return False + return True + return False + +def have_objc_compiler(use_tmp: bool) -> bool: + with TemporaryDirectoryWinProof(prefix='b ', dir=None if use_tmp else '.') as build_dir: + env = environment.Environment(None, build_dir, get_fake_options('/')) + try: + objc_comp = detect_objc_compiler(env, MachineChoice.HOST) + except mesonlib.MesonException: + return False + if not objc_comp: + return False + env.coredata.process_new_compiler('objc', objc_comp, env) + try: + objc_comp.sanity_check(env.get_scratch_dir(), env) + except mesonlib.MesonException: + return False + return True + +def have_objcpp_compiler(use_tmp: bool) -> bool: + with TemporaryDirectoryWinProof(prefix='b ', dir=None if use_tmp else '.') as build_dir: + env = environment.Environment(None, build_dir, get_fake_options('/')) + try: + objcpp_comp = detect_objcpp_compiler(env, MachineChoice.HOST) + except mesonlib.MesonException: + return False + if not objcpp_comp: + return False + env.coredata.process_new_compiler('objcpp', objcpp_comp, env) + try: + objcpp_comp.sanity_check(env.get_scratch_dir(), env) + except mesonlib.MesonException: + return False + return True + +def have_java() -> bool: + if shutil.which('javac') and shutil.which('java'): + return True + return False + +def skip_dont_care(t: TestDef) -> bool: + # Everything is optional when not running on CI + if not under_ci: + return True + + # Non-frameworks test are allowed to determine their own skipping under CI (currently) + if not t.category.endswith('frameworks'): + return True + + # For the moment, all skips in jobs which don't set MESON_CI_JOBNAME are + # treated as expected. In the future, we should make it mandatory to set + # MESON_CI_JOBNAME for all CI jobs. + if ci_jobname is None: + return True + + return False + +def skip_csharp(backend: Backend) -> bool: + if backend is not Backend.ninja: + return True + if not shutil.which('resgen'): + return True + if shutil.which('mcs'): + return False + if shutil.which('csc'): + # Only support VS2017 for now. Earlier versions fail + # under CI in mysterious ways. + try: + stdo = subprocess.check_output(['csc', '/version']) + except subprocess.CalledProcessError: + return True + # Having incrementing version numbers would be too easy. + # Microsoft reset the versioning back to 1.0 (from 4.x) + # when they got the Roslyn based compiler. Thus there + # is NO WAY to reliably do version number comparisons. + # Only support the version that ships with VS2017. + return not stdo.startswith(b'2.') + return True + +# In Azure some setups have a broken rustc that will error out +# on all compilation attempts. 
+ +def has_broken_rustc() -> bool: + dirname = Path('brokenrusttest') + if dirname.exists(): + mesonlib.windows_proof_rmtree(dirname.as_posix()) + dirname.mkdir() + sanity_file = dirname / 'sanity.rs' + sanity_file.write_text('fn main() {\n}\n', encoding='utf-8') + pc = subprocess.run(['rustc', '-o', 'sanity.exe', 'sanity.rs'], + cwd=dirname.as_posix(), + stdout = subprocess.DEVNULL, + stderr = subprocess.DEVNULL) + mesonlib.windows_proof_rmtree(dirname.as_posix()) + return pc.returncode != 0 + +def should_skip_rust(backend: Backend) -> bool: + if not shutil.which('rustc'): + return True + if backend is not Backend.ninja: + return True + if mesonlib.is_windows() and has_broken_rustc(): + return True + return False + +def detect_tests_to_run(only: T.Dict[str, T.List[str]], use_tmp: bool) -> T.List[T.Tuple[str, T.List[TestDef], bool]]: + """ + Parameters + ---------- + only: dict of categories and list of test cases, optional + specify names of tests to run + + Returns + ------- + gathered_tests: list of tuple of str, list of TestDef, bool + tests to run + """ + + skip_fortran = not(shutil.which('gfortran') or + shutil.which('flang') or + shutil.which('pgfortran') or + shutil.which('ifort')) + + class TestCategory: + def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandatory: bool = False): + self.category = category # category name + self.subdir = subdir # subdirectory + self.skip = skip # skip condition + self.stdout_mandatory = stdout_mandatory # expected stdout is mandatory for tests in this category + + all_tests = [ + TestCategory('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)), + TestCategory('common', 'common'), + TestCategory('native', 'native'), + TestCategory('warning-meson', 'warning', stdout_mandatory=True), + TestCategory('failing-meson', 'failing', stdout_mandatory=True), + TestCategory('failing-build', 'failing build'), + TestCategory('failing-test', 'failing test'), + TestCategory('keyval', 'keyval'), + TestCategory('platform-osx', 'osx', not mesonlib.is_osx()), + TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), + TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), + TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), + TestCategory('C#', 'csharp', skip_csharp(backend)), + TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), + TestCategory('cython', 'cython', backend is not Backend.ninja or not shutil.which(os.environ.get('CYTHON', 'cython'))), + TestCategory('rust', 'rust', should_skip_rust(backend)), + TestCategory('d', 'd', backend is not Backend.ninja or not have_d_compiler()), + TestCategory('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), + TestCategory('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), + TestCategory('fortran', 'fortran', skip_fortran or backend != Backend.ninja), + TestCategory('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), + # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja + TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), + TestCategory('python3', 'python3', backend is not 
Backend.ninja), + TestCategory('python', 'python'), + TestCategory('fpga', 'fpga', shutil.which('yosys') is None), + TestCategory('frameworks', 'frameworks'), + TestCategory('nasm', 'nasm'), + TestCategory('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), + ] + + categories = [t.category for t in all_tests] + assert categories == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests categories' + + if only: + for key in only.keys(): + assert key in categories, f'key `{key}` is not a recognized category' + all_tests = [t for t in all_tests if t.category in only.keys()] + + gathered_tests = [(t.category, gather_tests(Path('test cases', t.subdir), t.stdout_mandatory, only[t.category]), t.skip) for t in all_tests] + return gathered_tests + +def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], + log_name_base: str, + failfast: bool, + extra_args: T.List[str], + use_tmp: bool, + num_workers: int) -> T.Tuple[int, int, int]: + txtname = log_name_base + '.txt' + with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf: + return _run_tests(all_tests, log_name_base, failfast, extra_args, use_tmp, num_workers, lf) + +class TestStatus(Enum): + OK = normal_green(' [SUCCESS] ') + SKIP = yellow(' [SKIPPED] ') + ERROR = red(' [ERROR] ') + UNEXSKIP = red('[UNEXSKIP] ') + UNEXRUN = red(' [UNEXRUN] ') + CANCELED = cyan('[CANCELED] ') + RUNNING = blue(' [RUNNING] ') # Should never be actually printed + LOG = bold(' [LOG] ') # Should never be actually printed + +def default_print(*args: mlog.TV_Loggable, sep: str = ' ') -> None: + print(*args, sep=sep) + +safe_print = default_print + +class TestRunFuture: + def __init__(self, name: str, testdef: TestDef, future: T.Optional['Future[T.Optional[TestResult]]']) -> None: + super().__init__() + self.name = name + self.testdef = testdef + self.future = future + self.status = TestStatus.RUNNING if self.future is not None else TestStatus.SKIP + + @property + def result(self) -> T.Optional[TestResult]: + return self.future.result() if self.future else None + + def log(self) -> None: + without_install = '' if install_commands else '(without install)' + safe_print(self.status.value, without_install, *self.testdef.display_name()) + + def update_log(self, new_status: TestStatus) -> None: + self.status = new_status + self.log() + + def cancel(self) -> None: + if self.future is not None and self.future.cancel(): + self.status = TestStatus.CANCELED + +class LogRunFuture: + def __init__(self, msgs: mlog.TV_LoggableList) -> None: + self.msgs = msgs + self.status = TestStatus.LOG + + def log(self) -> None: + safe_print(*self.msgs, sep='') + + def cancel(self) -> None: + pass + +RunFutureUnion = T.Union[TestRunFuture, LogRunFuture] + +def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], + log_name_base: str, + failfast: bool, + extra_args: T.List[str], + use_tmp: bool, + num_workers: int, + logfile: T.TextIO) -> T.Tuple[int, int, int]: + global stop, host_c_compiler + xmlname = log_name_base + '.xml' + junit_root = ET.Element('testsuites') + conf_time: float = 0 + build_time: float = 0 + test_time: float = 0 + passing_tests = 0 + failing_tests = 0 + skipped_tests = 0 + + print(f'\nRunning tests with {num_workers} workers') + + # Pack the global state + state = (compile_commands, clean_commands, test_commands, install_commands, uninstall_commands, backend, backend_flags, host_c_compiler) + executor = ProcessPoolExecutor(max_workers=num_workers) + + futures: 
T.List[RunFutureUnion] = [] + + # First, collect and start all tests and also queue log messages + for name, test_cases, skipped in all_tests: + current_suite = ET.SubElement(junit_root, 'testsuite', {'name': name, 'tests': str(len(test_cases))}) + if skipped: + futures += [LogRunFuture(['\n', bold(f'Not running {name} tests.'), '\n'])] + else: + futures += [LogRunFuture(['\n', bold(f'Running {name} tests.'), '\n'])] + + for t in test_cases: + # Jenkins screws us over by automatically sorting test cases by name + # and getting it wrong by not doing logical number sorting. + (testnum, testbase) = t.path.name.split(' ', 1) + testname = '%.3d %s' % (int(testnum), testbase) + if t.name: + testname += f' ({t.name})' + should_fail = '' + suite_args = [] + if name.startswith('failing'): + should_fail = name.split('failing-')[1] + if name.startswith('warning'): + suite_args = ['--fatal-meson-warnings'] + should_fail = name.split('warning-')[1] + + if skipped or t.skip: + futures += [TestRunFuture(testname, t, None)] + continue + result_future = executor.submit(run_test, t, extra_args + suite_args + t.args, should_fail, use_tmp, state=state) + futures += [TestRunFuture(testname, t, result_future)] + + # Ensure we only cancel once + tests_canceled = False + + # Optionally enable the tqdm progress bar + global safe_print + futures_iter: T.Iterable[RunFutureUnion] = futures + try: + from tqdm import tqdm + futures_iter = tqdm(futures, desc='Running tests', unit='test') + + def tqdm_print(*args: mlog.TV_Loggable, sep: str = ' ') -> None: + tqdm.write(sep.join([str(x) for x in args])) + + safe_print = tqdm_print + except ImportError: + pass + + # Wait and handle the test results and print the stored log output + for f in futures_iter: + # Just a log entry to print something to stdout + sys.stdout.flush() + if isinstance(f, LogRunFuture): + f.log() + continue + + # Actual test run + testname = f.name + t = f.testdef + try: + result = f.result + except (CancelledError, KeyboardInterrupt): + f.status = TestStatus.CANCELED + + if stop and not tests_canceled: + num_running = sum([1 if f2.status is TestStatus.RUNNING else 0 for f2 in futures]) + for f2 in futures: + f2.cancel() + executor.shutdown() + num_canceled = sum([1 if f2.status is TestStatus.CANCELED else 0 for f2 in futures]) + safe_print(f'\nCanceled {num_canceled} out of {num_running} running tests.') + safe_print(f'Finishing the remaining {num_running - num_canceled} tests.\n') + tests_canceled = True + + # Handle canceled tests + if f.status is TestStatus.CANCELED: + f.log() + continue + + # Handle skipped tests + if result is None: + # skipped due to a category skip, or to 'tools:' or 'skip_on_env:' + is_skipped = True + skip_as_expected = True + else: + # skipped due to test outputting 'MESON_SKIP_TEST' + is_skipped = 'MESON_SKIP_TEST' in result.stdo + if not skip_dont_care(t): + skip_as_expected = (is_skipped == t.skip_expected) + else: + skip_as_expected = True + + if is_skipped: + skipped_tests += 1 + + if is_skipped and skip_as_expected: + f.update_log(TestStatus.SKIP) + current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, 'classname': t.category}) + ET.SubElement(current_test, 'skipped', {}) + continue + + if not skip_as_expected: + failing_tests += 1 + if is_skipped: + skip_msg = 'Test asked to be skipped, but was not expected to' + status = TestStatus.UNEXSKIP + else: + skip_msg = 'Test ran, but was expected to be skipped' + status = TestStatus.UNEXRUN + result.msg = "%s for MESON_CI_JOBNAME '%s'" % (skip_msg, 
ci_jobname) + + f.update_log(status) + current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, 'classname': t.category}) + ET.SubElement(current_test, 'failure', {'message': result.msg}) + continue + + # Handle failed tests + if result.msg != '': + f.update_log(TestStatus.ERROR) + safe_print(bold('During:'), result.step.name) + safe_print(bold('Reason:'), result.msg) + failing_tests += 1 + # Append a visual separator for the different test cases + cols = shutil.get_terminal_size((100, 20)).columns + name_str = ' '.join([str(x) for x in f.testdef.display_name()]) + name_len = len(re.sub(r'\x1B[^m]+m', '', name_str)) # Do not count escape sequences + left_w = (cols // 2) - (name_len // 2) - 1 + left_w = max(3, left_w) + right_w = cols - left_w - name_len - 2 + right_w = max(3, right_w) + failing_logs.append(f'\n\x1b[31m{"="*left_w}\x1b[0m {name_str} \x1b[31m{"="*right_w}\x1b[0m\n') + if result.step == BuildStep.configure and result.mlog != no_meson_log_msg: + # For configure failures, instead of printing stdout, + # print the meson log if available since it's a superset + # of stdout and often has very useful information. + failing_logs.append(result.mlog) + elif under_ci: + # Always print the complete meson log when running in + # a CI. This helps debug issues that only occur in + # a hard-to-reproduce environment + failing_logs.append(result.mlog) + failing_logs.append(result.stdo) + else: + failing_logs.append(result.stdo) + for cmd_res in result.cicmds: + failing_logs.append(cmd_res) + failing_logs.append(result.stde) + if failfast: + safe_print("Cancelling the rest of the tests") + for f2 in futures: + f2.cancel() + else: + f.update_log(TestStatus.OK) + passing_tests += 1 + conf_time += result.conftime + build_time += result.buildtime + test_time += result.testtime + total_time = conf_time + build_time + test_time + log_text_file(logfile, t.path, result) + current_test = ET.SubElement( + current_suite, + 'testcase', + {'name': testname, 'classname': t.category, 'time': '%.3f' % total_time} + ) + if result.msg != '': + ET.SubElement(current_test, 'failure', {'message': result.msg}) + stdoel = ET.SubElement(current_test, 'system-out') + stdoel.text = result.stdo + stdeel = ET.SubElement(current_test, 'system-err') + stdeel.text = result.stde + + # Reset, just in case + safe_print = default_print + + print() + print("Total configuration time: %.2fs" % conf_time) + print("Total build time: %.2fs" % build_time) + print("Total test time: %.2fs" % test_time) + ET.ElementTree(element=junit_root).write(xmlname, xml_declaration=True, encoding='UTF-8') + return passing_tests, failing_tests, skipped_tests + +def check_meson_commands_work(use_tmpdir: bool, extra_args: T.List[str]) -> None: + global backend, compile_commands, test_commands, install_commands + testdir = PurePath('test cases', 'common', '1 trivial').as_posix() + meson_commands = mesonlib.python_command + [get_meson_script()] + with TemporaryDirectoryWinProof(prefix='b ', dir=None if use_tmpdir else '.') as build_dir: + print('Checking that configuring works...') + gen_cmd = meson_commands + [testdir, build_dir] + backend_flags + extra_args + pc, o, e = Popen_safe(gen_cmd) + if pc.returncode != 0: + raise RuntimeError(f'Failed to configure {testdir!r}:\n{e}\n{o}') + print('Checking that introspect works...') + pc, o, e = Popen_safe(meson_commands + ['introspect', '--targets'], cwd=build_dir) + json.loads(o) + if pc.returncode != 0: + raise RuntimeError(f'Failed to introspect --targets {testdir!r}:\n{e}\n{o}') + 
print('Checking that building works...') + dir_args = get_backend_args_for_dir(backend, build_dir) + pc, o, e = Popen_safe(compile_commands + dir_args, cwd=build_dir) + if pc.returncode != 0: + raise RuntimeError(f'Failed to build {testdir!r}:\n{e}\n{o}') + print('Checking that testing works...') + pc, o, e = Popen_safe(test_commands, cwd=build_dir) + if pc.returncode != 0: + raise RuntimeError(f'Failed to test {testdir!r}:\n{e}\n{o}') + if install_commands: + print('Checking that installing works...') + pc, o, e = Popen_safe(install_commands, cwd=build_dir) + if pc.returncode != 0: + raise RuntimeError(f'Failed to install {testdir!r}:\n{e}\n{o}') + + +def detect_system_compiler(options: 'CompilerArgumentType') -> None: + global host_c_compiler, compiler_id_map + + fake_opts = get_fake_options('/') + if options.cross_file: + fake_opts.cross_file = [options.cross_file] + if options.native_file: + fake_opts.native_file = [options.native_file] + + env = environment.Environment(None, None, fake_opts) + + print_compilers(env, MachineChoice.HOST) + if options.cross_file: + print_compilers(env, MachineChoice.BUILD) + + for lang in sorted(compilers.all_languages): + try: + comp = compiler_from_language(env, lang, MachineChoice.HOST) + # note compiler id for later use with test.json matrix + compiler_id_map[lang] = comp.get_id() + except mesonlib.MesonException: + comp = None + + # note C compiler for later use by platform_fix_name() + if lang == 'c': + if comp: + host_c_compiler = comp.get_id() + else: + raise RuntimeError("Could not find C compiler.") + + +def print_compilers(env: 'Environment', machine: MachineChoice) -> None: + print() + print(f'{machine.get_lower_case_name()} machine compilers') + print() + for lang in sorted(compilers.all_languages): + try: + comp = compiler_from_language(env, lang, machine) + details = '{:<10} {} {}'.format('[' + comp.get_id() + ']', ' '.join(comp.get_exelist()), comp.get_version_string()) + except mesonlib.MesonException: + details = '[not found]' + print(f'{lang:<7}: {details}') + +class ToolInfo(T.NamedTuple): + tool: str + args: T.List[str] + regex: T.Pattern + match_group: int + +def print_tool_versions() -> None: + tools: T.List[ToolInfo] = [ + ToolInfo( + 'ninja', + ['--version'], + re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'), + 1, + ), + ToolInfo( + 'cmake', + ['--version'], + re.compile(r'^cmake version ([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'), + 1, + ), + ToolInfo( + 'hotdoc', + ['--version'], + re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'), + 1, + ), + ] + + def get_version(t: ToolInfo) -> str: + exe = shutil.which(t.tool) + if not exe: + return 'not found' + + args = [t.tool] + t.args + pc, o, e = Popen_safe(args) + if pc.returncode != 0: + return '{} (invalid {} executable)'.format(exe, t.tool) + for i in o.split('\n'): + i = i.strip('\n\r\t ') + m = t.regex.match(i) + if m is not None: + tool_vers_map[t.tool] = m.group(t.match_group) + return '{} ({})'.format(exe, m.group(t.match_group)) + + return f'{exe} (unknown)' + + print() + print('tools') + print() + + max_width = max([len(x.tool) for x in tools] + [7]) + for tool in tools: + print('{0:<{2}}: {1}'.format(tool.tool, get_version(tool), max_width)) + print() + +def clear_transitive_files() -> None: + a = Path('test cases/common') + for d in a.glob('*subproject subdir/subprojects/subsubsub*'): + if d.is_dir(): + mesonlib.windows_proof_rmtree(str(d)) + else: + mesonlib.windows_proof_rm(str(d)) + +if __name__ == '__main__': + setup_vsenv() + + try: + # This fails in some CI environments 
for unknown reasons. + num_workers = multiprocessing.cpu_count() + except Exception as e: + print('Could not determine number of CPUs due to the following reason:' + str(e)) + print('Defaulting to using only two processes') + num_workers = 2 + # Due to Ninja deficiency, almost 50% of build time + # is spent waiting. Do something useful instead. + # + # Remove this once the following issue has been resolved: + # https://github.com/mesonbuild/meson/pull/2082 + if not mesonlib.is_windows(): # twice as fast on Windows by *not* multiplying by 2. + num_workers *= 2 + + parser = argparse.ArgumentParser(description="Run the test suite of Meson.") + parser.add_argument('extra_args', nargs='*', + help='arguments that are passed directly to Meson (remember to have -- before these).') + parser.add_argument('--backend', dest='backend', choices=backendlist) + parser.add_argument('-j', dest='num_workers', type=int, default=num_workers, + help=f'Maximum number of parallel tests (default {num_workers})') + parser.add_argument('--failfast', action='store_true', + help='Stop running if test case fails') + parser.add_argument('--no-unittests', action='store_true', + help='Not used, only here to simplify run_tests.py') + parser.add_argument('--only', default=[], + help='name of test(s) to run, in format "category[/name]" where category is one of: ' + ', '.join(ALL_TESTS), nargs='+') + parser.add_argument('--cross-file', action='store', help='File describing cross compilation environment.') + parser.add_argument('--native-file', action='store', help='File describing native compilation environment.') + parser.add_argument('--use-tmpdir', action='store_true', help='Use tmp directory for temporary files.') + options = T.cast('ArgumentType', parser.parse_args()) + + if options.cross_file: + options.extra_args += ['--cross-file', options.cross_file] + if options.native_file: + options.extra_args += ['--native-file', options.native_file] + + clear_transitive_files() + + print('Meson build system', meson_version, 'Project Tests') + print('Using python', sys.version.split('\n')[0]) + if 'VSCMD_VER' in os.environ: + print('VSCMD version', os.environ['VSCMD_VER']) + setup_commands(options.backend) + detect_system_compiler(options) + print_tool_versions() + script_dir = os.path.split(__file__)[0] + if script_dir != '': + os.chdir(script_dir) + check_meson_commands_work(options.use_tmpdir, options.extra_args) + only = collections.defaultdict(list) + for i in options.only: + try: + cat, case = i.split('/') + only[cat].append(case) + except ValueError: + only[i].append('') + try: + all_tests = detect_tests_to_run(only, options.use_tmpdir) + res = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args, options.use_tmpdir, options.num_workers) + (passing_tests, failing_tests, skipped_tests) = res + except StopException: + pass + print() + print('Total passed tests: ', green(str(passing_tests))) + print('Total failed tests: ', red(str(failing_tests))) + print('Total skipped tests:', yellow(str(skipped_tests))) + if failing_tests > 0: + print('\nMesonlogs of failing tests\n') + for l in failing_logs: + try: + print(l, '\n') + except UnicodeError: + print(l.encode('ascii', errors='replace').decode(), '\n') + for name, dirs, _ in all_tests: + dir_names = list({x.path.name for x in dirs}) + for k, g in itertools.groupby(dir_names, key=lambda x: x.split()[0]): + tests = list(g) + if len(tests) != 1: + print('WARNING: The {} suite contains duplicate "{}" tests: "{}"'.format(name, k, '", "'.join(tests))) + 
clear_transitive_files() + raise SystemExit(failing_tests) diff --git a/meson/run_single_test.py b/meson/run_single_test.py new file mode 100755 index 000000000..0a9357300 --- /dev/null +++ b/meson/run_single_test.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +# SPDX-license-identifier: Apache-2.0 +# Copyright © 2021 Intel Corporation + +"""Script for running a single project test. + +This script is meant for Meson developers who want to run a single project +test, with all of the rules from the test.json file loaded. +""" + +import argparse +import pathlib +import typing as T + +from mesonbuild import mlog +from run_project_tests import TestDef, load_test_json, run_test, BuildStep +from run_project_tests import setup_commands, detect_system_compiler, print_tool_versions + +if T.TYPE_CHECKING: + from run_project_tests import CompilerArgumentType + + class ArgumentType(CompilerArgumentType): + + """Typing information for command line arguments.""" + + case: pathlib.Path + subtests: T.List[int] + backend: str + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument('case', type=pathlib.Path, help='The test case to run') + parser.add_argument('--subtest', type=int, action='append', dest='subtests', help='which subtests to run') + parser.add_argument('--backend', action='store', help="Which backend to use") + parser.add_argument('--cross-file', action='store', help='File describing cross compilation environment.') + parser.add_argument('--native-file', action='store', help='File describing native compilation environment.') + parser.add_argument('--use-tmpdir', action='store_true', help='Use tmp directory for temporary files.') + args = T.cast('ArgumentType', parser.parse_args()) + + setup_commands(args.backend) + detect_system_compiler(args) + print_tool_versions() + + test = TestDef(args.case, args.case.stem, []) + tests = load_test_json(test, False) + if args.subtests: + tests = [t for i, t in enumerate(tests) if i in args.subtests] + + results = [run_test(t, t.args, '', True) for t in tests] + failed = False + for test, result in zip(tests, results): + if (result is None) or ('MESON_SKIP_TEST' in result.stdo): + msg = mlog.yellow('SKIP:') + elif result.msg: + msg = mlog.red('FAIL:') + failed = True + else: + msg = mlog.green('PASS:') + mlog.log(msg, *test.display_name()) + if result is not None and result.msg and 'MESON_SKIP_TEST' not in result.stdo: + mlog.log('reason:', result.msg) + if result.step is BuildStep.configure: + # For configure failures, instead of printing stdout, + # print the meson log if available since it's a superset + # of stdout and often has very useful information. + mlog.log(result.mlog) + else: + mlog.log(result.stdo) + for cmd_res in result.cicmds: + mlog.log(cmd_res) + mlog.log(result.stde) + + exit(1 if failed else 0) + +if __name__ == "__main__": + main() diff --git a/meson/run_tests.py b/meson/run_tests.py new file mode 100755 index 000000000..90641ada1 --- /dev/null +++ b/meson/run_tests.py @@ -0,0 +1,387 @@ +#!/usr/bin/env python3 + +# Copyright 2012-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Work around some pathlib bugs... +from mesonbuild import _pathlib +import sys +sys.modules['pathlib'] = _pathlib + +import collections +import os +import time +import shutil +import subprocess +import platform +import argparse +import traceback +from io import StringIO +from enum import Enum +from glob import glob +from pathlib import Path +from unittest import mock +import typing as T + +from mesonbuild import compilers +from mesonbuild import dependencies +from mesonbuild import mesonlib +from mesonbuild import mesonmain +from mesonbuild import mtest +from mesonbuild import mlog +from mesonbuild.environment import Environment, detect_ninja +from mesonbuild.coredata import backendlist, version as meson_version +from mesonbuild.mesonlib import OptionKey + +NINJA_1_9_OR_NEWER = False +NINJA_CMD = None +# If we're on CI, just assume we have ninja in PATH and it's new enough because +# we provide that. This avoids having to detect ninja for every subprocess unit +# test that we run. +if 'CI' in os.environ: + NINJA_1_9_OR_NEWER = True + NINJA_CMD = ['ninja'] +else: + # Look for 1.9 to see if https://github.com/ninja-build/ninja/issues/1219 + # is fixed + NINJA_CMD = detect_ninja('1.9') + if NINJA_CMD is not None: + NINJA_1_9_OR_NEWER = True + else: + mlog.warning('Found ninja <1.9, tests will run slower', once=True) + NINJA_CMD = detect_ninja() +if NINJA_CMD is None: + raise RuntimeError('Could not find Ninja v1.7 or newer') + +def guess_backend(backend_str: str, msbuild_exe: str) -> T.Tuple['Backend', T.List[str]]: + # Auto-detect backend if unspecified + backend_flags = [] + if backend_str is None: + if msbuild_exe is not None and (mesonlib.is_windows() and not _using_intelcl()): + backend_str = 'vs' # Meson will auto-detect VS version to use + else: + backend_str = 'ninja' + + # Set backend arguments for Meson + if backend_str.startswith('vs'): + backend_flags = ['--backend=' + backend_str] + backend = Backend.vs + elif backend_str == 'xcode': + backend_flags = ['--backend=xcode'] + backend = Backend.xcode + elif backend_str == 'ninja': + backend_flags = ['--backend=ninja'] + backend = Backend.ninja + else: + raise RuntimeError(f'Unknown backend: {backend_str!r}') + return (backend, backend_flags) + + +def _using_intelcl() -> bool: + """ + detect if intending to use Intel-Cl compilers (Intel compilers on Windows) + Sufficient evidence of intent is that the user is working in the Intel compiler + shell environment; otherwise this function returns False + """ + if not mesonlib.is_windows(): + return False + # handle where user tried to "blank" MKLROOT and left space(s) + if not os.environ.get('MKLROOT', '').strip(): + return False + if (os.environ.get('CC') == 'icl' or + os.environ.get('CXX') == 'icl' or + os.environ.get('FC') == 'ifort'): + return True + # Intel-Cl users might not have the CC,CXX,FC envvars set, + # but because they're in Intel shell, the exe's below are on PATH + if shutil.which('icl') or shutil.which('ifort'): + return True + mlog.warning('It appears you might be intending to use Intel compiler on Windows ' + 'since non-empty environment variable MKLROOT is set to {}. ' + 'However, Meson cannot find the Intel Windows compiler executables (icl, ifort). ' 
+ 'Please try using the Intel shell.'.format(os.environ.get('MKLROOT'))) + return False + + +# Fake classes and objects for mocking +class FakeBuild: + def __init__(self, env): + self.environment = env + +class FakeCompilerOptions: + def __init__(self): + self.value = [] + +# TODO: use a typing.Protocol here +def get_fake_options(prefix: str = '') -> argparse.Namespace: + opts = argparse.Namespace() + opts.native_file = [] + opts.cross_file = None + opts.wrap_mode = None + opts.prefix = prefix + opts.cmd_line_options = {} + return opts + +def get_fake_env(sdir='', bdir=None, prefix='', opts=None): + if opts is None: + opts = get_fake_options(prefix) + env = Environment(sdir, bdir, opts) + env.coredata.options[OptionKey('args', lang='c')] = FakeCompilerOptions() + env.machines.host.cpu_family = 'x86_64' # Used on macOS inside find_library + return env + + +Backend = Enum('Backend', 'ninja vs xcode') + +if 'MESON_EXE' in os.environ: + meson_exe = mesonlib.split_args(os.environ['MESON_EXE']) +else: + meson_exe = None + +if mesonlib.is_windows() or mesonlib.is_cygwin(): + exe_suffix = '.exe' +else: + exe_suffix = '' + +def get_meson_script() -> str: + ''' + Guess the meson that corresponds to the `mesonbuild` that has been imported + so we can run configure and other commands in-process, since mesonmain.run + needs to know the meson_command to use. + + Also used by run_unittests.py to determine what meson to run when not + running in-process (which is the default). + ''' + # Is there a meson.py next to the mesonbuild currently in use? + mesonbuild_dir = Path(mesonmain.__file__).resolve().parent.parent + meson_script = mesonbuild_dir / 'meson.py' + if meson_script.is_file(): + return str(meson_script) + # Then if mesonbuild is in PYTHONPATH, meson must be in PATH + mlog.warning('Could not find meson.py next to the mesonbuild module. 
' + 'Trying system meson...') + meson_cmd = shutil.which('meson') + if meson_cmd: + return meson_cmd + raise RuntimeError(f'Could not find {meson_script!r} or a meson in PATH') + +def get_backend_args_for_dir(backend: Backend, builddir: str) -> T.List[str]: + ''' + Visual Studio backend needs to be given the solution to build + ''' + if backend is Backend.vs: + sln_name = glob(os.path.join(builddir, '*.sln'))[0] + return [os.path.split(sln_name)[-1]] + return [] + +def find_vcxproj_with_target(builddir, target): + import re, fnmatch + t, ext = os.path.splitext(target) + if ext: + p = fr'{t}\s*\{ext}' + else: + p = fr'{t}' + for _, _, files in os.walk(builddir): + for f in fnmatch.filter(files, '*.vcxproj'): + f = os.path.join(builddir, f) + with open(f, encoding='utf-8') as o: + if re.search(p, o.read(), flags=re.MULTILINE): + return f + raise RuntimeError(f'No vcxproj matching {p!r} in {builddir!r}') + +def get_builddir_target_args(backend: Backend, builddir, target): + dir_args = [] + if not target: + dir_args = get_backend_args_for_dir(backend, builddir) + if target is None: + return dir_args + if backend is Backend.vs: + vcxproj = find_vcxproj_with_target(builddir, target) + target_args = [vcxproj] + elif backend is Backend.xcode: + target_args = ['-target', target] + elif backend is Backend.ninja: + target_args = [target] + else: + raise AssertionError(f'Unknown backend: {backend!r}') + return target_args + dir_args + +def get_backend_commands(backend: Backend, debug: bool = False) -> \ + T.Tuple[T.List[str], T.List[str], T.List[str], T.List[str], T.List[str]]: + install_cmd: T.List[str] = [] + uninstall_cmd: T.List[str] = [] + clean_cmd: T.List[str] + cmd: T.List[str] + test_cmd: T.List[str] + if backend is Backend.vs: + cmd = ['msbuild'] + clean_cmd = cmd + ['/target:Clean'] + test_cmd = cmd + ['RUN_TESTS.vcxproj'] + elif backend is Backend.xcode: + cmd = ['xcodebuild'] + clean_cmd = cmd + ['-alltargets', 'clean'] + test_cmd = cmd + ['-target', 'RUN_TESTS'] + elif backend is Backend.ninja: + global NINJA_CMD + cmd = NINJA_CMD + ['-w', 'dupbuild=err', '-d', 'explain'] + if debug: + cmd += ['-v'] + clean_cmd = cmd + ['clean'] + test_cmd = cmd + ['test', 'benchmark'] + install_cmd = cmd + ['install'] + uninstall_cmd = cmd + ['uninstall'] + else: + raise AssertionError(f'Unknown backend: {backend!r}') + return cmd, clean_cmd, test_cmd, install_cmd, uninstall_cmd + +def ensure_backend_detects_changes(backend: Backend) -> None: + global NINJA_1_9_OR_NEWER + if backend is not Backend.ninja: + return + need_workaround = False + # We're using ninja >= 1.9 which has QuLogic's patch for sub-1s resolution + # timestamps + if not NINJA_1_9_OR_NEWER: + mlog.warning('Don\'t have ninja >= 1.9, enabling timestamp resolution workaround', once=True) + need_workaround = True + # Increase the difference between build.ninja's timestamp and the timestamp + # of whatever you changed: https://github.com/ninja-build/ninja/issues/371 + if need_workaround: + time.sleep(1) + +def run_mtest_inprocess(commandlist: T.List[str]) -> T.Tuple[int, str, str]: + stderr = StringIO() + stdout = StringIO() + with mock.patch.object(sys, 'stdout', stdout), mock.patch.object(sys, 'stderr', stderr): + returncode = mtest.run_with_args(commandlist) + return returncode, stdout.getvalue(), stderr.getvalue() + +def clear_meson_configure_class_caches() -> None: + compilers.CCompiler.find_library_cache = {} + compilers.CCompiler.find_framework_cache = {} + dependencies.PkgConfigDependency.pkgbin_cache = {} + 
dependencies.PkgConfigDependency.class_pkgbin = mesonlib.PerMachine(None, None) + mesonlib.project_meson_versions = collections.defaultdict(str) + +def run_configure_inprocess(commandlist: T.List[str], env: T.Optional[T.Dict[str, str]] = None, catch_exception: bool = False) -> T.Tuple[int, str, str]: + stderr = StringIO() + stdout = StringIO() + returncode = 0 + with mock.patch.dict(os.environ, env or {}), mock.patch.object(sys, 'stdout', stdout), mock.patch.object(sys, 'stderr', stderr): + try: + returncode = mesonmain.run(commandlist, get_meson_script()) + except Exception: + if catch_exception: + returncode = 1 + traceback.print_exc() + else: + raise + finally: + clear_meson_configure_class_caches() + return returncode, stdout.getvalue(), stderr.getvalue() + +def run_configure_external(full_command: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]: + pc, o, e = mesonlib.Popen_safe(full_command, env=env) + return pc.returncode, o, e + +def run_configure(commandlist: T.List[str], env: T.Optional[T.Dict[str, str]] = None, catch_exception: bool = False) -> T.Tuple[int, str, str]: + global meson_exe + if meson_exe: + return run_configure_external(meson_exe + commandlist, env=env) + return run_configure_inprocess(commandlist, env=env, catch_exception=catch_exception) + +def print_system_info(): + print(mlog.bold('System information.')) + print('Architecture:', platform.architecture()) + print('Machine:', platform.machine()) + print('Platform:', platform.system()) + print('Processor:', platform.processor()) + print('System:', platform.system()) + print('') + print(flush=True) + +def main(): + print_system_info() + parser = argparse.ArgumentParser() + parser.add_argument('--backend', default=None, dest='backend', + choices=backendlist) + parser.add_argument('--cross', default=[], dest='cross', action='append') + parser.add_argument('--cross-only', action='store_true') + parser.add_argument('--failfast', action='store_true') + parser.add_argument('--no-unittests', action='store_true', default=False) + (options, _) = parser.parse_known_args() + returncode = 0 + backend, _ = guess_backend(options.backend, shutil.which('msbuild')) + no_unittests = options.no_unittests + # Running on a developer machine? Be nice! + if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'CI' not in os.environ: + os.nice(20) + # Appveyor sets the `platform` environment variable which completely messes + # up building with the vs2010 and vs2015 backends. + # + # Specifically, MSBuild reads the `platform` environment variable to set + # the configured value for the platform (Win32/x64/arm), which breaks x86 + # builds. + # + # Appveyor setting this also breaks our 'native build arch' detection for + # Windows in environment.py:detect_windows_arch() by overwriting the value + # of `platform` set by vcvarsall.bat. + # + # While building for x86, `platform` should be unset. 
+ if 'APPVEYOR' in os.environ and os.environ['arch'] == 'x86': + os.environ.pop('platform') + # Run tests + # Can't pass arguments to unit tests, so set the backend to use in the environment + env = os.environ.copy() + if not options.cross: + cmd = mesonlib.python_command + ['run_meson_command_tests.py', '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + if no_unittests: + print('Skipping all unit tests.') + print(flush=True) + returncode = 0 + else: + print(mlog.bold('Running unittests.')) + print(flush=True) + cmd = mesonlib.python_command + ['run_unittests.py', '--backend=' + backend.name, '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + cmd = mesonlib.python_command + ['run_project_tests.py'] + sys.argv[1:] + returncode += subprocess.call(cmd, env=env) + else: + cross_test_args = mesonlib.python_command + ['run_cross_test.py'] + for cf in options.cross: + print(mlog.bold(f'Running {cf} cross tests.')) + print(flush=True) + cmd = cross_test_args + ['cross/' + cf] + if options.failfast: + cmd += ['--failfast'] + if options.cross_only: + cmd += ['--cross-only'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + return returncode + +if __name__ == '__main__': + mesonmain.setup_vsenv() + print('Meson build system', meson_version, 'Project and Unit Tests') + raise SystemExit(main()) diff --git a/meson/run_unittests.py b/meson/run_unittests.py new file mode 100755 index 000000000..1c0e6f620 --- /dev/null +++ b/meson/run_unittests.py @@ -0,0 +1,10647 @@ +#!/usr/bin/env python3 +# Copyright 2016-2021 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Work around some pathlib bugs... 
+from mesonbuild import _pathlib +import sys +sys.modules['pathlib'] = _pathlib + +import time +import stat +import subprocess +import re +import json +import tempfile +import textwrap +import os +import shutil +import unittest +import platform +import pickle +import functools +import io +import operator +import threading +import zipfile, tarfile +import hashlib +from itertools import chain +from unittest import mock +from configparser import ConfigParser +from contextlib import contextmanager +from glob import glob +from pathlib import (PurePath, Path) +from distutils.dir_util import copy_tree +import typing as T + +import mesonbuild.mlog +import mesonbuild.depfile +import mesonbuild.dependencies.base +import mesonbuild.dependencies.factory +import mesonbuild.compilers +import mesonbuild.envconfig +import mesonbuild.environment +import mesonbuild.mesonlib +import mesonbuild.coredata +import mesonbuild.modules.gnome +from mesonbuild.interpreter import Interpreter +from mesonbuild.interpreterbase import typed_pos_args, InvalidArguments, ObjectHolder +from mesonbuild.interpreterbase import typed_pos_args, InvalidArguments, typed_kwargs, ContainerTypeInfo, KwargInfo +from mesonbuild.ast import AstInterpreter +from mesonbuild.mesonlib import ( + BuildDirLock, LibType, MachineChoice, PerMachine, Version, is_windows, + is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, is_sunos, + windows_proof_rmtree, windows_proof_rm, python_command, + version_compare, split_args, quote_arg, relpath, is_linux, git, + search_version +) +from mesonbuild.compilers import ( + detect_static_linker, detect_c_compiler, detect_cpp_compiler, + detect_objc_compiler, detect_objcpp_compiler, detect_d_compiler, + detect_swift_compiler, compiler_from_language +) +from mesonbuild.environment import detect_ninja +from mesonbuild.mesonlib import MesonException, EnvironmentException, OptionKey +from mesonbuild.dependencies import PkgConfigDependency +from mesonbuild.programs import ExternalProgram +import mesonbuild.dependencies.base +from mesonbuild.build import Target, ConfigurationData +import mesonbuild.modules.pkgconfig +from mesonbuild.scripts import destdir_join +from mesonbuild.optinterpreter import OptionInterpreter, OptionException + +from mesonbuild.mtest import TAPParser, TestResult +from mesonbuild.mesonmain import setup_vsenv +from mesonbuild.wrap.wrap import PackageDefinition, WrapException + +from run_tests import ( + Backend, FakeBuild, FakeCompilerOptions, + ensure_backend_detects_changes, exe_suffix, get_backend_commands, + get_builddir_target_args, get_fake_env, get_fake_options, get_meson_script, + run_configure_inprocess, run_mtest_inprocess +) + +if T.TYPE_CHECKING: + from mesonbuild.compilers import Compiler + +URLOPEN_TIMEOUT = 5 + +@contextmanager +def chdir(path: str): + curdir = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(curdir) + + +def get_dynamic_section_entry(fname: str, entry: str) -> T.Optional[str]: + if is_cygwin() or is_osx(): + raise unittest.SkipTest('Test only applicable to ELF platforms') + + try: + raw_out = subprocess.check_output(['readelf', '-d', fname], + universal_newlines=True) + except FileNotFoundError: + # FIXME: Try using depfixer.py:Elf() as a fallback + raise unittest.SkipTest('readelf not found') + pattern = re.compile(entry + r': \[(.*?)\]') + for line in raw_out.split('\n'): + m = pattern.search(line) + if m is not None: + return str(m.group(1)) + return None # The file did not contain the specified entry. 
+ +def get_soname(fname: str) -> T.Optional[str]: + return get_dynamic_section_entry(fname, 'soname') + +def get_rpath(fname: str) -> T.Optional[str]: + raw = get_dynamic_section_entry(fname, r'(?:rpath|runpath)') + # Get both '' and None here + if not raw: + return None + # nix/nixos adds a bunch of stuff to the rpath out of necessity that we + # don't check for, so clear those + final = ':'.join([e for e in raw.split(':') if not e.startswith('/nix')]) + return final + +def is_tarball(): + if not os.path.isdir('docs'): + return True + return False + +def is_ci(): + if 'CI' in os.environ: + return True + return False + +def _git_init(project_dir): + # If a user has git configuration init.defaultBranch set we want to override that + with tempfile.TemporaryDirectory() as d: + out = git(['--version'], str(d))[1] + if version_compare(search_version(out), '>= 2.28'): + extra_cmd = ['--initial-branch', 'master'] + else: + extra_cmd = [] + + subprocess.check_call(['git', 'init'] + extra_cmd, cwd=project_dir, stdout=subprocess.DEVNULL) + subprocess.check_call(['git', 'config', + 'user.name', 'Author Person'], cwd=project_dir) + subprocess.check_call(['git', 'config', + 'user.email', 'teh_coderz@example.com'], cwd=project_dir) + _git_add_all(project_dir) + +def _git_add_all(project_dir): + subprocess.check_call('git add *', cwd=project_dir, shell=True, + stdout=subprocess.DEVNULL) + subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir, + stdout=subprocess.DEVNULL) + +@functools.lru_cache() +def is_real_gnu_compiler(path): + ''' + Check if the gcc we have is a real gcc and not a macOS wrapper around clang + ''' + if not path: + return False + out = subprocess.check_output([path, '--version'], universal_newlines=True, stderr=subprocess.STDOUT) + return 'Free Software Foundation' in out + +def skipIfNoExecutable(exename): + ''' + Skip this test if the given executable is not found. + ''' + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + if shutil.which(exename) is None: + raise unittest.SkipTest(exename + ' not found') + return func(*args, **kwargs) + return wrapped + return wrapper + +def skipIfNoPkgconfig(f): + ''' + Skip this test if no pkg-config is found, unless we're on CI. + This allows users to run our test suite without having + pkg-config installed on, f.ex., macOS, while ensuring that our CI does not + silently skip the test because of misconfiguration. + + Note: Yes, we provide pkg-config even while running Windows CI + ''' + @functools.wraps(f) + def wrapped(*args, **kwargs): + if not is_ci() and shutil.which('pkg-config') is None: + raise unittest.SkipTest('pkg-config not found') + return f(*args, **kwargs) + return wrapped + +def skipIfNoPkgconfigDep(depname): + ''' + Skip this test if the given pkg-config dep is not found, unless we're on CI. + ''' + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + if not is_ci() and shutil.which('pkg-config') is None: + raise unittest.SkipTest('pkg-config not found') + if not is_ci() and subprocess.call(['pkg-config', '--exists', depname]) != 0: + raise unittest.SkipTest(f'pkg-config dependency {depname} not found.') + return func(*args, **kwargs) + return wrapped + return wrapper + +def skip_if_no_cmake(f): + ''' + Skip this test if no cmake is found, unless we're on CI. + This allows users to run our test suite without having + cmake installed on, f.ex., macOS, while ensuring that our CI does not + silently skip the test because of misconfiguration. 
+ ''' + @functools.wraps(f) + def wrapped(*args, **kwargs): + if not is_ci() and shutil.which('cmake') is None: + raise unittest.SkipTest('cmake not found') + return f(*args, **kwargs) + return wrapped + +def skip_if_not_language(lang: str): + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + try: + compiler_from_language(get_fake_env(), lang, MachineChoice.HOST) + except EnvironmentException: + raise unittest.SkipTest(f'No {lang} compiler found.') + return func(*args, **kwargs) + return wrapped + return wrapper + +def skip_if_env_set(key): + ''' + Skip a test if a particular env is set, except when running under CI + ''' + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + old = None + if key in os.environ: + if not is_ci(): + raise unittest.SkipTest(f'Env var {key!r} set, skipping') + old = os.environ.pop(key) + try: + return func(*args, **kwargs) + finally: + if old is not None: + os.environ[key] = old + return wrapped + return wrapper + +def skip_if_not_base_option(feature): + """Skip tests if The compiler does not support a given base option. + + for example, ICC doesn't currently support b_sanitize. + """ + def actual(f): + @functools.wraps(f) + def wrapped(*args, **kwargs): + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + key = OptionKey(feature) + if key not in cc.base_options: + raise unittest.SkipTest( + f'{feature} not available with {cc.id}') + return f(*args, **kwargs) + return wrapped + return actual + + +@contextmanager +def temp_filename(): + '''A context manager which provides a filename to an empty temporary file. + + On exit the file will be deleted. + ''' + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + yield filename + finally: + try: + os.remove(filename) + except OSError: + pass + +@contextmanager +def no_pkgconfig(): + ''' + A context manager that overrides shutil.which and ExternalProgram to force + them to return None for pkg-config to simulate it not existing. 
+ ''' + old_which = shutil.which + old_search = ExternalProgram._search + + def new_search(self, name, search_dir): + if name == 'pkg-config': + return [None] + return old_search(self, name, search_dir) + + def new_which(cmd, *kwargs): + if cmd == 'pkg-config': + return None + return old_which(cmd, *kwargs) + + shutil.which = new_which + ExternalProgram._search = new_search + try: + yield + finally: + shutil.which = old_which + ExternalProgram._search = old_search + + +class InternalTests(unittest.TestCase): + + def test_version_number(self): + self.assertEqual(search_version('foobar 1.2.3'), '1.2.3') + self.assertEqual(search_version('1.2.3'), '1.2.3') + self.assertEqual(search_version('foobar 2016.10.28 1.2.3'), '1.2.3') + self.assertEqual(search_version('2016.10.28 1.2.3'), '1.2.3') + self.assertEqual(search_version('foobar 2016.10.128'), '2016.10.128') + self.assertEqual(search_version('2016.10.128'), '2016.10.128') + self.assertEqual(search_version('2016.10'), '2016.10') + self.assertEqual(search_version('2016.10 1.2.3'), '1.2.3') + self.assertEqual(search_version('oops v1.2.3'), '1.2.3') + self.assertEqual(search_version('2016.oops 1.2.3'), '1.2.3') + self.assertEqual(search_version('2016.x'), 'unknown version') + + def test_mode_symbolic_to_bits(self): + modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits + self.assertEqual(modefunc('---------'), 0) + self.assertEqual(modefunc('r--------'), stat.S_IRUSR) + self.assertEqual(modefunc('---r-----'), stat.S_IRGRP) + self.assertEqual(modefunc('------r--'), stat.S_IROTH) + self.assertEqual(modefunc('-w-------'), stat.S_IWUSR) + self.assertEqual(modefunc('----w----'), stat.S_IWGRP) + self.assertEqual(modefunc('-------w-'), stat.S_IWOTH) + self.assertEqual(modefunc('--x------'), stat.S_IXUSR) + self.assertEqual(modefunc('-----x---'), stat.S_IXGRP) + self.assertEqual(modefunc('--------x'), stat.S_IXOTH) + self.assertEqual(modefunc('--S------'), stat.S_ISUID) + self.assertEqual(modefunc('-----S---'), stat.S_ISGID) + self.assertEqual(modefunc('--------T'), stat.S_ISVTX) + self.assertEqual(modefunc('--s------'), stat.S_ISUID | stat.S_IXUSR) + self.assertEqual(modefunc('-----s---'), stat.S_ISGID | stat.S_IXGRP) + self.assertEqual(modefunc('--------t'), stat.S_ISVTX | stat.S_IXOTH) + self.assertEqual(modefunc('rwx------'), stat.S_IRWXU) + self.assertEqual(modefunc('---rwx---'), stat.S_IRWXG) + self.assertEqual(modefunc('------rwx'), stat.S_IRWXO) + # We could keep listing combinations exhaustively but that seems + # tedious and pointless. Just test a few more. 
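+ # e.g. 'rwxr-xr-x' is 0o755, 'rw-r--r--' is 0o644 and 'rwsr-x---' is 0o4750 in numeric form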
+ self.assertEqual(modefunc('rwxr-xr-x'), + stat.S_IRWXU | + stat.S_IRGRP | stat.S_IXGRP | + stat.S_IROTH | stat.S_IXOTH) + self.assertEqual(modefunc('rw-r--r--'), + stat.S_IRUSR | stat.S_IWUSR | + stat.S_IRGRP | + stat.S_IROTH) + self.assertEqual(modefunc('rwsr-x---'), + stat.S_IRWXU | stat.S_ISUID | + stat.S_IRGRP | stat.S_IXGRP) + + def test_compiler_args_class_none_flush(self): + cc = mesonbuild.compilers.ClangCCompiler([], 'fake', MachineChoice.HOST, False, mock.Mock()) + a = cc.compiler_args(['-I.']) + # First, check that the tree construction deduplicates the correct -I argument + a += ['-I..'] + a += ['-I./tests/'] + a += ['-I./tests2/'] + # Think of this as an assertion; we cannot actually apply it, otherwise the CompilerArgs would already flush the changes: + # assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..', '-I.']) + a += ['-I.'] + a += ['-I.', '-I./tests/'] + self.assertEqual(a, ['-I.', '-I./tests/', '-I./tests2/', '-I..']) + + # Then check that, when the CompilerArgs already hold a built container list, deduplication keeps the correct entry + a += ['-I.', '-I./tests2/'] + self.assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..']) + + def test_compiler_args_class_d(self): + d = mesonbuild.compilers.DmdDCompiler([], 'fake', MachineChoice.HOST, 'info', 'arch') + # check include order is kept when deduplicating + a = d.compiler_args(['-Ifirst', '-Isecond', '-Ithird']) + a += ['-Ifirst'] + self.assertEqual(a, ['-Ifirst', '-Isecond', '-Ithird']) + + def test_compiler_args_class_clike(self): + cc = mesonbuild.compilers.ClangCCompiler([], 'fake', MachineChoice.HOST, False, mock.Mock()) + # Test that empty initialization works + a = cc.compiler_args() + self.assertEqual(a, []) + # Test that list initialization works + a = cc.compiler_args(['-I.', '-I..']) + self.assertEqual(a, ['-I.', '-I..']) + # Test that there is no de-dup on initialization + self.assertEqual(cc.compiler_args(['-I.', '-I.']), ['-I.', '-I.']) + + ## Test that appending works + a.append('-I..') + self.assertEqual(a, ['-I..', '-I.']) + a.append('-O3') + self.assertEqual(a, ['-I..', '-I.', '-O3']) + + ## Test that in-place addition works + a += ['-O2', '-O2'] + self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2', '-O2']) + # Test that removal works + a.remove('-O2') + self.assertEqual(a, ['-I..', '-I.', '-O3', '-O2']) + # Test that de-dup happens on addition + a += ['-Ifoo', '-Ifoo'] + self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2']) + + # .extend() is just +=, so we don't test it + + ## Test that addition works + # Test that adding a list with just one old arg works and yields the same array + a = a + ['-Ifoo'] + self.assertEqual(a, ['-Ifoo', '-I..', '-I.', '-O3', '-O2']) + # Test that adding a list with one arg new and one old works + a = a + ['-Ifoo', '-Ibaz'] + self.assertEqual(a, ['-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2']) + # Test that adding args that must be prepended and appended works + a = a + ['-Ibar', '-Wall'] + self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall']) + + ## Test that reflected addition works + # Test that adding to a list with just one old arg works and yields the same array + a = ['-Ifoo'] + a + self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-O3', '-O2', '-Wall']) + # Test that adding to a list with just one new arg that is not pre-pended works + a = ['-Werror'] + a + self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Werror', '-O3', '-O2', '-Wall']) + # Test that adding to a list with
two new args preserves the order + a = ['-Ldir', '-Lbah'] + a + self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall']) + # Test that adding to a list with old args does nothing + a = ['-Ibar', '-Ibaz', '-Ifoo'] + a + self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall']) + + ## Test that adding libraries works + l = cc.compiler_args(['-Lfoodir', '-lfoo']) + self.assertEqual(l, ['-Lfoodir', '-lfoo']) + # Adding a library and a libpath appends both correctly + l += ['-Lbardir', '-lbar'] + self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar']) + # Adding the same library again does nothing + l += ['-lbar'] + self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar']) + + ## Test that 'direct' append and extend works + l = cc.compiler_args(['-Lfoodir', '-lfoo']) + self.assertEqual(l, ['-Lfoodir', '-lfoo']) + # Direct-adding a library and a libpath appends both correctly + l.extend_direct(['-Lbardir', '-lbar']) + self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar']) + # Direct-adding the same library again still adds it + l.append_direct('-lbar') + self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar']) + # Direct-adding with absolute path deduplicates + l.append_direct('/libbaz.a') + self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a']) + # Adding libbaz again does nothing + l.append_direct('/libbaz.a') + self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a']) + + def test_compiler_args_class_gnuld(self): + ## Test --start/end-group + linker = mesonbuild.linkers.GnuBFDDynamicLinker([], MachineChoice.HOST, '-Wl,', []) + gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) + ## Ensure that the fake compiler is never called by overriding the relevant function + gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include'] + ## Test that 'direct' append and extend works + l = gcc.compiler_args(['-Lfoodir', '-lfoo']) + self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group']) + # Direct-adding a library and a libpath appends both correctly + l.extend_direct(['-Lbardir', '-lbar']) + self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-Wl,--end-group']) + # Direct-adding the same library again still adds it + l.append_direct('-lbar') + self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '-Wl,--end-group']) + # Direct-adding with absolute path deduplicates + l.append_direct('/libbaz.a') + self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group']) + # Adding libbaz again does nothing + l.append_direct('/libbaz.a') + self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group']) + # Adding a non-library argument doesn't include it in the group + l += ['-Lfoo', '-Wl,--export-dynamic'] + self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--end-group', '-Wl,--export-dynamic']) + # -Wl,-lfoo is detected as a library and gets added to the group + l.append('-Wl,-ldl') + 
self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group']) + + def test_compiler_args_remove_system(self): + ## Test --start/end-group + linker = mesonbuild.linkers.GnuBFDDynamicLinker([], MachineChoice.HOST, '-Wl,', []) + gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) + ## Ensure that the fake compiler is never called by overriding the relevant function + gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include'] + ## Test that 'direct' append and extend works + l = gcc.compiler_args(['-Lfoodir', '-lfoo']) + self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group']) + ## Test that to_native removes all system includes + l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include'] + self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group', '-DSOMETHING_IMPORTANT=1']) + + def test_string_templates_substitution(self): + dictfunc = mesonbuild.mesonlib.get_filenames_templates_dict + substfunc = mesonbuild.mesonlib.substitute_values + ME = mesonbuild.mesonlib.MesonException + + # Identity + self.assertEqual(dictfunc([], []), {}) + + # One input, no outputs + inputs = ['bar/foo.c.in'] + outputs = [] + ret = dictfunc(inputs, outputs) + d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], + '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c'} + # Check dictionary + self.assertEqual(ret, d) + # Check substitutions + cmd = ['some', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), cmd) + cmd = ['@INPUT@.out', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:]) + cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', 'strings'] + self.assertEqual(substfunc(cmd, d), + [inputs[0] + '.out'] + [d['@PLAINNAME@'] + '.ok'] + cmd[2:]) + cmd = ['@INPUT@', '@BASENAME@.hah', 'strings'] + self.assertEqual(substfunc(cmd, d), + inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:]) + cmd = ['@OUTPUT@'] + self.assertRaises(ME, substfunc, cmd, d) + + # One input, one output + inputs = ['bar/foo.c.in'] + outputs = ['out.c'] + ret = dictfunc(inputs, outputs) + d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], + '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c', + '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': '.'} + # Check dictionary + self.assertEqual(ret, d) + # Check substitutions + cmd = ['some', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), cmd) + cmd = ['@INPUT@.out', '@OUTPUT@', 'strings'] + self.assertEqual(substfunc(cmd, d), + [inputs[0] + '.out'] + outputs + cmd[2:]) + cmd = ['@INPUT0@.out', '@PLAINNAME@.ok', '@OUTPUT0@'] + self.assertEqual(substfunc(cmd, d), + [inputs[0] + '.out', d['@PLAINNAME@'] + '.ok'] + outputs) + cmd = ['@INPUT@', '@BASENAME@.hah', 'strings'] + self.assertEqual(substfunc(cmd, d), + inputs + [d['@BASENAME@'] + '.hah'] + cmd[2:]) + + # One input, one output with a subdir + outputs = ['dir/out.c'] + ret = dictfunc(inputs, outputs) + d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], + '@PLAINNAME@': 'foo.c.in', '@BASENAME@': 'foo.c', + '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'} + # Check dictionary + self.assertEqual(ret, d) + + # Two inputs, no outputs + inputs = ['bar/foo.c.in', 'baz/foo.c.in'] + outputs = [] + ret = dictfunc(inputs, outputs) + d 
= {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1]} + # Check dictionary + self.assertEqual(ret, d) + # Check substitutions + cmd = ['some', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), cmd) + cmd = ['@INPUT@', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), inputs + cmd[1:]) + cmd = ['@INPUT0@.out', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out'] + cmd[1:]) + cmd = ['@INPUT0@.out', '@INPUT1@.ok', 'strings'] + self.assertEqual(substfunc(cmd, d), [inputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:]) + cmd = ['@INPUT0@', '@INPUT1@', 'strings'] + self.assertEqual(substfunc(cmd, d), inputs + cmd[2:]) + # Many inputs, can't use @INPUT@ like this + cmd = ['@INPUT@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + # Not enough inputs + cmd = ['@INPUT2@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + # Too many inputs + cmd = ['@PLAINNAME@'] + self.assertRaises(ME, substfunc, cmd, d) + cmd = ['@BASENAME@'] + self.assertRaises(ME, substfunc, cmd, d) + # No outputs + cmd = ['@OUTPUT@'] + self.assertRaises(ME, substfunc, cmd, d) + cmd = ['@OUTPUT0@'] + self.assertRaises(ME, substfunc, cmd, d) + cmd = ['@OUTDIR@'] + self.assertRaises(ME, substfunc, cmd, d) + + # Two inputs, one output + outputs = ['dir/out.c'] + ret = dictfunc(inputs, outputs) + d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1], + '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTDIR@': 'dir'} + # Check dictionary + self.assertEqual(ret, d) + # Check substitutions + cmd = ['some', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), cmd) + cmd = ['@OUTPUT@', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), outputs + cmd[1:]) + cmd = ['@OUTPUT@.out', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out'] + cmd[1:]) + cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', 'strings'] + self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok'] + cmd[2:]) + # Many inputs, can't use @INPUT@ like this + cmd = ['@INPUT@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + # Not enough inputs + cmd = ['@INPUT2@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + # Not enough outputs + cmd = ['@OUTPUT2@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + + # Two inputs, two outputs + outputs = ['dir/out.c', 'dir/out2.c'] + ret = dictfunc(inputs, outputs) + d = {'@INPUT@': inputs, '@INPUT0@': inputs[0], '@INPUT1@': inputs[1], + '@OUTPUT@': outputs, '@OUTPUT0@': outputs[0], '@OUTPUT1@': outputs[1], + '@OUTDIR@': 'dir'} + # Check dictionary + self.assertEqual(ret, d) + # Check substitutions + cmd = ['some', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), cmd) + cmd = ['@OUTPUT@', 'ordinary', 'strings'] + self.assertEqual(substfunc(cmd, d), outputs + cmd[1:]) + cmd = ['@OUTPUT0@', '@OUTPUT1@', 'strings'] + self.assertEqual(substfunc(cmd, d), outputs + cmd[2:]) + cmd = ['@OUTPUT0@.out', '@INPUT1@.ok', '@OUTDIR@'] + self.assertEqual(substfunc(cmd, d), [outputs[0] + '.out', inputs[1] + '.ok', 'dir']) + # Many inputs, can't use @INPUT@ like this + cmd = ['@INPUT@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + # Not enough inputs + cmd = ['@INPUT2@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + # Not enough outputs + cmd = ['@OUTPUT2@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + # Many outputs, can't 
use @OUTPUT@ like this + cmd = ['@OUTPUT@.out', 'ordinary', 'strings'] + self.assertRaises(ME, substfunc, cmd, d) + + def test_needs_exe_wrapper_override(self): + config = ConfigParser() + config['binaries'] = { + 'c': '\'/usr/bin/gcc\'', + } + config['host_machine'] = { + 'system': '\'linux\'', + 'cpu_family': '\'arm\'', + 'cpu': '\'armv7\'', + 'endian': '\'little\'', + } + # Can not be used as context manager because we need to + # open it a second time and this is not possible on + # Windows. + configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False) + configfilename = configfile.name + config.write(configfile) + configfile.flush() + configfile.close() + opts = get_fake_options() + opts.cross_file = (configfilename,) + env = get_fake_env(opts=opts) + detected_value = env.need_exe_wrapper() + os.unlink(configfilename) + + desired_value = not detected_value + config['properties'] = { + 'needs_exe_wrapper': 'true' if desired_value else 'false' + } + + configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False) + configfilename = configfile.name + config.write(configfile) + configfile.close() + opts = get_fake_options() + opts.cross_file = (configfilename,) + env = get_fake_env(opts=opts) + forced_value = env.need_exe_wrapper() + os.unlink(configfilename) + + self.assertEqual(forced_value, desired_value) + + def test_listify(self): + listify = mesonbuild.mesonlib.listify + # Test sanity + self.assertEqual([1], listify(1)) + self.assertEqual([], listify([])) + self.assertEqual([1], listify([1])) + # Test flattening + self.assertEqual([1, 2, 3], listify([1, [2, 3]])) + self.assertEqual([1, 2, 3], listify([1, [2, [3]]])) + self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False)) + # Test flattening and unholdering + class TestHeldObj(mesonbuild.mesonlib.HoldableObject): + def __init__(self, val: int) -> None: + self._val = val + class MockInterpreter: + def __init__(self) -> None: + self.subproject = '' + self.environment = None + heldObj1 = TestHeldObj(1) + holder1 = ObjectHolder(heldObj1, MockInterpreter()) + self.assertEqual([holder1], listify(holder1)) + self.assertEqual([holder1], listify([holder1])) + self.assertEqual([holder1, 2], listify([holder1, 2])) + self.assertEqual([holder1, 2, 3], listify([holder1, 2, [3]])) + + def test_extract_as_list(self): + extract = mesonbuild.mesonlib.extract_as_list + # Test sanity + kwargs = {'sources': [1, 2, 3]} + self.assertEqual([1, 2, 3], extract(kwargs, 'sources')) + self.assertEqual(kwargs, {'sources': [1, 2, 3]}) + self.assertEqual([1, 2, 3], extract(kwargs, 'sources', pop=True)) + self.assertEqual(kwargs, {}) + + class TestHeldObj(mesonbuild.mesonlib.HoldableObject): + pass + class MockInterpreter: + def __init__(self) -> None: + self.subproject = '' + self.environment = None + heldObj = TestHeldObj() + + # Test unholding + holder3 = ObjectHolder(heldObj, MockInterpreter()) + kwargs = {'sources': [1, 2, holder3]} + self.assertEqual(kwargs, {'sources': [1, 2, holder3]}) + + # flatten nested lists + kwargs = {'sources': [1, [2, [3]]]} + self.assertEqual([1, 2, 3], extract(kwargs, 'sources')) + + def test_pkgconfig_module(self): + dummystate = mock.Mock() + dummystate.subproject = 'dummy' + _mock = mock.Mock(spec=mesonbuild.dependencies.ExternalDependency) + _mock.pcdep = mock.Mock() + _mock.pcdep.name = "some_name" + _mock.version_reqs = [] + + # pkgconfig dependency as lib + deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib") + deps.add_pub_libs([_mock]) + 
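+ # the mocked dependency carries pcdep.name == "some_name", which should surface in the generated public Requires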
self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name") + + # pkgconfig dependency as requires + deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib") + deps.add_pub_reqs([_mock]) + self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name") + + def _test_all_naming(self, cc, env, patterns, platform): + shr = patterns[platform]['shared'] + stc = patterns[platform]['static'] + shrstc = shr + tuple([x for x in stc if x not in shr]) + stcshr = stc + tuple([x for x in shr if x not in stc]) + p = cc.get_library_naming(env, LibType.SHARED) + self.assertEqual(p, shr) + p = cc.get_library_naming(env, LibType.STATIC) + self.assertEqual(p, stc) + p = cc.get_library_naming(env, LibType.PREFER_STATIC) + self.assertEqual(p, stcshr) + p = cc.get_library_naming(env, LibType.PREFER_SHARED) + self.assertEqual(p, shrstc) + # Test find library by mocking up openbsd + if platform != 'openbsd': + return + with tempfile.TemporaryDirectory() as tmpdir: + for i in ['libfoo.so.6.0', 'libfoo.so.5.0', 'libfoo.so.54.0', 'libfoo.so.66a.0b', 'libfoo.so.70.0.so.1']: + libpath = Path(tmpdir) / i + libpath.write_text('', encoding='utf-8') + found = cc._find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED) + self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0') + + def test_find_library_patterns(self): + ''' + Unit test for the library search patterns used by find_library() + ''' + unix_static = ('lib{}.a', '{}.a') + msvc_static = ('lib{}.a', 'lib{}.lib', '{}.a', '{}.lib') + # This is the priority list of pattern matching for library searching + patterns = {'openbsd': {'shared': ('lib{}.so', '{}.so', 'lib{}.so.[0-9]*.[0-9]*', '{}.so.[0-9]*.[0-9]*'), + 'static': unix_static}, + 'linux': {'shared': ('lib{}.so', '{}.so'), + 'static': unix_static}, + 'darwin': {'shared': ('lib{}.dylib', 'lib{}.so', '{}.dylib', '{}.so'), + 'static': unix_static}, + 'cygwin': {'shared': ('cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll', + 'lib{}.dll.a', '{}.dll', '{}.dll.a'), + 'static': ('cyg{}.a',) + unix_static}, + 'windows-msvc': {'shared': ('lib{}.lib', '{}.lib'), + 'static': msvc_static}, + 'windows-mingw': {'shared': ('lib{}.dll.a', 'lib{}.lib', 'lib{}.dll', + '{}.dll.a', '{}.lib', '{}.dll'), + 'static': msvc_static}} + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + if is_osx(): + self._test_all_naming(cc, env, patterns, 'darwin') + elif is_cygwin(): + self._test_all_naming(cc, env, patterns, 'cygwin') + elif is_windows(): + if cc.get_argument_syntax() == 'msvc': + self._test_all_naming(cc, env, patterns, 'windows-msvc') + else: + self._test_all_naming(cc, env, patterns, 'windows-mingw') + elif is_openbsd(): + self._test_all_naming(cc, env, patterns, 'openbsd') + else: + self._test_all_naming(cc, env, patterns, 'linux') + env.machines.host.system = 'openbsd' + self._test_all_naming(cc, env, patterns, 'openbsd') + env.machines.host.system = 'darwin' + self._test_all_naming(cc, env, patterns, 'darwin') + env.machines.host.system = 'cygwin' + self._test_all_naming(cc, env, patterns, 'cygwin') + env.machines.host.system = 'windows' + self._test_all_naming(cc, env, patterns, 'windows-mingw') + + @skipIfNoPkgconfig + def test_pkgconfig_parse_libs(self): + ''' + Unit test for parsing of pkg-config output to search for libraries + + https://github.com/mesonbuild/meson/issues/3951 + ''' + def create_static_lib(name): + if not is_osx(): + name.open('w', encoding='utf-8').close() + return + src = name.with_suffix('.c') + out = name.with_suffix('.o') + with src.open('w', 
encoding='utf-8') as f: + f.write('int meson_foobar (void) { return 0; }') + subprocess.check_call(['clang', '-c', str(src), '-o', str(out)]) + subprocess.check_call(['ar', 'csr', str(name), str(out)]) + + with tempfile.TemporaryDirectory() as tmpdir: + pkgbin = ExternalProgram('pkg-config', command=['pkg-config'], silent=True) + env = get_fake_env() + compiler = detect_c_compiler(env, MachineChoice.HOST) + env.coredata.compilers.host = {'c': compiler} + env.coredata.options[OptionKey('link_args', lang='c')] = FakeCompilerOptions() + p1 = Path(tmpdir) / '1' + p2 = Path(tmpdir) / '2' + p1.mkdir() + p2.mkdir() + # libfoo.a is in one prefix + create_static_lib(p1 / 'libfoo.a') + # libbar.a is in both prefixes + create_static_lib(p1 / 'libbar.a') + create_static_lib(p2 / 'libbar.a') + # Ensure that we never statically link to these + create_static_lib(p1 / 'libpthread.a') + create_static_lib(p1 / 'libm.a') + create_static_lib(p1 / 'libc.a') + create_static_lib(p1 / 'libdl.a') + create_static_lib(p1 / 'librt.a') + + def fake_call_pkgbin(self, args, env=None): + if '--libs' not in args: + return 0, '', '' + if args[-1] == 'foo': + return 0, f'-L{p2.as_posix()} -lfoo -L{p1.as_posix()} -lbar', '' + if args[-1] == 'bar': + return 0, f'-L{p2.as_posix()} -lbar', '' + if args[-1] == 'internal': + return 0, f'-L{p1.as_posix()} -lpthread -lm -lc -lrt -ldl', '' + + old_call = PkgConfigDependency._call_pkgbin + old_check = PkgConfigDependency.check_pkgconfig + PkgConfigDependency._call_pkgbin = fake_call_pkgbin + PkgConfigDependency.check_pkgconfig = lambda x, _: pkgbin + # Test begins + try: + kwargs = {'required': True, 'silent': True} + foo_dep = PkgConfigDependency('foo', env, kwargs) + self.assertEqual(foo_dep.get_link_args(), + [(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()]) + bar_dep = PkgConfigDependency('bar', env, kwargs) + self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()]) + internal_dep = PkgConfigDependency('internal', env, kwargs) + if compiler.get_argument_syntax() == 'msvc': + self.assertEqual(internal_dep.get_link_args(), []) + else: + link_args = internal_dep.get_link_args() + for link_arg in link_args: + for lib in ('pthread', 'm', 'c', 'dl', 'rt'): + self.assertNotIn(f'lib{lib}.a', link_arg, msg=link_args) + finally: + # Test ends + PkgConfigDependency._call_pkgbin = old_call + PkgConfigDependency.check_pkgconfig = old_check + # Reset dependency class to ensure that in-process configure doesn't mess up + PkgConfigDependency.pkgbin_cache = {} + PkgConfigDependency.class_pkgbin = PerMachine(None, None) + + def test_version_compare(self): + comparefunc = mesonbuild.mesonlib.version_compare_many + for (a, b, result) in [ + ('0.99.beta19', '>= 0.99.beta14', True), + ]: + self.assertEqual(comparefunc(a, b)[0], result) + + for (a, b, op) in [ + # examples from https://fedoraproject.org/wiki/Archive:Tools/RPM/VersionComparison + ("1.0010", "1.9", operator.gt), + ("1.05", "1.5", operator.eq), + ("1.0", "1", operator.gt), + ("2.50", "2.5", operator.gt), + ("fc4", "fc.4", operator.eq), + ("FC5", "fc4", operator.lt), + ("2a", "2.0", operator.lt), + ("1.0", "1.fc4", operator.gt), + ("3.0.0_fc", "3.0.0.fc", operator.eq), + # from RPM tests + ("1.0", "1.0", operator.eq), + ("1.0", "2.0", operator.lt), + ("2.0", "1.0", operator.gt), + ("2.0.1", "2.0.1", operator.eq), + ("2.0", "2.0.1", operator.lt), + ("2.0.1", "2.0", operator.gt), + ("2.0.1a", "2.0.1a", operator.eq), + ("2.0.1a", "2.0.1", operator.gt), + ("2.0.1", "2.0.1a", operator.lt), + ("5.5p1", "5.5p1", 
operator.eq), + ("5.5p1", "5.5p2", operator.lt), + ("5.5p2", "5.5p1", operator.gt), + ("5.5p10", "5.5p10", operator.eq), + ("5.5p1", "5.5p10", operator.lt), + ("5.5p10", "5.5p1", operator.gt), + ("10xyz", "10.1xyz", operator.lt), + ("10.1xyz", "10xyz", operator.gt), + ("xyz10", "xyz10", operator.eq), + ("xyz10", "xyz10.1", operator.lt), + ("xyz10.1", "xyz10", operator.gt), + ("xyz.4", "xyz.4", operator.eq), + ("xyz.4", "8", operator.lt), + ("8", "xyz.4", operator.gt), + ("xyz.4", "2", operator.lt), + ("2", "xyz.4", operator.gt), + ("5.5p2", "5.6p1", operator.lt), + ("5.6p1", "5.5p2", operator.gt), + ("5.6p1", "6.5p1", operator.lt), + ("6.5p1", "5.6p1", operator.gt), + ("6.0.rc1", "6.0", operator.gt), + ("6.0", "6.0.rc1", operator.lt), + ("10b2", "10a1", operator.gt), + ("10a2", "10b2", operator.lt), + ("1.0aa", "1.0aa", operator.eq), + ("1.0a", "1.0aa", operator.lt), + ("1.0aa", "1.0a", operator.gt), + ("10.0001", "10.0001", operator.eq), + ("10.0001", "10.1", operator.eq), + ("10.1", "10.0001", operator.eq), + ("10.0001", "10.0039", operator.lt), + ("10.0039", "10.0001", operator.gt), + ("4.999.9", "5.0", operator.lt), + ("5.0", "4.999.9", operator.gt), + ("20101121", "20101121", operator.eq), + ("20101121", "20101122", operator.lt), + ("20101122", "20101121", operator.gt), + ("2_0", "2_0", operator.eq), + ("2.0", "2_0", operator.eq), + ("2_0", "2.0", operator.eq), + ("a", "a", operator.eq), + ("a+", "a+", operator.eq), + ("a+", "a_", operator.eq), + ("a_", "a+", operator.eq), + ("+a", "+a", operator.eq), + ("+a", "_a", operator.eq), + ("_a", "+a", operator.eq), + ("+_", "+_", operator.eq), + ("_+", "+_", operator.eq), + ("_+", "_+", operator.eq), + ("+", "_", operator.eq), + ("_", "+", operator.eq), + # other tests + ('0.99.beta19', '0.99.beta14', operator.gt), + ("1.0.0", "2.0.0", operator.lt), + (".0.0", "2.0.0", operator.lt), + ("alpha", "beta", operator.lt), + ("1.0", "1.0.0", operator.lt), + ("2.456", "2.1000", operator.lt), + ("2.1000", "3.111", operator.lt), + ("2.001", "2.1", operator.eq), + ("2.34", "2.34", operator.eq), + ("6.1.2", "6.3.8", operator.lt), + ("1.7.3.0", "2.0.0", operator.lt), + ("2.24.51", "2.25", operator.lt), + ("2.1.5+20120813+gitdcbe778", "2.1.5", operator.gt), + ("3.4.1", "3.4b1", operator.gt), + ("041206", "200090325", operator.lt), + ("0.6.2+git20130413", "0.6.2", operator.gt), + ("2.6.0+bzr6602", "2.6.0", operator.gt), + ("2.6.0", "2.6b2", operator.gt), + ("2.6.0+bzr6602", "2.6b2x", operator.gt), + ("0.6.7+20150214+git3a710f9", "0.6.7", operator.gt), + ("15.8b", "15.8.0.1", operator.lt), + ("1.2rc1", "1.2.0", operator.lt), + ]: + ver_a = Version(a) + ver_b = Version(b) + if op is operator.eq: + for o, name in [(op, 'eq'), (operator.ge, 'ge'), (operator.le, 'le')]: + self.assertTrue(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') + if op is operator.lt: + for o, name in [(op, 'lt'), (operator.le, 'le'), (operator.ne, 'ne')]: + self.assertTrue(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') + for o, name in [(operator.gt, 'gt'), (operator.ge, 'ge'), (operator.eq, 'eq')]: + self.assertFalse(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') + if op is operator.gt: + for o, name in [(op, 'gt'), (operator.ge, 'ge'), (operator.ne, 'ne')]: + self.assertTrue(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') + for o, name in [(operator.lt, 'lt'), (operator.le, 'le'), (operator.eq, 'eq')]: + self.assertFalse(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') + + def test_msvc_toolset_version(self): + ''' + Ensure that the toolset version returns the correct value for this MSVC + ''' + env = 
get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_argument_syntax() != 'msvc': + raise unittest.SkipTest('Test only applies to MSVC-like compilers') + toolset_ver = cc.get_toolset_version() + self.assertIsNotNone(toolset_ver) + # Visual Studio 2015 and older versions do not define VCToolsVersion + # TODO: ICL doesn't set this in the VSC2015 profile either + if cc.id == 'msvc' and int(''.join(cc.version.split('.')[0:2])) < 1910: + return + if 'VCToolsVersion' in os.environ: + vctools_ver = os.environ['VCToolsVersion'] + else: + self.assertIn('VCINSTALLDIR', os.environ) + # See https://devblogs.microsoft.com/cppblog/finding-the-visual-c-compiler-tools-in-visual-studio-2017/ + vctools_ver = (Path(os.environ['VCINSTALLDIR']) / 'Auxiliary' / 'Build' / 'Microsoft.VCToolsVersion.default.txt').read_text(encoding='utf-8') + self.assertTrue(vctools_ver.startswith(toolset_ver), + msg=f'{vctools_ver!r} does not start with {toolset_ver!r}') + + def test_split_args(self): + split_args = mesonbuild.mesonlib.split_args + join_args = mesonbuild.mesonlib.join_args + if is_windows(): + test_data = [ + # examples from https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments + (r'"a b c" d e', ['a b c', 'd', 'e'], True), + (r'"ab\"c" "\\" d', ['ab"c', '\\', 'd'], False), + (r'a\\\b d"e f"g h', [r'a\\\b', 'de fg', 'h'], False), + (r'a\\\"b c d', [r'a\"b', 'c', 'd'], False), + (r'a\\\\"b c" d e', [r'a\\b c', 'd', 'e'], False), + # other basics + (r'""', [''], True), + (r'a b c d "" e', ['a', 'b', 'c', 'd', '', 'e'], True), + (r"'a b c' d e", ["'a", 'b', "c'", 'd', 'e'], True), + (r"'a&b&c' d e", ["'a&b&c'", 'd', 'e'], True), + (r"a & b & c d e", ['a', '&', 'b', '&', 'c', 'd', 'e'], True), + (r"'a & b & c d e'", ["'a", '&', 'b', '&', 'c', 'd', "e'"], True), + ('a b\nc\rd \n\re', ['a', 'b', 'c', 'd', 'e'], False), + # more illustrative tests + (r'cl test.cpp /O1 /Fe:test.exe', ['cl', 'test.cpp', '/O1', '/Fe:test.exe'], True), + (r'cl "test.cpp /O1 /Fe:test.exe"', ['cl', 'test.cpp /O1 /Fe:test.exe'], True), + (r'cl /DNAME=\"Bob\" test.cpp', ['cl', '/DNAME="Bob"', 'test.cpp'], False), + (r'cl "/DNAME=\"Bob\"" test.cpp', ['cl', '/DNAME="Bob"', 'test.cpp'], True), + (r'cl /DNAME=\"Bob, Alice\" test.cpp', ['cl', '/DNAME="Bob,', 'Alice"', 'test.cpp'], False), + (r'cl "/DNAME=\"Bob, Alice\"" test.cpp', ['cl', '/DNAME="Bob, Alice"', 'test.cpp'], True), + (r'cl C:\path\with\backslashes.cpp', ['cl', r'C:\path\with\backslashes.cpp'], True), + (r'cl C:\\path\\with\\double\\backslashes.cpp', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], True), + (r'cl "C:\\path\\with\\double\\backslashes.cpp"', ['cl', r'C:\\path\\with\\double\\backslashes.cpp'], False), + (r'cl C:\path with spaces\test.cpp', ['cl', r'C:\path', 'with', r'spaces\test.cpp'], False), + (r'cl "C:\path with spaces\test.cpp"', ['cl', r'C:\path with spaces\test.cpp'], True), + (r'cl /DPATH="C:\path\with\backslashes test.cpp', ['cl', r'/DPATH=C:\path\with\backslashes test.cpp'], False), + (r'cl /DPATH=\"C:\\ends\\with\\backslashes\\\" test.cpp', ['cl', r'/DPATH="C:\\ends\\with\\backslashes\"', 'test.cpp'], False), + (r'cl /DPATH="C:\\ends\\with\\backslashes\\" test.cpp', ['cl', '/DPATH=C:\\\\ends\\\\with\\\\backslashes\\', 'test.cpp'], False), + (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\"', 'test.cpp'], True), + (r'cl "/DNAME=\"C:\\ends\\with\\backslashes\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\ test.cpp'], False), + (r'cl 
"/DNAME=\"C:\\ends\\with\\backslashes\\\\\"" test.cpp', ['cl', r'/DNAME="C:\\ends\\with\\backslashes\\"', 'test.cpp'], True), + ] + else: + test_data = [ + (r"'a b c' d e", ['a b c', 'd', 'e'], True), + (r"a/b/c d e", ['a/b/c', 'd', 'e'], True), + (r"a\b\c d e", [r'abc', 'd', 'e'], False), + (r"a\\b\\c d e", [r'a\b\c', 'd', 'e'], False), + (r'"a b c" d e', ['a b c', 'd', 'e'], False), + (r'"a\\b\\c\\" d e', ['a\\b\\c\\', 'd', 'e'], False), + (r"'a\b\c\' d e", ['a\\b\\c\\', 'd', 'e'], True), + (r"'a&b&c' d e", ['a&b&c', 'd', 'e'], True), + (r"a & b & c d e", ['a', '&', 'b', '&', 'c', 'd', 'e'], False), + (r"'a & b & c d e'", ['a & b & c d e'], True), + (r"abd'e f'g h", [r'abde fg', 'h'], False), + ('a b\nc\rd \n\re', ['a', 'b', 'c', 'd', 'e'], False), + + ('g++ -DNAME="Bob" test.cpp', ['g++', '-DNAME=Bob', 'test.cpp'], False), + ("g++ '-DNAME=\"Bob\"' test.cpp", ['g++', '-DNAME="Bob"', 'test.cpp'], True), + ('g++ -DNAME="Bob, Alice" test.cpp', ['g++', '-DNAME=Bob, Alice', 'test.cpp'], False), + ("g++ '-DNAME=\"Bob, Alice\"' test.cpp", ['g++', '-DNAME="Bob, Alice"', 'test.cpp'], True), + ] + + for (cmd, expected, roundtrip) in test_data: + self.assertEqual(split_args(cmd), expected) + if roundtrip: + self.assertEqual(join_args(expected), cmd) + + def test_quote_arg(self): + split_args = mesonbuild.mesonlib.split_args + quote_arg = mesonbuild.mesonlib.quote_arg + if is_windows(): + test_data = [ + ('', '""'), + ('arg1', 'arg1'), + ('/option1', '/option1'), + ('/Ovalue', '/Ovalue'), + ('/OBob&Alice', '/OBob&Alice'), + ('/Ovalue with spaces', r'"/Ovalue with spaces"'), + (r'/O"value with spaces"', r'"/O\"value with spaces\""'), + (r'/OC:\path with spaces\test.exe', r'"/OC:\path with spaces\test.exe"'), + ('/LIBPATH:C:\\path with spaces\\ends\\with\\backslashes\\', r'"/LIBPATH:C:\path with spaces\ends\with\backslashes\\"'), + ('/LIBPATH:"C:\\path with spaces\\ends\\with\\backslashes\\\\"', r'"/LIBPATH:\"C:\path with spaces\ends\with\backslashes\\\\\""'), + (r'/DMSG="Alice said: \"Let\'s go\""', r'"/DMSG=\"Alice said: \\\"Let\'s go\\\"\""'), + ] + else: + test_data = [ + ('arg1', 'arg1'), + ('--option1', '--option1'), + ('-O=value', '-O=value'), + ('-O=Bob&Alice', "'-O=Bob&Alice'"), + ('-O=value with spaces', "'-O=value with spaces'"), + ('-O="value with spaces"', '\'-O=\"value with spaces\"\''), + ('-O=/path with spaces/test', '\'-O=/path with spaces/test\''), + ('-DMSG="Alice said: \\"Let\'s go\\""', "'-DMSG=\"Alice said: \\\"Let'\"'\"'s go\\\"\"'"), + ] + + for (arg, expected) in test_data: + self.assertEqual(quote_arg(arg), expected) + self.assertEqual(split_args(expected)[0], arg) + + def test_depfile(self): + for (f, target, expdeps) in [ + # empty, unknown target + ([''], 'unknown', set()), + # simple target & deps + (['meson/foo.o : foo.c foo.h'], 'meson/foo.o', set({'foo.c', 'foo.h'})), + (['meson/foo.o: foo.c foo.h'], 'foo.c', set()), + # get all deps + (['meson/foo.o: foo.c foo.h', + 'foo.c: gen.py'], 'meson/foo.o', set({'foo.c', 'foo.h', 'gen.py'})), + (['meson/foo.o: foo.c foo.h', + 'foo.c: gen.py'], 'foo.c', set({'gen.py'})), + # linue continuation, multiple targets + (['foo.o \\', 'foo.h: bar'], 'foo.h', set({'bar'})), + (['foo.o \\', 'foo.h: bar'], 'foo.o', set({'bar'})), + # \\ handling + (['foo: Program\\ F\\iles\\\\X'], 'foo', set({'Program Files\\X'})), + # $ handling + (['f$o.o: c/b'], 'f$o.o', set({'c/b'})), + (['f$$o.o: c/b'], 'f$o.o', set({'c/b'})), + # cycles + (['a: b', 'b: a'], 'a', set({'a', 'b'})), + (['a: b', 'b: a'], 'b', set({'a', 'b'})), + ]: + d = 
mesonbuild.depfile.DepFile(f) + deps = d.get_all_dependencies(target) + self.assertEqual(sorted(deps), sorted(expdeps)) + + def test_log_once(self): + f = io.StringIO() + with mock.patch('mesonbuild.mlog.log_file', f), \ + mock.patch('mesonbuild.mlog._logged_once', set()): + mesonbuild.mlog.log_once('foo') + mesonbuild.mlog.log_once('foo') + actual = f.getvalue().strip() + self.assertEqual(actual, 'foo', actual) + + def test_log_once_ansi(self): + f = io.StringIO() + with mock.patch('mesonbuild.mlog.log_file', f), \ + mock.patch('mesonbuild.mlog._logged_once', set()): + mesonbuild.mlog.log_once(mesonbuild.mlog.bold('foo')) + mesonbuild.mlog.log_once(mesonbuild.mlog.bold('foo')) + actual = f.getvalue().strip() + self.assertEqual(actual.count('foo'), 1, actual) + + mesonbuild.mlog.log_once('foo') + actual = f.getvalue().strip() + self.assertEqual(actual.count('foo'), 1, actual) + + f.truncate() + + mesonbuild.mlog.warning('bar', once=True) + mesonbuild.mlog.warning('bar', once=True) + actual = f.getvalue().strip() + self.assertEqual(actual.count('bar'), 1, actual) + + def test_sort_libpaths(self): + sort_libpaths = mesonbuild.dependencies.base.sort_libpaths + self.assertEqual(sort_libpaths( + ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib'], + ['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']), + ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib']) + self.assertEqual(sort_libpaths( + ['/usr/local/lib', '/home/mesonuser/.local/lib', '/usr/lib'], + ['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']), + ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib']) + self.assertEqual(sort_libpaths( + ['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'], + ['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/lib/pkgconfig']), + ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib']) + self.assertEqual(sort_libpaths( + ['/usr/lib', '/usr/local/lib', '/home/mesonuser/.local/lib'], + ['/home/mesonuser/.local/lib/pkgconfig', '/usr/local/libdata/pkgconfig']), + ['/home/mesonuser/.local/lib', '/usr/local/lib', '/usr/lib']) + + def test_dependency_factory_order(self): + b = mesonbuild.dependencies.base + F = mesonbuild.dependencies.factory + with tempfile.TemporaryDirectory() as tmpdir: + with chdir(tmpdir): + env = get_fake_env() + env.scratch_dir = tmpdir + + f = F.DependencyFactory( + 'test_dep', + methods=[b.DependencyMethods.PKGCONFIG, b.DependencyMethods.CMAKE] + ) + actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})] + self.assertListEqual([m.type_name for m in actual], ['pkgconfig', 'cmake']) + + f = F.DependencyFactory( + 'test_dep', + methods=[b.DependencyMethods.CMAKE, b.DependencyMethods.PKGCONFIG] + ) + actual = [m() for m in f(env, MachineChoice.HOST, {'required': False})] + self.assertListEqual([m.type_name for m in actual], ['cmake', 'pkgconfig']) + + def test_validate_json(self) -> None: + """Validate the json schema for the test cases.""" + try: + from jsonschema import validate, ValidationError + except ImportError: + if is_ci(): + raise + raise unittest.SkipTest('Python jsonschema module not found.') + + schema = json.loads(Path('data/test.schema.json').read_text(encoding='utf-8')) + + errors = [] # type: T.Tuple[str, Exception] + for p in Path('test cases').glob('**/test.json'): + try: + validate(json.loads(p.read_text(encoding='utf-8')), schema=schema) + except ValidationError as e: + errors.append((p.resolve(), e)) + + for f, e in errors: + print(f'Failed to validate: "{f}"') + 
print(str(e)) + + self.assertFalse(errors) + + def test_typed_pos_args_types(self) -> None: + @typed_pos_args('foo', str, int, bool) + def _(obj, node, args: T.Tuple[str, int, bool], kwargs) -> None: + self.assertIsInstance(args, tuple) + self.assertIsInstance(args[0], str) + self.assertIsInstance(args[1], int) + self.assertIsInstance(args[2], bool) + + _(None, mock.Mock(), ['string', 1, False], None) + + def test_typed_pos_args_types_invalid(self) -> None: + @typed_pos_args('foo', str, int, bool) + def _(obj, node, args: T.Tuple[str, int, bool], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', 1.0, False], None) + self.assertEqual(str(cm.exception), 'foo argument 2 was of type "float" but should have been "int"') + + def test_typed_pos_args_types_wrong_number(self) -> None: + @typed_pos_args('foo', str, int, bool) + def _(obj, node, args: T.Tuple[str, int, bool], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', 1], None) + self.assertEqual(str(cm.exception), 'foo takes exactly 3 arguments, but got 2.') + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', 1, True, True], None) + self.assertEqual(str(cm.exception), 'foo takes exactly 3 arguments, but got 4.') + + def test_typed_pos_args_varargs(self) -> None: + @typed_pos_args('foo', str, varargs=str) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertIsInstance(args, tuple) + self.assertIsInstance(args[0], str) + self.assertIsInstance(args[1], list) + self.assertIsInstance(args[1][0], str) + self.assertIsInstance(args[1][1], str) + + _(None, mock.Mock(), ['string', 'var', 'args'], None) + + def test_typed_pos_args_varargs_not_given(self) -> None: + @typed_pos_args('foo', str, varargs=str) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertIsInstance(args, tuple) + self.assertIsInstance(args[0], str) + self.assertIsInstance(args[1], list) + self.assertEqual(args[1], []) + + _(None, mock.Mock(), ['string'], None) + + def test_typed_pos_args_varargs_invalid(self) -> None: + @typed_pos_args('foo', str, varargs=str) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', 'var', 'args', 0], None) + self.assertEqual(str(cm.exception), 'foo argument 4 was of type "int" but should have been "str"') + + def test_typed_pos_args_varargs_invalid_mulitple_types(self) -> None: + @typed_pos_args('foo', str, varargs=(str, list)) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', 'var', 'args', 0], None) + self.assertEqual(str(cm.exception), 'foo argument 4 was of type "int" but should have been one of: "str", "list"') + + def test_typed_pos_args_max_varargs(self) -> None: + @typed_pos_args('foo', str, varargs=str, max_varargs=5) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertIsInstance(args, tuple) + self.assertIsInstance(args[0], str) + self.assertIsInstance(args[1], list) + self.assertIsInstance(args[1][0], str) + self.assertIsInstance(args[1][1], str) + + _(None, mock.Mock(), ['string', 'var', 'args'], None) + + def 
test_typed_pos_args_max_varargs_exceeded(self) -> None: + @typed_pos_args('foo', str, varargs=str, max_varargs=1) + def _(obj, node, args: T.Tuple[str, T.Tuple[str, ...]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', 'var', 'args'], None) + self.assertEqual(str(cm.exception), 'foo takes between 1 and 2 arguments, but got 3.') + + def test_typed_pos_args_min_varargs(self) -> None: + @typed_pos_args('foo', varargs=str, max_varargs=2, min_varargs=1) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertIsInstance(args, tuple) + self.assertIsInstance(args[0], list) + self.assertIsInstance(args[0][0], str) + self.assertIsInstance(args[0][1], str) + + _(None, mock.Mock(), ['string', 'var'], None) + + def test_typed_pos_args_min_varargs_not_met(self) -> None: + @typed_pos_args('foo', str, varargs=str, min_varargs=1) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string'], None) + self.assertEqual(str(cm.exception), 'foo takes at least 2 arguments, but got 1.') + + def test_typed_pos_args_min_and_max_varargs_exceeded(self) -> None: + @typed_pos_args('foo', str, varargs=str, min_varargs=1, max_varargs=2) + def _(obj, node, args: T.Tuple[str, T.Tuple[str, ...]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', 'var', 'args', 'bar'], None) + self.assertEqual(str(cm.exception), 'foo takes between 2 and 3 arguments, but got 4.') + + def test_typed_pos_args_min_and_max_varargs_not_met(self) -> None: + @typed_pos_args('foo', str, varargs=str, min_varargs=1, max_varargs=2) + def _(obj, node, args: T.Tuple[str, T.Tuple[str, ...]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string'], None) + self.assertEqual(str(cm.exception), 'foo takes between 2 and 3 arguments, but got 1.') + + def test_typed_pos_args_variadic_and_optional(self) -> None: + @typed_pos_args('foo', str, optargs=[str], varargs=str, min_varargs=0) + def _(obj, node, args: T.Tuple[str, T.List[str]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(AssertionError) as cm: + _(None, mock.Mock(), ['string'], None) + self.assertEqual( + str(cm.exception), + 'varargs and optargs not supported together as this would be ambiguous') + + def test_typed_pos_args_min_optargs_not_met(self) -> None: + @typed_pos_args('foo', str, str, optargs=[str]) + def _(obj, node, args: T.Tuple[str, T.Optional[str]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string'], None) + self.assertEqual(str(cm.exception), 'foo takes at least 2 arguments, but got 1.') + + def test_typed_pos_args_min_optargs_max_exceeded(self) -> None: + @typed_pos_args('foo', str, optargs=[str]) + def _(obj, node, args: T.Tuple[str, T.Optional[str]], kwargs) -> None: + self.assertTrue(False) # should not be reachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), ['string', '1', '2'], None) + self.assertEqual(str(cm.exception), 'foo takes at most 2 arguments, but got 3.') + + def test_typed_pos_args_optargs_not_given(self) -> None: + 
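+ # an optional positional argument that is not supplied should arrive as None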
@typed_pos_args('foo', str, optargs=[str]) + def _(obj, node, args: T.Tuple[str, T.Optional[str]], kwargs) -> None: + self.assertEqual(len(args), 2) + self.assertIsInstance(args[0], str) + self.assertEqual(args[0], 'string') + self.assertIsNone(args[1]) + + _(None, mock.Mock(), ['string'], None) + + def test_typed_pos_args_optargs_some_given(self) -> None: + @typed_pos_args('foo', str, optargs=[str, int]) + def _(obj, node, args: T.Tuple[str, T.Optional[str], T.Optional[int]], kwargs) -> None: + self.assertEqual(len(args), 3) + self.assertIsInstance(args[0], str) + self.assertEqual(args[0], 'string') + self.assertIsInstance(args[1], str) + self.assertEqual(args[1], '1') + self.assertIsNone(args[2]) + + _(None, mock.Mock(), ['string', '1'], None) + + def test_typed_pos_args_optargs_all_given(self) -> None: + @typed_pos_args('foo', str, optargs=[str]) + def _(obj, node, args: T.Tuple[str, T.Optional[str]], kwargs) -> None: + self.assertEqual(len(args), 2) + self.assertIsInstance(args[0], str) + self.assertEqual(args[0], 'string') + self.assertIsInstance(args[1], str) + + _(None, mock.Mock(), ['string', '1'], None) + + def test_typed_kwarg_basic(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', str) + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None: + self.assertIsInstance(kwargs['input'], str) + self.assertEqual(kwargs['input'], 'foo') + + _(None, mock.Mock(), [], {'input': 'foo'}) + + def test_typed_kwarg_missing_required(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', str, required=True), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None: + self.assertTrue(False) # should be unreachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), [], {}) + self.assertEqual(str(cm.exception), 'testfunc is missing required keyword argument "input"') + + def test_typed_kwarg_missing_optional(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', str), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.Optional[str]]) -> None: + self.assertIsNone(kwargs['input']) + + _(None, mock.Mock(), [], {}) + + def test_typed_kwarg_default(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', str, default='default'), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None: + self.assertEqual(kwargs['input'], 'default') + + _(None, mock.Mock(), [], {}) + + def test_typed_kwarg_container_valid(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', ContainerTypeInfo(list, str), required=True), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.List[str]]) -> None: + self.assertEqual(kwargs['input'], ['str']) + + _(None, mock.Mock(), [], {'input': ['str']}) + + def test_typed_kwarg_container_invalid(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', ContainerTypeInfo(list, str), required=True), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.List[str]]) -> None: + self.assertTrue(False) # should be unreachable + + with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), [], {'input': {}}) + self.assertEqual(str(cm.exception), 'testfunc keyword argument "input" container type was "dict", but should have been "list"') + + def test_typed_kwarg_contained_invalid(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', ContainerTypeInfo(dict, str), required=True), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.Dict[str, str]]) -> None: + self.assertTrue(False) # should be unreachable + + 
with self.assertRaises(InvalidArguments) as cm: + _(None, mock.Mock(), [], {'input': {'key': 1}}) + self.assertEqual(str(cm.exception), 'testfunc keyword argument "input" contained a value of type "int" but should have been "str"') + + def test_typed_kwarg_container_listify(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', ContainerTypeInfo(list, str), listify=True), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.List[str]]) -> None: + self.assertEqual(kwargs['input'], ['str']) + + _(None, mock.Mock(), [], {'input': 'str'}) + + def test_typed_kwarg_container_default_copy(self) -> None: + default: T.List[str] = [] + @typed_kwargs( + 'testfunc', + KwargInfo('input', ContainerTypeInfo(list, str), listify=True, default=default), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.List[str]]) -> None: + self.assertIsNot(kwargs['input'], default) + + _(None, mock.Mock(), [], {}) + + def test_typed_kwarg_container_pairs(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', ContainerTypeInfo(list, str, pairs=True), listify=True), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, T.List[str]]) -> None: + self.assertEqual(kwargs['input'], ['a', 'b']) + + _(None, mock.Mock(), [], {'input': ['a', 'b']}) + + with self.assertRaises(MesonException) as cm: + _(None, mock.Mock(), [], {'input': ['a']}) + self.assertEqual(str(cm.exception), "testfunc keyword argument \"input\" container should be of even length, but is not") + + @mock.patch.dict(mesonbuild.mesonlib.project_meson_versions, {}) + def test_typed_kwarg_since(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', str, since='1.0', deprecated='2.0') + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None: + self.assertIsInstance(kwargs['input'], str) + self.assertEqual(kwargs['input'], 'foo') + + with mock.patch('sys.stdout', io.StringIO()) as out: + # With Meson 0.1 it should trigger the "introduced" warning but not the "deprecated" warning + mesonbuild.mesonlib.project_meson_versions[''] = '0.1' + _(None, mock.Mock(subproject=''), [], {'input': 'foo'}) + self.assertRegex(out.getvalue(), r'WARNING:.*introduced.*input arg in testfunc') + self.assertNotRegex(out.getvalue(), r'WARNING:.*deprecated.*input arg in testfunc') + + with mock.patch('sys.stdout', io.StringIO()) as out: + # With Meson 1.5 it shouldn't trigger any warning + mesonbuild.mesonlib.project_meson_versions[''] = '1.5' + _(None, mock.Mock(subproject=''), [], {'input': 'foo'}) + self.assertNotRegex(out.getvalue(), r'WARNING:.*') + self.assertNotRegex(out.getvalue(), r'WARNING:.*') + + with mock.patch('sys.stdout', io.StringIO()) as out: + # With Meson 2.0 it should trigger the "deprecated" warning but not the "introduced" warning + mesonbuild.mesonlib.project_meson_versions[''] = '2.0' + _(None, mock.Mock(subproject=''), [], {'input': 'foo'}) + self.assertRegex(out.getvalue(), r'WARNING:.*deprecated.*input arg in testfunc') + self.assertNotRegex(out.getvalue(), r'WARNING:.*introduced.*input arg in testfunc') + + def test_typed_kwarg_validator(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', str, validator=lambda x: 'invalid!' 
if x != 'foo' else None) + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None: + pass + + # Should be valid + _(None, mock.Mock(), tuple(), dict(input='foo')) + + with self.assertRaises(MesonException) as cm: + _(None, mock.Mock(), tuple(), dict(input='bar')) + self.assertEqual(str(cm.exception), "testfunc keyword argument \"input\" invalid!") + + def test_typed_kwarg_convertor(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('native', bool, convertor=lambda n: MachineChoice.BUILD if n else MachineChoice.HOST) + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, MachineChoice]) -> None: + assert isinstance(kwargs['native'], MachineChoice) + + _(None, mock.Mock(), tuple(), dict(native=True)) + + @mock.patch.dict(mesonbuild.mesonlib.project_meson_versions, {'': '1.0'}) + def test_typed_kwarg_since_values(self) -> None: + @typed_kwargs( + 'testfunc', + KwargInfo('input', ContainerTypeInfo(list, str), listify=True, default=[], deprecated_values={'foo': '0.9'}, since_values={'bar': '1.1'}), + KwargInfo('output', ContainerTypeInfo(dict, str), default={}, deprecated_values={'foo': '0.9'}, since_values={'bar': '1.1'}), + KwargInfo( + 'mode', str, + validator=lambda x: 'Should be one of "clean", "build", "rebuild"' if x not in {'clean', 'build', 'rebuild', 'deprecated', 'since'} else None, + deprecated_values={'deprecated': '1.0'}, + since_values={'since': '1.1'}), + ) + def _(obj, node, args: T.Tuple, kwargs: T.Dict[str, str]) -> None: + pass + + with mock.patch('sys.stdout', io.StringIO()) as out: + _(None, mock.Mock(subproject=''), [], {'input': ['foo']}) + self.assertRegex(out.getvalue(), r"""WARNING:.Project targeting '1.0'.*deprecated since '0.9': "testfunc" keyword argument "input" value "foo".*""") + + with mock.patch('sys.stdout', io.StringIO()) as out: + _(None, mock.Mock(subproject=''), [], {'input': ['bar']}) + self.assertRegex(out.getvalue(), r"""WARNING:.Project targeting '1.0'.*introduced in '1.1': "testfunc" keyword argument "input" value "bar".*""") + + with mock.patch('sys.stdout', io.StringIO()) as out: + _(None, mock.Mock(subproject=''), [], {'output': {'foo': 'a'}}) + self.assertRegex(out.getvalue(), r"""WARNING:.Project targeting '1.0'.*deprecated since '0.9': "testfunc" keyword argument "output" value "foo".*""") + + with mock.patch('sys.stdout', io.StringIO()) as out: + _(None, mock.Mock(subproject=''), [], {'output': {'bar': 'b'}}) + self.assertRegex(out.getvalue(), r"""WARNING:.Project targeting '1.0'.*introduced in '1.1': "testfunc" keyword argument "output" value "bar".*""") + + with mock.patch('sys.stdout', io.StringIO()) as out: + _(None, mock.Mock(subproject=''), [], {'mode': 'deprecated'}) + self.assertRegex(out.getvalue(), r"""WARNING:.Project targeting '1.0'.*deprecated since '1.0': "testfunc" keyword argument "mode" value "deprecated".*""") + + with mock.patch('sys.stdout', io.StringIO()) as out: + _(None, mock.Mock(subproject=''), [], {'mode': 'since'}) + self.assertRegex(out.getvalue(), r"""WARNING:.Project targeting '1.0'.*introduced in '1.1': "testfunc" keyword argument "mode" value "since".*""") + + def test_typed_kwarg_evolve(self) -> None: + k = KwargInfo('foo', str, required=True, default='foo') + v = k.evolve(default='bar') + self.assertEqual(k.name, 'foo') + self.assertEqual(k.name, v.name) + self.assertEqual(k.types, str) + self.assertEqual(k.types, v.types) + self.assertEqual(k.required, True) + self.assertEqual(k.required, v.required) + self.assertEqual(k.default, 'foo') + self.assertEqual(v.default, 'bar') + + def 
test_detect_cpu_family(self) -> None: + """Test the various cpu familes that we detect and normalize. + + This is particularly useful as both documentation, and to keep testing + platforms that are less common. + """ + + @contextmanager + def mock_trial(value: str) -> T.Iterable[None]: + """Mock all of the ways we could get the trial at once.""" + mocked = mock.Mock(return_value=value) + + with mock.patch('mesonbuild.environment.detect_windows_arch', mocked), \ + mock.patch('mesonbuild.environment.platform.processor', mocked), \ + mock.patch('mesonbuild.environment.platform.machine', mocked): + yield + + cases = [ + ('x86', 'x86'), + ('i386', 'x86'), + ('bepc', 'x86'), # Haiku + ('earm', 'arm'), # NetBSD + ('arm', 'arm'), + ('ppc64', 'ppc64'), + ('powerpc64', 'ppc64'), + ('powerpc', 'ppc'), + ('ppc', 'ppc'), + ('macppc', 'ppc'), + ('power macintosh', 'ppc'), + ('mips64el', 'mips64'), + ('mips64', 'mips64'), + ('mips', 'mips'), + ('mipsel', 'mips'), + ('ip30', 'mips64'), + ('ip35', 'mips64'), + ('parisc64', 'parisc'), + ('sun4u', 'sparc64'), + ('sun4v', 'sparc64'), + ('amd64', 'x86_64'), + ('x64', 'x86_64'), + ('i86pc', 'x86_64'), # Solaris + ('aarch64', 'aarch64'), + ('aarch64_be', 'aarch64'), + ] + + with mock.patch('mesonbuild.environment.any_compiler_has_define', mock.Mock(return_value=False)): + for test, expected in cases: + with self.subTest(test, has_define=False), mock_trial(test): + actual = mesonbuild.environment.detect_cpu_family({}) + self.assertEqual(actual, expected) + + with mock.patch('mesonbuild.environment.any_compiler_has_define', mock.Mock(return_value=True)): + for test, expected in [('x86_64', 'x86'), ('aarch64', 'arm'), ('ppc', 'ppc64')]: + with self.subTest(test, has_define=True), mock_trial(test): + actual = mesonbuild.environment.detect_cpu_family({}) + self.assertEqual(actual, expected) + + def test_detect_cpu(self) -> None: + + @contextmanager + def mock_trial(value: str) -> T.Iterable[None]: + """Mock all of the ways we could get the trial at once.""" + mocked = mock.Mock(return_value=value) + + with mock.patch('mesonbuild.environment.detect_windows_arch', mocked), \ + mock.patch('mesonbuild.environment.platform.processor', mocked), \ + mock.patch('mesonbuild.environment.platform.machine', mocked): + yield + + cases = [ + ('amd64', 'x86_64'), + ('x64', 'x86_64'), + ('i86pc', 'x86_64'), + ('earm', 'arm'), + ('mips64el', 'mips64'), + ('mips64', 'mips64'), + ('mips', 'mips'), + ('mipsel', 'mips'), + ('aarch64', 'aarch64'), + ('aarch64_be', 'aarch64'), + ] + + with mock.patch('mesonbuild.environment.any_compiler_has_define', mock.Mock(return_value=False)): + for test, expected in cases: + with self.subTest(test, has_define=False), mock_trial(test): + actual = mesonbuild.environment.detect_cpu({}) + self.assertEqual(actual, expected) + + with mock.patch('mesonbuild.environment.any_compiler_has_define', mock.Mock(return_value=True)): + for test, expected in [('x86_64', 'i686'), ('aarch64', 'arm'), ('ppc', 'ppc64')]: + with self.subTest(test, has_define=True), mock_trial(test): + actual = mesonbuild.environment.detect_cpu({}) + self.assertEqual(actual, expected) + + +@unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release') +class DataTests(unittest.TestCase): + + def test_snippets(self): + hashcounter = re.compile('^ *(#)+') + snippet_dir = Path('docs/markdown/snippets') + self.assertTrue(snippet_dir.is_dir()) + for f in snippet_dir.glob('*'): + self.assertTrue(f.is_file()) + if f.parts[-1].endswith('~'): + continue + if f.suffix == '.md': + 
in_code_block = False + with f.open(encoding='utf-8') as snippet: + for line in snippet: + if line.startswith(' '): + continue + if line.startswith('```'): + in_code_block = not in_code_block + if in_code_block: + continue + m = re.match(hashcounter, line) + if m: + self.assertEqual(len(m.group(0)), 2, 'All headings in snippets must have two hash symbols: ' + f.name) + self.assertFalse(in_code_block, 'Unclosed code block.') + else: + if f.name != 'add_release_note_snippets_here': + self.assertTrue(False, 'A file without .md suffix in snippets dir: ' + f.name) + + def test_compiler_options_documented(self): + ''' + Test that C and C++ compiler options and base options are documented in + Builtin-Options.md. Only tests the default compiler for the current + platform on the CI. + ''' + md = None + with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f: + md = f.read() + self.assertIsNotNone(md) + env = get_fake_env() + # FIXME: Support other compilers + cc = detect_c_compiler(env, MachineChoice.HOST) + cpp = detect_cpp_compiler(env, MachineChoice.HOST) + for comp in (cc, cpp): + for opt in comp.get_options(): + self.assertIn(str(opt), md) + for opt in comp.base_options: + self.assertIn(str(opt), md) + self.assertNotIn('b_unknown', md) + + @staticmethod + def _get_section_content(name, sections, md): + for section in sections: + if section and section.group(1) == name: + try: + next_section = next(sections) + end = next_section.start() + except StopIteration: + end = len(md) + # Extract the content for this section + return md[section.end():end] + raise RuntimeError(f'Could not find "{name}" heading') + + def test_builtin_options_documented(self): + ''' + Test that universal options and base options are documented in + Builtin-Options.md. 
+ ''' + from itertools import tee + md = None + with open('docs/markdown/Builtin-options.md', encoding='utf-8') as f: + md = f.read() + self.assertIsNotNone(md) + + found_entries = set() + sections = re.finditer(r"^## (.+)$", md, re.MULTILINE) + # Extract the content for this section + content = self._get_section_content("Universal options", sections, md) + subsections = tee(re.finditer(r"^### (.+)$", content, re.MULTILINE)) + subcontent1 = self._get_section_content("Directories", subsections[0], content) + subcontent2 = self._get_section_content("Core options", subsections[1], content) + for subcontent in (subcontent1, subcontent2): + # Find the option names + options = set() + # Match either a table row or a table heading separator: | ------ | + rows = re.finditer(r"^\|(?: (\w+) .* | *-+ *)\|", subcontent, re.MULTILINE) + # Skip the header of the first table + next(rows) + # Skip the heading separator of the first table + next(rows) + for m in rows: + value = m.group(1) + # End when the `buildtype` table starts + if value is None: + break + options.add(value) + self.assertEqual(len(found_entries & options), 0) + found_entries |= options + + self.assertEqual(found_entries, { + *[str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS], + *[str(k) for k in mesonbuild.coredata.BUILTIN_OPTIONS_PER_MACHINE], + }) + + # Check that `buildtype` table inside `Core options` matches how + # setting of builtin options behaves + # + # Find all tables inside this subsection + tables = re.finditer(r"^\| (\w+) .* \|\n\| *[-|\s]+ *\|$", subcontent2, re.MULTILINE) + # Get the table we want using the header of the first column + table = self._get_section_content('buildtype', tables, subcontent2) + # Get table row data + rows = re.finditer(r"^\|(?: (\w+)\s+\| (\w+)\s+\| (\w+) .* | *-+ *)\|", table, re.MULTILINE) + env = get_fake_env() + for m in rows: + buildtype, debug, opt = m.groups() + if debug == 'true': + debug = True + elif debug == 'false': + debug = False + else: + raise RuntimeError(f'Invalid debug value {debug!r} in row:\n{m.group()}') + env.coredata.set_option(OptionKey('buildtype'), buildtype) + self.assertEqual(env.coredata.options[OptionKey('buildtype')].value, buildtype) + self.assertEqual(env.coredata.options[OptionKey('optimization')].value, opt) + self.assertEqual(env.coredata.options[OptionKey('debug')].value, debug) + + def test_cpu_families_documented(self): + with open("docs/markdown/Reference-tables.md", encoding='utf-8') as f: + md = f.read() + self.assertIsNotNone(md) + + sections = re.finditer(r"^## (.+)$", md, re.MULTILINE) + content = self._get_section_content("CPU families", sections, md) + # Find the list entries + arches = [m.group(1) for m in re.finditer(r"^\| (\w+) +\|", content, re.MULTILINE)] + # Drop the header + arches = set(arches[1:]) + self.assertEqual(arches, set(mesonbuild.environment.known_cpu_families)) + + def test_markdown_files_in_sitemap(self): + ''' + Test that each markdown files in docs/markdown is referenced in sitemap.txt + ''' + with open("docs/sitemap.txt", encoding='utf-8') as f: + md = f.read() + self.assertIsNotNone(md) + toc = list(m.group(1) for m in re.finditer(r"^\s*(\w.*)$", md, re.MULTILINE)) + markdownfiles = [f.name for f in Path("docs/markdown").iterdir() if f.is_file() and f.suffix == '.md'] + exceptions = ['_Sidebar.md'] + for f in markdownfiles: + if f not in exceptions and not f.startswith('_include'): + self.assertIn(f, toc) + + def test_modules_in_navbar(self): + ''' + Test that each module is referenced in navbar_links.html + ''' + with 
open("docs/theme/extra/templates/navbar_links.html", encoding='utf-8') as f: + html = f.read().lower() + self.assertIsNotNone(html) + for f in Path('mesonbuild/modules').glob('*.py'): + if f.name in {'modtest.py', 'qt.py', '__init__.py'}: + continue + name = f'{f.stem}-module.html' + name = name.replace('unstable_', '') + name = name.replace('python3', 'python-3') + name = name.replace('_', '-') + self.assertIn(name, html) + + def test_vim_syntax_highlighting(self): + ''' + Ensure that vim syntax highlighting files were updated for new + functions in the global namespace in build files. + ''' + env = get_fake_env() + interp = Interpreter(FakeBuild(env), mock=True) + with open('data/syntax-highlighting/vim/syntax/meson.vim', encoding='utf-8') as f: + res = re.search(r'syn keyword mesonBuiltin(\s+\\\s\w+)+', f.read(), re.MULTILINE) + defined = set([a.strip() for a in res.group().split('\\')][1:]) + self.assertEqual(defined, set(chain(interp.funcs.keys(), interp.builtin.keys()))) + + def test_all_functions_defined_in_ast_interpreter(self): + ''' + Ensure that the all functions defined in the Interpreter are also defined + in the AstInterpreter (and vice versa). + ''' + env = get_fake_env() + interp = Interpreter(FakeBuild(env), mock=True) + astint = AstInterpreter('.', '', '') + self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys())) + + def test_mesondata_is_up_to_date(self): + from mesonbuild.mesondata import mesondata + err_msg = textwrap.dedent(''' + + ########################################################### + ### mesonbuild.mesondata is not up-to-date ### + ### Please regenerate it by running tools/gen_data.py ### + ########################################################### + + ''') + + root_dir = Path(__file__).resolve().parent + mesonbuild_dir = root_dir / 'mesonbuild' + + data_dirs = mesonbuild_dir.glob('**/data') + data_files = [] # type: T.List[T.Tuple(str, str)] + + for i in data_dirs: + for p in i.iterdir(): + data_files += [(p.relative_to(mesonbuild_dir).as_posix(), hashlib.sha256(p.read_bytes()).hexdigest())] + + current_files = set(mesondata.keys()) + scanned_files = {x[0] for x in data_files} + + self.assertSetEqual(current_files, scanned_files, err_msg + 'Data files were added or removed\n') + errors = [] + for i in data_files: + if mesondata[i[0]].sha256sum != i[1]: + errors += [i[0]] + + self.assertListEqual(errors, [], err_msg + 'Files were changed') + +class BasePlatformTests(unittest.TestCase): + prefix = '/usr' + libdir = 'lib' + + def setUp(self): + super().setUp() + self.maxDiff = None + src_root = os.path.dirname(__file__) + src_root = os.path.join(os.getcwd(), src_root) + self.src_root = src_root + # Get the backend + self.backend = getattr(Backend, os.environ['MESON_UNIT_TEST_BACKEND']) + self.meson_args = ['--backend=' + self.backend.name] + self.meson_native_file = None + self.meson_cross_file = None + self.meson_command = python_command + [get_meson_script()] + self.setup_command = self.meson_command + self.meson_args + self.mconf_command = self.meson_command + ['configure'] + self.mintro_command = self.meson_command + ['introspect'] + self.wrap_command = self.meson_command + ['wrap'] + self.rewrite_command = self.meson_command + ['rewrite'] + # Backend-specific build commands + self.build_command, self.clean_command, self.test_command, self.install_command, \ + self.uninstall_command = get_backend_commands(self.backend) + # Test directories + self.common_test_dir = os.path.join(src_root, 'test cases/common') + self.vala_test_dir = 
os.path.join(src_root, 'test cases/vala') + self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks') + self.unit_test_dir = os.path.join(src_root, 'test cases/unit') + self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite') + self.linuxlike_test_dir = os.path.join(src_root, 'test cases/linuxlike') + self.objc_test_dir = os.path.join(src_root, 'test cases/objc') + self.objcpp_test_dir = os.path.join(src_root, 'test cases/objcpp') + + # Misc stuff + self.orig_env = os.environ.copy() + if self.backend is Backend.ninja: + self.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do'] + else: + # VS doesn't have a stable output when no changes are done + # XCode backend is untested with unit tests, help welcome! + self.no_rebuild_stdout = [f'UNKNOWN BACKEND {self.backend.name!r}'] + + self.builddirs = [] + self.new_builddir() + + def change_builddir(self, newdir): + self.builddir = newdir + self.privatedir = os.path.join(self.builddir, 'meson-private') + self.logdir = os.path.join(self.builddir, 'meson-logs') + self.installdir = os.path.join(self.builddir, 'install') + self.distdir = os.path.join(self.builddir, 'meson-dist') + self.mtest_command = self.meson_command + ['test', '-C', self.builddir] + self.builddirs.append(self.builddir) + + def new_builddir(self): + if not is_cygwin(): + # Keep builddirs inside the source tree so that virus scanners + # don't complain + newdir = tempfile.mkdtemp(dir=os.getcwd()) + else: + # But not on Cygwin because that breaks the umask tests. See: + # https://github.com/mesonbuild/meson/pull/5546#issuecomment-509666523 + newdir = tempfile.mkdtemp() + # In case the directory is inside a symlinked directory, find the real + # path otherwise we might not find the srcdir from inside the builddir. + newdir = os.path.realpath(newdir) + self.change_builddir(newdir) + + def _print_meson_log(self): + log = os.path.join(self.logdir, 'meson-log.txt') + if not os.path.isfile(log): + print(f"{log!r} doesn't exist") + return + with open(log, encoding='utf-8') as f: + print(f.read()) + + def tearDown(self): + for path in self.builddirs: + try: + windows_proof_rmtree(path) + except FileNotFoundError: + pass + os.environ.clear() + os.environ.update(self.orig_env) + super().tearDown() + + def _run(self, command, *, workdir=None, override_envvars=None): + ''' + Run a command while printing the stdout and stderr to stdout, + and also return a copy of it + ''' + # If this call hangs CI will just abort. It is very hard to distinguish + # between CI issue and test bug in that case. Set timeout and fail loud + # instead. 
+ if override_envvars is None: + env = None + else: + env = os.environ.copy() + env.update(override_envvars) + + p = subprocess.run(command, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, env=env, + encoding='utf-8', + universal_newlines=True, cwd=workdir, timeout=60 * 5) + print(p.stdout) + if p.returncode != 0: + if 'MESON_SKIP_TEST' in p.stdout: + raise unittest.SkipTest('Project requested skipping.') + raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout) + return p.stdout + + def init(self, srcdir, *, + extra_args=None, + default_args=True, + inprocess=False, + override_envvars=None, + workdir=None): + self.assertPathExists(srcdir) + if extra_args is None: + extra_args = [] + if not isinstance(extra_args, list): + extra_args = [extra_args] + args = [srcdir, self.builddir] + if default_args: + args += ['--prefix', self.prefix] + if self.libdir: + args += ['--libdir', self.libdir] + if self.meson_native_file: + args += ['--native-file', self.meson_native_file] + if self.meson_cross_file: + args += ['--cross-file', self.meson_cross_file] + self.privatedir = os.path.join(self.builddir, 'meson-private') + if inprocess: + try: + (returncode, out, err) = run_configure_inprocess(self.meson_args + args + extra_args, override_envvars) + if 'MESON_SKIP_TEST' in out: + raise unittest.SkipTest('Project requested skipping.') + if returncode != 0: + self._print_meson_log() + print('Stdout:\n') + print(out) + print('Stderr:\n') + print(err) + raise RuntimeError('Configure failed') + except Exception: + self._print_meson_log() + raise + finally: + # Close log file to satisfy Windows file locking + mesonbuild.mlog.shutdown() + mesonbuild.mlog.log_dir = None + mesonbuild.mlog.log_file = None + else: + try: + out = self._run(self.setup_command + args + extra_args, override_envvars=override_envvars, workdir=workdir) + except unittest.SkipTest: + raise unittest.SkipTest('Project requested skipping: ' + srcdir) + except Exception: + self._print_meson_log() + raise + return out + + def build(self, target=None, *, extra_args=None, override_envvars=None): + if extra_args is None: + extra_args = [] + # Add arguments for building the target (if specified), + # and using the build dir (if required, with VS) + args = get_builddir_target_args(self.backend, self.builddir, target) + return self._run(self.build_command + args + extra_args, workdir=self.builddir, override_envvars=override_envvars) + + def clean(self, *, override_envvars=None): + dir_args = get_builddir_target_args(self.backend, self.builddir, None) + self._run(self.clean_command + dir_args, workdir=self.builddir, override_envvars=override_envvars) + + def run_tests(self, *, inprocess=False, override_envvars=None): + if not inprocess: + self._run(self.test_command, workdir=self.builddir, override_envvars=override_envvars) + else: + with mock.patch.dict(os.environ, override_envvars): + run_mtest_inprocess(['-C', self.builddir]) + + def install(self, *, use_destdir=True, override_envvars=None): + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'{self.backend.name!r} backend can\'t install files') + if use_destdir: + destdir = {'DESTDIR': self.installdir} + if override_envvars is None: + override_envvars = destdir + else: + override_envvars.update(destdir) + self._run(self.install_command, workdir=self.builddir, override_envvars=override_envvars) + + def uninstall(self, *, override_envvars=None): + self._run(self.uninstall_command, workdir=self.builddir, override_envvars=override_envvars) + + def run_target(self, 
target, *, override_envvars=None): + ''' + Run a Ninja target while printing the stdout and stderr to stdout, + and also return a copy of it + ''' + return self.build(target=target, override_envvars=override_envvars) + + def setconf(self, arg, will_build=True): + if not isinstance(arg, list): + arg = [arg] + if will_build: + ensure_backend_detects_changes(self.backend) + self._run(self.mconf_command + arg + [self.builddir]) + + def wipe(self): + windows_proof_rmtree(self.builddir) + + def utime(self, f): + ensure_backend_detects_changes(self.backend) + os.utime(f) + + def get_compdb(self): + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'Compiler db not available with {self.backend.name} backend') + try: + with open(os.path.join(self.builddir, 'compile_commands.json'), encoding='utf-8') as ifile: + contents = json.load(ifile) + except FileNotFoundError: + raise unittest.SkipTest('Compiler db not found') + # If Ninja is using .rsp files, generate them, read their contents, and + # replace it as the command for all compile commands in the parsed json. + if len(contents) > 0 and contents[0]['command'].endswith('.rsp'): + # Pretend to build so that the rsp files are generated + self.build(extra_args=['-d', 'keeprsp', '-n']) + for each in contents: + # Extract the actual command from the rsp file + compiler, rsp = each['command'].split(' @') + rsp = os.path.join(self.builddir, rsp) + # Replace the command with its contents + with open(rsp, encoding='utf-8') as f: + each['command'] = compiler + ' ' + f.read() + return contents + + def get_meson_log(self): + with open(os.path.join(self.builddir, 'meson-logs', 'meson-log.txt'), encoding='utf-8') as f: + return f.readlines() + + def get_meson_log_compiler_checks(self): + ''' + Fetch a list command-lines run by meson for compiler checks. + Each command-line is returned as a list of arguments. + ''' + log = self.get_meson_log() + prefix = 'Command line:' + cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)] + return cmds + + def get_meson_log_sanitychecks(self): + ''' + Same as above, but for the sanity checks that were run + ''' + log = self.get_meson_log() + prefix = 'Sanity check compiler command line:' + cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)] + return cmds + + def introspect(self, args): + if isinstance(args, str): + args = [args] + out = subprocess.check_output(self.mintro_command + args + [self.builddir], + universal_newlines=True) + return json.loads(out) + + def introspect_directory(self, directory, args): + if isinstance(args, str): + args = [args] + out = subprocess.check_output(self.mintro_command + args + [directory], + universal_newlines=True) + try: + obj = json.loads(out) + except Exception as e: + print(out) + raise e + return obj + + def assertPathEqual(self, path1, path2): + ''' + Handles a lot of platform-specific quirks related to paths such as + separator, case-sensitivity, etc. + ''' + self.assertEqual(PurePath(path1), PurePath(path2)) + + def assertPathListEqual(self, pathlist1, pathlist2): + self.assertEqual(len(pathlist1), len(pathlist2)) + worklist = list(zip(pathlist1, pathlist2)) + for i in worklist: + if i[0] is None: + self.assertEqual(i[0], i[1]) + else: + self.assertPathEqual(i[0], i[1]) + + def assertPathBasenameEqual(self, path, basename): + msg = f'{path!r} does not end with {basename!r}' + # We cannot use os.path.basename because it returns '' when the path + # ends with '/' for some silly reason. 
This is not how the UNIX utility + # `basename` works. + path_basename = PurePath(path).parts[-1] + self.assertEqual(PurePath(path_basename), PurePath(basename), msg) + + def assertReconfiguredBuildIsNoop(self): + 'Assert that we reconfigured and then there was nothing to do' + ret = self.build() + self.assertIn('The Meson build system', ret) + if self.backend is Backend.ninja: + for line in ret.split('\n'): + if line in self.no_rebuild_stdout: + break + else: + raise AssertionError('build was reconfigured, but was not no-op') + elif self.backend is Backend.vs: + # Ensure that some target said that no rebuild was done + # XXX: Note CustomBuild did indeed rebuild, because of the regen checker! + self.assertIn('ClCompile:\n All outputs are up-to-date.', ret) + self.assertIn('Link:\n All outputs are up-to-date.', ret) + # Ensure that no targets were built + self.assertNotRegex(ret, re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE)) + self.assertNotRegex(ret, re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE)) + elif self.backend is Backend.xcode: + raise unittest.SkipTest('Please help us fix this test on the xcode backend') + else: + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') + + def assertBuildIsNoop(self): + ret = self.build() + if self.backend is Backend.ninja: + self.assertIn(ret.split('\n')[-2], self.no_rebuild_stdout) + elif self.backend is Backend.vs: + # Ensure that some target of each type said that no rebuild was done + # We always have at least one CustomBuild target for the regen checker + self.assertIn('CustomBuild:\n All outputs are up-to-date.', ret) + self.assertIn('ClCompile:\n All outputs are up-to-date.', ret) + self.assertIn('Link:\n All outputs are up-to-date.', ret) + # Ensure that no targets were built + self.assertNotRegex(ret, re.compile('CustomBuild:\n [^\n]*cl', flags=re.IGNORECASE)) + self.assertNotRegex(ret, re.compile('ClCompile:\n [^\n]*cl', flags=re.IGNORECASE)) + self.assertNotRegex(ret, re.compile('Link:\n [^\n]*link', flags=re.IGNORECASE)) + elif self.backend is Backend.xcode: + raise unittest.SkipTest('Please help us fix this test on the xcode backend') + else: + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') + + def assertRebuiltTarget(self, target): + ret = self.build() + if self.backend is Backend.ninja: + self.assertIn(f'Linking target {target}', ret) + elif self.backend is Backend.vs: + # Ensure that this target was rebuilt + linkre = re.compile('Link:\n [^\n]*link[^\n]*' + target, flags=re.IGNORECASE) + self.assertRegex(ret, linkre) + elif self.backend is Backend.xcode: + raise unittest.SkipTest('Please help us fix this test on the xcode backend') + else: + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') + + @staticmethod + def get_target_from_filename(filename): + base = os.path.splitext(filename)[0] + if base.startswith(('lib', 'cyg')): + return base[3:] + return base + + def assertBuildRelinkedOnlyTarget(self, target): + ret = self.build() + if self.backend is Backend.ninja: + linked_targets = [] + for line in ret.split('\n'): + if 'Linking target' in line: + fname = line.rsplit('target ')[-1] + linked_targets.append(self.get_target_from_filename(fname)) + self.assertEqual(linked_targets, [target]) + elif self.backend is Backend.vs: + # Ensure that this target was rebuilt + linkre = re.compile(r'Link:\n [^\n]*link.exe[^\n]*/OUT:".\\([^"]*)"', flags=re.IGNORECASE) + matches = linkre.findall(ret) + self.assertEqual(len(matches), 1, msg=matches) + 
self.assertEqual(self.get_target_from_filename(matches[0]), target) + elif self.backend is Backend.xcode: + raise unittest.SkipTest('Please help us fix this test on the xcode backend') + else: + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') + + def assertPathExists(self, path): + m = f'Path {path!r} should exist' + self.assertTrue(os.path.exists(path), msg=m) + + def assertPathDoesNotExist(self, path): + m = f'Path {path!r} should not exist' + self.assertFalse(os.path.exists(path), msg=m) + + +class AllPlatformTests(BasePlatformTests): + ''' + Tests that should run on all platforms + ''' + + def test_default_options_prefix(self): + ''' + Tests that setting a prefix in default_options in project() works. + Can't be an ordinary test because we pass --prefix to meson there. + https://github.com/mesonbuild/meson/issues/1349 + ''' + testdir = os.path.join(self.common_test_dir, '87 default options') + self.init(testdir, default_args=False, inprocess=True) + opts = self.introspect('--buildoptions') + for opt in opts: + if opt['name'] == 'prefix': + prefix = opt['value'] + break + else: + raise self.fail('Did not find option "prefix"') + self.assertEqual(prefix, '/absoluteprefix') + + def test_invalid_option_names(self): + interp = OptionInterpreter('') + + def write_file(code: str): + with tempfile.NamedTemporaryFile('w', dir=self.builddir, encoding='utf-8', delete=False) as f: + f.write(code) + return f.name + + fname = write_file("option('default_library', type: 'string')") + self.assertRaisesRegex(OptionException, 'Option name default_library is reserved.', + interp.process, fname) + + fname = write_file("option('c_anything', type: 'string')") + self.assertRaisesRegex(OptionException, 'Option name c_anything is reserved.', + interp.process, fname) + + fname = write_file("option('b_anything', type: 'string')") + self.assertRaisesRegex(OptionException, 'Option name b_anything is reserved.', + interp.process, fname) + + fname = write_file("option('backend_anything', type: 'string')") + self.assertRaisesRegex(OptionException, 'Option name backend_anything is reserved.', + interp.process, fname) + + fname = write_file("option('foo.bar', type: 'string')") + self.assertRaisesRegex(OptionException, 'Option names can only contain letters, numbers or dashes.', + interp.process, fname) + + # platlib is allowed, only python.platlib is reserved. 
+ fname = write_file("option('platlib', type: 'string')") + interp.process(fname) + + def test_do_conf_file_preserve_newlines(self): + + def conf_file(in_data, confdata): + with temp_filename() as fin: + with open(fin, 'wb') as fobj: + fobj.write(in_data.encode('utf-8')) + with temp_filename() as fout: + mesonbuild.mesonlib.do_conf_file(fin, fout, confdata, 'meson') + with open(fout, 'rb') as fobj: + return fobj.read().decode('utf-8') + + confdata = {'VAR': ('foo', 'bar')} + self.assertEqual(conf_file('@VAR@\n@VAR@\n', confdata), 'foo\nfoo\n') + self.assertEqual(conf_file('@VAR@\r\n@VAR@\r\n', confdata), 'foo\r\nfoo\r\n') + + def test_do_conf_file_by_format(self): + def conf_str(in_data, confdata, vformat): + (result, missing_variables, confdata_useless) = mesonbuild.mesonlib.do_conf_str('configuration_file', in_data, confdata, variable_format = vformat) + return '\n'.join(result) + + def check_formats(confdata, result): + self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'), result) + self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'), result) + self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'), result) + + confdata = ConfigurationData() + # Key error as they do not exists + check_formats(confdata, '/* #undef VAR */\n') + + # Check boolean + confdata.values = {'VAR': (False, 'description')} + check_formats(confdata, '#undef VAR\n') + confdata.values = {'VAR': (True, 'description')} + check_formats(confdata, '#define VAR\n') + + # Check string + confdata.values = {'VAR': ('value', 'description')} + check_formats(confdata, '#define VAR value\n') + + # Check integer + confdata.values = {'VAR': (10, 'description')} + check_formats(confdata, '#define VAR 10\n') + + # Check multiple string with cmake formats + confdata.values = {'VAR': ('value', 'description')} + self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value\n') + self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value') + self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value\n') + self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value') + + # Handles meson format exceptions + # Unknown format + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'unknown_format') + # More than 2 params in mesondefine + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'meson') + # Mismatched line with format + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#cmakedefine VAR'], confdata, 'meson') + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake') + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake@') + # Dict value in confdata + confdata.values = {'VAR': (['value'], 'description')} + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson') + + def test_absolute_prefix_libdir(self): + ''' + Tests that setting absolute paths for --prefix and --libdir work. Can't + be an ordinary test because these are set via the command-line. 
+ https://github.com/mesonbuild/meson/issues/1341 + https://github.com/mesonbuild/meson/issues/1345 + ''' + testdir = os.path.join(self.common_test_dir, '87 default options') + # on Windows, /someabs is *not* an absolute path + prefix = 'x:/someabs' if is_windows() else '/someabs' + libdir = 'libdir' + extra_args = ['--prefix=' + prefix, + # This can just be a relative path, but we want to test + # that passing this as an absolute path also works + '--libdir=' + prefix + '/' + libdir] + self.init(testdir, extra_args=extra_args, default_args=False) + opts = self.introspect('--buildoptions') + for opt in opts: + if opt['name'] == 'prefix': + self.assertEqual(prefix, opt['value']) + elif opt['name'] == 'libdir': + self.assertEqual(libdir, opt['value']) + + def test_libdir_must_be_inside_prefix(self): + ''' + Tests that libdir is forced to be inside prefix no matter how it is set. + Must be a unit test for obvious reasons. + ''' + testdir = os.path.join(self.common_test_dir, '1 trivial') + # libdir being inside prefix is ok + if is_windows(): + args = ['--prefix', 'x:/opt', '--libdir', 'x:/opt/lib32'] + else: + args = ['--prefix', '/opt', '--libdir', '/opt/lib32'] + self.init(testdir, extra_args=args) + self.wipe() + # libdir not being inside prefix is not ok + if is_windows(): + args = ['--prefix', 'x:/usr', '--libdir', 'x:/opt/lib32'] + else: + args = ['--prefix', '/usr', '--libdir', '/opt/lib32'] + self.assertRaises(subprocess.CalledProcessError, self.init, testdir, extra_args=args) + self.wipe() + # libdir must be inside prefix even when set via mesonconf + self.init(testdir) + if is_windows(): + self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=x:/opt', False) + else: + self.assertRaises(subprocess.CalledProcessError, self.setconf, '-Dlibdir=/opt', False) + + def test_prefix_dependent_defaults(self): + ''' + Tests that configured directory paths are set to prefix dependent + defaults. + ''' + testdir = os.path.join(self.common_test_dir, '1 trivial') + expected = { + '/opt': {'prefix': '/opt', + 'bindir': 'bin', 'datadir': 'share', 'includedir': 'include', + 'infodir': 'share/info', + 'libexecdir': 'libexec', 'localedir': 'share/locale', + 'localstatedir': 'var', 'mandir': 'share/man', + 'sbindir': 'sbin', 'sharedstatedir': 'com', + 'sysconfdir': 'etc'}, + '/usr': {'prefix': '/usr', + 'bindir': 'bin', 'datadir': 'share', 'includedir': 'include', + 'infodir': 'share/info', + 'libexecdir': 'libexec', 'localedir': 'share/locale', + 'localstatedir': '/var', 'mandir': 'share/man', + 'sbindir': 'sbin', 'sharedstatedir': '/var/lib', + 'sysconfdir': '/etc'}, + '/usr/local': {'prefix': '/usr/local', + 'bindir': 'bin', 'datadir': 'share', + 'includedir': 'include', 'infodir': 'share/info', + 'libexecdir': 'libexec', + 'localedir': 'share/locale', + 'localstatedir': '/var/local', 'mandir': 'share/man', + 'sbindir': 'sbin', 'sharedstatedir': '/var/local/lib', + 'sysconfdir': 'etc'}, + # N.B. 
We don't check 'libdir' as it's platform dependent, see + # default_libdir(): + } + + if mesonbuild.mesonlib.default_prefix() == '/usr/local': + expected[None] = expected['/usr/local'] + + for prefix in expected: + args = [] + if prefix: + args += ['--prefix', prefix] + self.init(testdir, extra_args=args, default_args=False) + opts = self.introspect('--buildoptions') + for opt in opts: + name = opt['name'] + value = opt['value'] + if name in expected[prefix]: + self.assertEqual(value, expected[prefix][name]) + self.wipe() + + def test_default_options_prefix_dependent_defaults(self): + ''' + Tests that setting a prefix in default_options in project() sets prefix + dependent defaults for other options, and that those defaults can + be overridden in default_options or by the command line. + ''' + testdir = os.path.join(self.common_test_dir, '163 default options prefix dependent defaults') + expected = { + '': + {'prefix': '/usr', + 'sysconfdir': '/etc', + 'localstatedir': '/var', + 'sharedstatedir': '/sharedstate'}, + '--prefix=/usr': + {'prefix': '/usr', + 'sysconfdir': '/etc', + 'localstatedir': '/var', + 'sharedstatedir': '/sharedstate'}, + '--sharedstatedir=/var/state': + {'prefix': '/usr', + 'sysconfdir': '/etc', + 'localstatedir': '/var', + 'sharedstatedir': '/var/state'}, + '--sharedstatedir=/var/state --prefix=/usr --sysconfdir=sysconf': + {'prefix': '/usr', + 'sysconfdir': 'sysconf', + 'localstatedir': '/var', + 'sharedstatedir': '/var/state'}, + } + for args in expected: + self.init(testdir, extra_args=args.split(), default_args=False) + opts = self.introspect('--buildoptions') + for opt in opts: + name = opt['name'] + value = opt['value'] + if name in expected[args]: + self.assertEqual(value, expected[args][name]) + self.wipe() + + def test_clike_get_library_dirs(self): + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + for d in cc.get_library_dirs(env): + self.assertTrue(os.path.exists(d)) + self.assertTrue(os.path.isdir(d)) + self.assertTrue(os.path.isabs(d)) + + def test_static_library_overwrite(self): + ''' + Tests that static libraries are never appended to, always overwritten. + Has to be a unit test because this involves building a project, + reconfiguring, and building it again so that `ar` is run twice on the + same static library. 
+ https://github.com/mesonbuild/meson/issues/1355 + ''' + testdir = os.path.join(self.common_test_dir, '3 static') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + static_linker = detect_static_linker(env, cc) + if is_windows(): + raise unittest.SkipTest('https://github.com/mesonbuild/meson/issues/1526') + if not isinstance(static_linker, mesonbuild.linkers.ArLinker): + raise unittest.SkipTest('static linker is not `ar`') + # Configure + self.init(testdir) + # Get name of static library + targets = self.introspect('--targets') + self.assertEqual(len(targets), 1) + libname = targets[0]['filename'][0] + # Build and get contents of static library + self.build() + before = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split() + # Filter out non-object-file contents + before = [f for f in before if f.endswith(('.o', '.obj'))] + # Static library should contain only one object + self.assertEqual(len(before), 1, msg=before) + # Change the source to be built into the static library + self.setconf('-Dsource=libfile2.c') + self.build() + after = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split() + # Filter out non-object-file contents + after = [f for f in after if f.endswith(('.o', '.obj'))] + # Static library should contain only one object + self.assertEqual(len(after), 1, msg=after) + # and the object must have changed + self.assertNotEqual(before, after) + + def test_static_compile_order(self): + ''' + Test that the order of files in a compiler command-line while compiling + and linking statically is deterministic. This can't be an ordinary test + case because we need to inspect the compiler database. + https://github.com/mesonbuild/meson/pull/951 + ''' + testdir = os.path.join(self.common_test_dir, '5 linkstatic') + self.init(testdir) + compdb = self.get_compdb() + # Rules will get written out in this order + self.assertTrue(compdb[0]['file'].endswith("libfile.c")) + self.assertTrue(compdb[1]['file'].endswith("libfile2.c")) + self.assertTrue(compdb[2]['file'].endswith("libfile3.c")) + self.assertTrue(compdb[3]['file'].endswith("libfile4.c")) + # FIXME: We don't have access to the linker command + + def test_run_target_files_path(self): + ''' + Test that run_targets are run from the correct directory + https://github.com/mesonbuild/meson/issues/957 + ''' + testdir = os.path.join(self.common_test_dir, '51 run target') + self.init(testdir) + self.run_target('check_exists') + self.run_target('check-env') + self.run_target('check-env-ct') + + def test_run_target_subdir(self): + ''' + Test that run_targets are run from the correct directory + https://github.com/mesonbuild/meson/issues/957 + ''' + testdir = os.path.join(self.common_test_dir, '51 run target') + self.init(testdir) + self.run_target('textprinter') + + def test_install_introspection(self): + ''' + Tests that the Meson introspection API exposes install filenames correctly + https://github.com/mesonbuild/meson/issues/829 + ''' + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'{self.backend.name!r} backend can\'t install files') + testdir = os.path.join(self.common_test_dir, '8 install') + self.init(testdir) + intro = self.introspect('--targets') + if intro[0]['type'] == 'executable': + intro = intro[::-1] + self.assertPathListEqual(intro[0]['install_filename'], ['/usr/lib/libstat.a']) + self.assertPathListEqual(intro[1]['install_filename'], ['/usr/bin/prog' + exe_suffix]) + + def test_install_subdir_introspection(self): + ''' + 
Test that the Meson introspection API also contains subdir install information + https://github.com/mesonbuild/meson/issues/5556 + ''' + testdir = os.path.join(self.common_test_dir, '59 install subdir') + self.init(testdir) + intro = self.introspect('--installed') + expected = { + 'sub2': 'share/sub2', + 'subdir/sub1': 'share/sub1', + 'subdir/sub_elided': 'share', + 'sub1': 'share/sub1', + 'sub/sub1': 'share/sub1', + 'sub_elided': 'share', + 'nested_elided/sub': 'share', + 'new_directory': 'share/new_directory', + } + + self.assertEqual(len(intro), len(expected)) + + # Convert expected to PurePath + expected_converted = {PurePath(os.path.join(testdir, key)): PurePath(os.path.join(self.prefix, val)) for key, val in expected.items()} + intro_converted = {PurePath(key): PurePath(val) for key, val in intro.items()} + + for src, dst in expected_converted.items(): + self.assertIn(src, intro_converted) + self.assertEqual(dst, intro_converted[src]) + + def test_install_introspection_multiple_outputs(self): + ''' + Tests that the Meson introspection API exposes multiple install filenames correctly without crashing + https://github.com/mesonbuild/meson/pull/4555 + + Reverted to the first file only because of https://github.com/mesonbuild/meson/pull/4547#discussion_r244173438 + TODO Change the format to a list officially in a followup PR + ''' + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'{self.backend.name!r} backend can\'t install files') + testdir = os.path.join(self.common_test_dir, '140 custom target multiple outputs') + self.init(testdir) + intro = self.introspect('--targets') + if intro[0]['type'] == 'executable': + intro = intro[::-1] + self.assertPathListEqual(intro[0]['install_filename'], ['/usr/include/diff.h', '/usr/bin/diff.sh']) + self.assertPathListEqual(intro[1]['install_filename'], ['/opt/same.h', '/opt/same.sh']) + self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h', None]) + self.assertPathListEqual(intro[3]['install_filename'], [None, '/usr/bin/second.sh']) + + def test_install_log_content(self): + ''' + Tests that the install-log.txt is consistent with the installed files and directories. + Specifically checks that the log file only contains one entry per file/directory. 
+ https://github.com/mesonbuild/meson/issues/4499 + ''' + testdir = os.path.join(self.common_test_dir, '59 install subdir') + self.init(testdir) + self.install() + installpath = Path(self.installdir) + # Find installed files and directories + expected = {installpath: 0} + for name in installpath.rglob('*'): + expected[name] = 0 + def read_logs(): + # Find logged files and directories + with Path(self.builddir, 'meson-logs', 'install-log.txt').open(encoding='utf-8') as f: + return list(map(lambda l: Path(l.strip()), + filter(lambda l: not l.startswith('#'), + f.readlines()))) + logged = read_logs() + for name in logged: + self.assertTrue(name in expected, f'Log contains extra entry {name}') + expected[name] += 1 + + for name, count in expected.items(): + self.assertGreater(count, 0, f'Log is missing entry for {name}') + self.assertLess(count, 2, f'Log has multiple entries for {name}') + + # Verify that with --dry-run we obtain the same logs but with nothing + # actually installed + windows_proof_rmtree(self.installdir) + self._run(self.meson_command + ['install', '--dry-run', '--destdir', self.installdir], workdir=self.builddir) + self.assertEqual(logged, read_logs()) + self.assertFalse(os.path.exists(self.installdir)) + + def test_uninstall(self): + exename = os.path.join(self.installdir, 'usr/bin/prog' + exe_suffix) + dirname = os.path.join(self.installdir, 'usr/share/dir') + testdir = os.path.join(self.common_test_dir, '8 install') + self.init(testdir) + self.assertPathDoesNotExist(exename) + self.install() + self.assertPathExists(exename) + self.uninstall() + self.assertPathDoesNotExist(exename) + self.assertPathDoesNotExist(dirname) + + def test_forcefallback(self): + testdir = os.path.join(self.unit_test_dir, '31 forcefallback') + self.init(testdir, extra_args=['--wrap-mode=forcefallback']) + self.build() + self.run_tests() + + def test_implicit_forcefallback(self): + testdir = os.path.join(self.unit_test_dir, '96 implicit force fallback') + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testdir) + self.init(testdir, extra_args=['--wrap-mode=forcefallback']) + self.new_builddir() + self.init(testdir, extra_args=['--force-fallback-for=something']) + + def test_nopromote(self): + testdir = os.path.join(self.common_test_dir, '98 subproject subdir') + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testdir, extra_args=['--wrap-mode=nopromote']) + self.assertIn('dependency subsub found: NO', cm.exception.stdout) + + def test_force_fallback_for(self): + testdir = os.path.join(self.unit_test_dir, '31 forcefallback') + self.init(testdir, extra_args=['--force-fallback-for=zlib,foo']) + self.build() + self.run_tests() + + def test_force_fallback_for_nofallback(self): + testdir = os.path.join(self.unit_test_dir, '31 forcefallback') + self.init(testdir, extra_args=['--force-fallback-for=zlib,foo', '--wrap-mode=nofallback']) + self.build() + self.run_tests() + + def test_testrepeat(self): + testdir = os.path.join(self.common_test_dir, '206 tap tests') + self.init(testdir) + self.build() + self._run(self.mtest_command + ['--repeat=2']) + + def test_testsetups(self): + if not shutil.which('valgrind'): + raise unittest.SkipTest('Valgrind not installed.') + testdir = os.path.join(self.unit_test_dir, '2 testsetups') + self.init(testdir) + self.build() + # Run tests without setup + self.run_tests() + with open(os.path.join(self.logdir, 'testlog.txt'), encoding='utf-8') as f: + basic_log = f.read() + # Run buggy test with setup that has env that will 
make it fail
+        self.assertRaises(subprocess.CalledProcessError,
+                          self._run, self.mtest_command + ['--setup=valgrind'])
+        with open(os.path.join(self.logdir, 'testlog-valgrind.txt'), encoding='utf-8') as f:
+            vg_log = f.read()
+        self.assertFalse('TEST_ENV is set' in basic_log)
+        self.assertFalse('Memcheck' in basic_log)
+        self.assertTrue('TEST_ENV is set' in vg_log)
+        self.assertTrue('Memcheck' in vg_log)
+        # Run buggy test with setup without env that will pass
+        self._run(self.mtest_command + ['--setup=wrapper'])
+        # Setup with no properties works
+        self._run(self.mtest_command + ['--setup=empty'])
+        # Setup with only env works
+        self._run(self.mtest_command + ['--setup=onlyenv'])
+        self._run(self.mtest_command + ['--setup=onlyenv2'])
+        self._run(self.mtest_command + ['--setup=onlyenv3'])
+        # Setup with only a timeout works
+        self._run(self.mtest_command + ['--setup=timeout'])
+        # Setup that does not define a wrapper works with --wrapper
+        self._run(self.mtest_command + ['--setup=timeout', '--wrapper', shutil.which('valgrind')])
+        # Setup that skips test works
+        self._run(self.mtest_command + ['--setup=good'])
+        with open(os.path.join(self.logdir, 'testlog-good.txt'), encoding='utf-8') as f:
+            exclude_suites_log = f.read()
+        self.assertFalse('buggy' in exclude_suites_log)
+        # --suite overrides add_test_setup(exclude_suites)
+        self._run(self.mtest_command + ['--setup=good', '--suite', 'buggy'])
+        with open(os.path.join(self.logdir, 'testlog-good.txt'), encoding='utf-8') as f:
+            include_suites_log = f.read()
+        self.assertTrue('buggy' in include_suites_log)
+
+    def test_testsetup_selection(self):
+        testdir = os.path.join(self.unit_test_dir, '14 testsetup selection')
+        self.init(testdir)
+        self.build()
+
+        # Run tests without setup
+        self.run_tests()
+
+        self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=missingfromfoo'])
+        self._run(self.mtest_command + ['--setup=missingfromfoo', '--no-suite=foo:'])
+
+        self._run(self.mtest_command + ['--setup=worksforall'])
+        self._run(self.mtest_command + ['--setup=main:worksforall'])
+
+        self.assertRaises(subprocess.CalledProcessError, self._run,
+                          self.mtest_command + ['--setup=onlyinbar'])
+        self.assertRaises(subprocess.CalledProcessError, self._run,
+                          self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:'])
+        self._run(self.mtest_command + ['--setup=onlyinbar', '--no-suite=main:', '--no-suite=foo:'])
+        self._run(self.mtest_command + ['--setup=bar:onlyinbar'])
+        self.assertRaises(subprocess.CalledProcessError, self._run,
+                          self.mtest_command + ['--setup=foo:onlyinbar'])
+        self.assertRaises(subprocess.CalledProcessError, self._run,
+                          self.mtest_command + ['--setup=main:onlyinbar'])
+
+    def test_testsetup_default(self):
+        testdir = os.path.join(self.unit_test_dir, '49 testsetup default')
+        self.init(testdir)
+        self.build()
+
+        # Running tests without --setup will cause the default setup to be used
+        self.run_tests()
+        with open(os.path.join(self.logdir, 'testlog.txt'), encoding='utf-8') as f:
+            default_log = f.read()
+
+        # Run tests explicitly using the same setup that is set as default
+        self._run(self.mtest_command + ['--setup=mydefault'])
+        with open(os.path.join(self.logdir, 'testlog-mydefault.txt'), encoding='utf-8') as f:
+            mydefault_log = f.read()
+
+        # Run tests with another setup
+        self._run(self.mtest_command + ['--setup=other'])
+        with open(os.path.join(self.logdir, 'testlog-other.txt'), encoding='utf-8') as f:
+            other_log = f.read()
+
+        self.assertTrue('ENV_A is 1' in default_log)
+
self.assertTrue('ENV_B is 2' in default_log) + self.assertTrue('ENV_C is 2' in default_log) + + self.assertTrue('ENV_A is 1' in mydefault_log) + self.assertTrue('ENV_B is 2' in mydefault_log) + self.assertTrue('ENV_C is 2' in mydefault_log) + + self.assertTrue('ENV_A is 1' in other_log) + self.assertTrue('ENV_B is 3' in other_log) + self.assertTrue('ENV_C is 2' in other_log) + + def assertFailedTestCount(self, failure_count, command): + try: + self._run(command) + self.assertEqual(0, failure_count, 'Expected %d tests to fail.' % failure_count) + except subprocess.CalledProcessError as e: + self.assertEqual(e.returncode, failure_count) + + def test_suite_selection(self): + testdir = os.path.join(self.unit_test_dir, '4 suite selection') + self.init(testdir) + self.build() + + self.assertFailedTestCount(4, self.mtest_command) + + self.assertFailedTestCount(0, self.mtest_command + ['--suite', ':success']) + self.assertFailedTestCount(3, self.mtest_command + ['--suite', ':fail']) + self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', ':success']) + self.assertFailedTestCount(1, self.mtest_command + ['--no-suite', ':fail']) + + self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj']) + self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc']) + self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail']) + self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix']) + self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj']) + self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc']) + self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail']) + self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix']) + + self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj:fail']) + self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'mainprj:success']) + self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj:fail']) + self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'mainprj:success']) + + self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail:fail']) + self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjfail:success']) + self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail:fail']) + self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjfail:success']) + + self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:fail']) + self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:success']) + self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:fail']) + self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:success']) + + self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix:fail']) + self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjmix:success']) + self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix:fail']) + self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjmix:success']) + + self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix:fail']) + self.assertFailedTestCount(3, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj']) + self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 
'mainprj', '--no-suite', 'subprjmix:fail'])
+        self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail', 'mainprj-failing_test'])
+
+        self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail:fail', '--no-suite', 'subprjmix:fail'])
+
+    def test_build_by_default(self):
+        testdir = os.path.join(self.common_test_dir, '129 build by default')
+        self.init(testdir)
+        self.build()
+        genfile1 = os.path.join(self.builddir, 'generated1.dat')
+        genfile2 = os.path.join(self.builddir, 'generated2.dat')
+        exe1 = os.path.join(self.builddir, 'fooprog' + exe_suffix)
+        exe2 = os.path.join(self.builddir, 'barprog' + exe_suffix)
+        self.assertPathExists(genfile1)
+        self.assertPathExists(genfile2)
+        self.assertPathDoesNotExist(exe1)
+        self.assertPathDoesNotExist(exe2)
+        self.build(target=('fooprog' + exe_suffix))
+        self.assertPathExists(exe1)
+        self.build(target=('barprog' + exe_suffix))
+        self.assertPathExists(exe2)
+
+    def test_internal_include_order(self):
+        if mesonbuild.environment.detect_msys2_arch() and ('MESON_RSP_THRESHOLD' in os.environ):
+            raise unittest.SkipTest('Test does not yet support gcc rsp files on msys2')
+
+        testdir = os.path.join(self.common_test_dir, '130 include order')
+        self.init(testdir)
+        execmd = fxecmd = None
+        for cmd in self.get_compdb():
+            if 'someexe' in cmd['command']:
+                execmd = cmd['command']
+                continue
+            if 'somefxe' in cmd['command']:
+                fxecmd = cmd['command']
+                continue
+        if not execmd or not fxecmd:
+            raise Exception('Could not find someexe and somefxe commands')
+        # Check include order for 'someexe'
+        incs = [a for a in split_args(execmd) if a.startswith("-I")]
+        self.assertEqual(len(incs), 9)
+        # Need to run the build so the private dir is created.
+        self.build()
+        pdirs = glob(os.path.join(self.builddir, 'sub4/someexe*.p'))
+        self.assertEqual(len(pdirs), 1)
+        privdir = pdirs[0][len(self.builddir)+1:]
+        self.assertPathEqual(incs[0], "-I" + privdir)
+        # target build subdir
+        self.assertPathEqual(incs[1], "-Isub4")
+        # target source subdir
+        self.assertPathBasenameEqual(incs[2], 'sub4')
+        # include paths added via per-target c_args: ['-I'...]
+ self.assertPathBasenameEqual(incs[3], 'sub3') + # target include_directories: build dir + self.assertPathEqual(incs[4], "-Isub2") + # target include_directories: source dir + self.assertPathBasenameEqual(incs[5], 'sub2') + # target internal dependency include_directories: build dir + self.assertPathEqual(incs[6], "-Isub1") + # target internal dependency include_directories: source dir + self.assertPathBasenameEqual(incs[7], 'sub1') + # custom target include dir + self.assertPathEqual(incs[8], '-Ictsub') + # Check include order for 'somefxe' + incs = [a for a in split_args(fxecmd) if a.startswith('-I')] + self.assertEqual(len(incs), 9) + # target private dir + pdirs = glob(os.path.join(self.builddir, 'somefxe*.p')) + self.assertEqual(len(pdirs), 1) + privdir = pdirs[0][len(self.builddir)+1:] + self.assertPathEqual(incs[0], '-I' + privdir) + # target build dir + self.assertPathEqual(incs[1], '-I.') + # target source dir + self.assertPathBasenameEqual(incs[2], os.path.basename(testdir)) + # target internal dependency correct include_directories: build dir + self.assertPathEqual(incs[3], "-Isub4") + # target internal dependency correct include_directories: source dir + self.assertPathBasenameEqual(incs[4], 'sub4') + # target internal dependency dep include_directories: build dir + self.assertPathEqual(incs[5], "-Isub1") + # target internal dependency dep include_directories: source dir + self.assertPathBasenameEqual(incs[6], 'sub1') + # target internal dependency wrong include_directories: build dir + self.assertPathEqual(incs[7], "-Isub2") + # target internal dependency wrong include_directories: source dir + self.assertPathBasenameEqual(incs[8], 'sub2') + + def test_compiler_detection(self): + ''' + Test that automatic compiler detection and setting from the environment + both work just fine. This is needed because while running project tests + and other unit tests, we always read CC/CXX/etc from the environment. + ''' + gnu = mesonbuild.compilers.GnuCompiler + clang = mesonbuild.compilers.ClangCompiler + intel = mesonbuild.compilers.IntelGnuLikeCompiler + msvc = (mesonbuild.compilers.VisualStudioCCompiler, mesonbuild.compilers.VisualStudioCPPCompiler) + clangcl = (mesonbuild.compilers.ClangClCCompiler, mesonbuild.compilers.ClangClCPPCompiler) + ar = mesonbuild.linkers.ArLinker + lib = mesonbuild.linkers.VisualStudioLinker + langs = [('c', 'CC'), ('cpp', 'CXX')] + if not is_windows() and platform.machine().lower() != 'e2k': + langs += [('objc', 'OBJC'), ('objcpp', 'OBJCXX')] + testdir = os.path.join(self.unit_test_dir, '5 compiler detection') + env = get_fake_env(testdir, self.builddir, self.prefix) + for lang, evar in langs: + # Detect with evar and do sanity checks on that + if evar in os.environ: + ecc = compiler_from_language(env, lang, MachineChoice.HOST) + self.assertTrue(ecc.version) + elinker = detect_static_linker(env, ecc) + # Pop it so we don't use it for the next detection + evalue = os.environ.pop(evar) + # Very rough/strict heuristics. Would never work for actual + # compiler detection, but should be ok for the tests. 
+ ebase = os.path.basename(evalue) + if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')): + self.assertIsInstance(ecc, gnu) + self.assertIsInstance(elinker, ar) + elif 'clang-cl' in ebase: + self.assertIsInstance(ecc, clangcl) + self.assertIsInstance(elinker, lib) + elif 'clang' in ebase: + self.assertIsInstance(ecc, clang) + self.assertIsInstance(elinker, ar) + elif ebase.startswith('ic'): + self.assertIsInstance(ecc, intel) + self.assertIsInstance(elinker, ar) + elif ebase.startswith('cl'): + self.assertIsInstance(ecc, msvc) + self.assertIsInstance(elinker, lib) + else: + raise AssertionError(f'Unknown compiler {evalue!r}') + # Check that we actually used the evalue correctly as the compiler + self.assertEqual(ecc.get_exelist(), split_args(evalue)) + # Do auto-detection of compiler based on platform, PATH, etc. + cc = compiler_from_language(env, lang, MachineChoice.HOST) + self.assertTrue(cc.version) + linker = detect_static_linker(env, cc) + # Check compiler type + if isinstance(cc, gnu): + self.assertIsInstance(linker, ar) + if is_osx(): + self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker) + elif is_sunos(): + self.assertIsInstance(cc.linker, (mesonbuild.linkers.SolarisDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)) + else: + self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin) + if isinstance(cc, clangcl): + self.assertIsInstance(linker, lib) + self.assertIsInstance(cc.linker, mesonbuild.linkers.ClangClDynamicLinker) + if isinstance(cc, clang): + self.assertIsInstance(linker, ar) + if is_osx(): + self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker) + elif is_windows(): + # This is clang, not clang-cl. This can be either an + # ld-like linker of link.exe-like linker (usually the + # former for msys2, the latter otherwise) + self.assertIsInstance(cc.linker, (mesonbuild.linkers.MSVCDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)) + else: + self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin) + if isinstance(cc, intel): + self.assertIsInstance(linker, ar) + if is_osx(): + self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker) + elif is_windows(): + self.assertIsInstance(cc.linker, mesonbuild.linkers.XilinkDynamicLinker) + else: + self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuDynamicLinker) + if isinstance(cc, msvc): + self.assertTrue(is_windows()) + self.assertIsInstance(linker, lib) + self.assertEqual(cc.id, 'msvc') + self.assertTrue(hasattr(cc, 'is_64')) + self.assertIsInstance(cc.linker, mesonbuild.linkers.MSVCDynamicLinker) + # If we're on Windows CI, we know what the compiler will be + if 'arch' in os.environ: + if os.environ['arch'] == 'x64': + self.assertTrue(cc.is_64) + else: + self.assertFalse(cc.is_64) + # Set evar ourselves to a wrapper script that just calls the same + # exelist + some argument. This is meant to test that setting + # something like `ccache gcc -pipe` or `distcc ccache gcc` works. 
+ wrapper = os.path.join(testdir, 'compiler wrapper.py') + wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG'] + os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc) + + # Check static linker too + wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args() + os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker) + + # Need a new env to re-run environment loading + env = get_fake_env(testdir, self.builddir, self.prefix) + + wcc = compiler_from_language(env, lang, MachineChoice.HOST) + wlinker = detect_static_linker(env, wcc) + # Pop it so we don't use it for the next detection + evalue = os.environ.pop('AR') + # Must be the same type since it's a wrapper around the same exelist + self.assertIs(type(cc), type(wcc)) + self.assertIs(type(linker), type(wlinker)) + # Ensure that the exelist is correct + self.assertEqual(wcc.get_exelist(), wrappercc) + self.assertEqual(wlinker.get_exelist(), wrapperlinker) + # Ensure that the version detection worked correctly + self.assertEqual(cc.version, wcc.version) + if hasattr(cc, 'is_64'): + self.assertEqual(cc.is_64, wcc.is_64) + + def test_always_prefer_c_compiler_for_asm(self): + testdir = os.path.join(self.common_test_dir, '133 c cpp and asm') + # Skip if building with MSVC + env = get_fake_env(testdir, self.builddir, self.prefix) + if detect_c_compiler(env, MachineChoice.HOST).get_id() == 'msvc': + raise unittest.SkipTest('MSVC can\'t compile assembly') + self.init(testdir) + commands = {'c-asm': {}, 'cpp-asm': {}, 'cpp-c-asm': {}, 'c-cpp-asm': {}} + for cmd in self.get_compdb(): + # Get compiler + split = split_args(cmd['command']) + if split[0] == 'ccache': + compiler = split[1] + else: + compiler = split[0] + # Classify commands + if 'Ic-asm' in cmd['command']: + if cmd['file'].endswith('.S'): + commands['c-asm']['asm'] = compiler + elif cmd['file'].endswith('.c'): + commands['c-asm']['c'] = compiler + else: + raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command'])) + elif 'Icpp-asm' in cmd['command']: + if cmd['file'].endswith('.S'): + commands['cpp-asm']['asm'] = compiler + elif cmd['file'].endswith('.cpp'): + commands['cpp-asm']['cpp'] = compiler + else: + raise AssertionError('{!r} found in cpp-asm?'.format(cmd['command'])) + elif 'Ic-cpp-asm' in cmd['command']: + if cmd['file'].endswith('.S'): + commands['c-cpp-asm']['asm'] = compiler + elif cmd['file'].endswith('.c'): + commands['c-cpp-asm']['c'] = compiler + elif cmd['file'].endswith('.cpp'): + commands['c-cpp-asm']['cpp'] = compiler + else: + raise AssertionError('{!r} found in c-cpp-asm?'.format(cmd['command'])) + elif 'Icpp-c-asm' in cmd['command']: + if cmd['file'].endswith('.S'): + commands['cpp-c-asm']['asm'] = compiler + elif cmd['file'].endswith('.c'): + commands['cpp-c-asm']['c'] = compiler + elif cmd['file'].endswith('.cpp'): + commands['cpp-c-asm']['cpp'] = compiler + else: + raise AssertionError('{!r} found in cpp-c-asm?'.format(cmd['command'])) + else: + raise AssertionError('Unknown command {!r} found'.format(cmd['command'])) + # Check that .S files are always built with the C compiler + self.assertEqual(commands['c-asm']['asm'], commands['c-asm']['c']) + self.assertEqual(commands['c-asm']['asm'], commands['cpp-asm']['asm']) + self.assertEqual(commands['cpp-asm']['asm'], commands['c-cpp-asm']['c']) + self.assertEqual(commands['c-cpp-asm']['asm'], commands['c-cpp-asm']['c']) + self.assertEqual(commands['cpp-c-asm']['asm'], commands['cpp-c-asm']['c']) + 
self.assertNotEqual(commands['cpp-asm']['asm'], commands['cpp-asm']['cpp']) + self.assertNotEqual(commands['c-cpp-asm']['c'], commands['c-cpp-asm']['cpp']) + self.assertNotEqual(commands['cpp-c-asm']['c'], commands['cpp-c-asm']['cpp']) + # Check that the c-asm target is always linked with the C linker + build_ninja = os.path.join(self.builddir, 'build.ninja') + with open(build_ninja, encoding='utf-8') as f: + contents = f.read() + m = re.search('build c-asm.*: c_LINKER', contents) + self.assertIsNotNone(m, msg=contents) + + def test_preprocessor_checks_CPPFLAGS(self): + ''' + Test that preprocessor compiler checks read CPPFLAGS and also CFLAGS but + not LDFLAGS. + ''' + testdir = os.path.join(self.common_test_dir, '132 get define') + define = 'MESON_TEST_DEFINE_VALUE' + # NOTE: this list can't have \n, ' or " + # \n is never substituted by the GNU pre-processor via a -D define + # ' and " confuse split_args() even when they are escaped + # % and # confuse the MSVC preprocessor + # !, ^, *, and < confuse lcc preprocessor + value = 'spaces and fun@$&()-=_+{}[]:;>?,./~`' + for env_var in ['CPPFLAGS', 'CFLAGS']: + env = {} + env[env_var] = f'-D{define}="{value}"' + env['LDFLAGS'] = '-DMESON_FAIL_VALUE=cflags-read' + self.init(testdir, extra_args=[f'-D{define}={value}'], override_envvars=env) + + def test_custom_target_exe_data_deterministic(self): + testdir = os.path.join(self.common_test_dir, '109 custom target capture') + self.init(testdir) + meson_exe_dat1 = glob(os.path.join(self.privatedir, 'meson_exe*.dat')) + self.wipe() + self.init(testdir) + meson_exe_dat2 = glob(os.path.join(self.privatedir, 'meson_exe*.dat')) + self.assertListEqual(meson_exe_dat1, meson_exe_dat2) + + def test_noop_changes_cause_no_rebuilds(self): + ''' + Test that no-op changes to the build files such as mtime do not cause + a rebuild of anything. + ''' + testdir = os.path.join(self.common_test_dir, '6 linkshared') + self.init(testdir) + self.build() + # Immediately rebuilding should not do anything + self.assertBuildIsNoop() + # Changing mtime of meson.build should not rebuild anything + self.utime(os.path.join(testdir, 'meson.build')) + self.assertReconfiguredBuildIsNoop() + # Changing mtime of libefile.c should rebuild the library, but not relink the executable + self.utime(os.path.join(testdir, 'libfile.c')) + self.assertBuildRelinkedOnlyTarget('mylib') + + def test_source_changes_cause_rebuild(self): + ''' + Test that changes to sources and headers cause rebuilds, but not + changes to unused files (as determined by the dependency file) in the + input files list. + ''' + testdir = os.path.join(self.common_test_dir, '19 header in file list') + self.init(testdir) + self.build() + # Immediately rebuilding should not do anything + self.assertBuildIsNoop() + # Changing mtime of header.h should rebuild everything + self.utime(os.path.join(testdir, 'header.h')) + self.assertBuildRelinkedOnlyTarget('prog') + + def test_custom_target_changes_cause_rebuild(self): + ''' + Test that in a custom target, changes to the input files, the + ExternalProgram, and any File objects on the command-line cause + a rebuild. 
+ ''' + testdir = os.path.join(self.common_test_dir, '57 custom header generator') + self.init(testdir) + self.build() + # Immediately rebuilding should not do anything + self.assertBuildIsNoop() + # Changing mtime of these should rebuild everything + for f in ('input.def', 'makeheader.py', 'somefile.txt'): + self.utime(os.path.join(testdir, f)) + self.assertBuildRelinkedOnlyTarget('prog') + + def test_source_generator_program_cause_rebuild(self): + ''' + Test that changes to generator programs in the source tree cause + a rebuild. + ''' + testdir = os.path.join(self.common_test_dir, '90 gen extra') + self.init(testdir) + self.build() + # Immediately rebuilding should not do anything + self.assertBuildIsNoop() + # Changing mtime of generator should rebuild the executable + self.utime(os.path.join(testdir, 'srcgen.py')) + self.assertRebuiltTarget('basic') + + def test_static_library_lto(self): + ''' + Test that static libraries can be built with LTO and linked to + executables. On Linux, this requires the use of gcc-ar. + https://github.com/mesonbuild/meson/issues/1646 + ''' + testdir = os.path.join(self.common_test_dir, '5 linkstatic') + + env = get_fake_env(testdir, self.builddir, self.prefix) + if detect_c_compiler(env, MachineChoice.HOST).get_id() == 'clang' and is_windows(): + raise unittest.SkipTest('LTO not (yet) supported by windows clang') + + self.init(testdir, extra_args='-Db_lto=true') + self.build() + self.run_tests() + + @skip_if_not_base_option('b_lto_threads') + def test_lto_threads(self): + testdir = os.path.join(self.common_test_dir, '6 linkshared') + + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + extra_args: T.List[str] = [] + if cc.get_id() == 'clang': + if is_windows(): + raise unittest.SkipTest('LTO not (yet) supported by windows clang') + else: + extra_args.append('-D_cargs=-Werror=unused-command-line-argument') + + self.init(testdir, extra_args=['-Db_lto=true', '-Db_lto_threads=8'] + extra_args) + self.build() + self.run_tests() + + expected = set(cc.get_lto_compile_args(threads=8)) + targets = self.introspect('--targets') + # This assumes all of the targets support lto + for t in targets: + for s in t['target_sources']: + for e in expected: + self.assertIn(e, s['parameters']) + + @skip_if_not_base_option('b_lto_mode') + @skip_if_not_base_option('b_lto_threads') + def test_lto_mode(self): + testdir = os.path.join(self.common_test_dir, '6 linkshared') + + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() != 'clang': + raise unittest.SkipTest('Only clang currently supports thinLTO') + if cc.linker.id not in {'ld.lld', 'ld.gold', 'ld64', 'lld-link'}: + raise unittest.SkipTest('thinLTO requires ld.lld, ld.gold, ld64, or lld-link') + elif is_windows(): + raise unittest.SkipTest('LTO not (yet) supported by windows clang') + + self.init(testdir, extra_args=['-Db_lto=true', '-Db_lto_mode=thin', '-Db_lto_threads=8', '-Dc_args=-Werror=unused-command-line-argument']) + self.build() + self.run_tests() + + expected = set(cc.get_lto_compile_args(threads=8, mode='thin')) + targets = self.introspect('--targets') + # This assumes all of the targets support lto + for t in targets: + for s in t['target_sources']: + self.assertTrue(expected.issubset(set(s['parameters'])), f'Incorrect values for {t["name"]}') + + def test_dist_git(self): + if not shutil.which('git'): + raise unittest.SkipTest('Git not found') + if self.backend is not Backend.ninja: + 
raise unittest.SkipTest('Dist is only supported with Ninja') + + try: + self.dist_impl(_git_init, _git_add_all) + except PermissionError: + # When run under Windows CI, something (virus scanner?) + # holds on to the git files so cleaning up the dir + # fails sometimes. + pass + + def has_working_hg(self): + if not shutil.which('hg'): + return False + try: + # This check should not be necessary, but + # CI under macOS passes the above test even + # though Mercurial is not installed. + if subprocess.call(['hg', '--version'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) != 0: + return False + return True + except FileNotFoundError: + return False + + def test_dist_hg(self): + if not self.has_working_hg(): + raise unittest.SkipTest('Mercurial not found or broken.') + if self.backend is not Backend.ninja: + raise unittest.SkipTest('Dist is only supported with Ninja') + + def hg_init(project_dir): + subprocess.check_call(['hg', 'init'], cwd=project_dir) + with open(os.path.join(project_dir, '.hg', 'hgrc'), 'w', encoding='utf-8') as f: + print('[ui]', file=f) + print('username=Author Person ', file=f) + subprocess.check_call(['hg', 'add', 'meson.build', 'distexe.c'], cwd=project_dir) + subprocess.check_call(['hg', 'commit', '-m', 'I am a project'], cwd=project_dir) + + try: + self.dist_impl(hg_init, include_subprojects=False) + except PermissionError: + # When run under Windows CI, something (virus scanner?) + # holds on to the hg files so cleaning up the dir + # fails sometimes. + pass + + def test_dist_git_script(self): + if not shutil.which('git'): + raise unittest.SkipTest('Git not found') + if self.backend is not Backend.ninja: + raise unittest.SkipTest('Dist is only supported with Ninja') + + try: + with tempfile.TemporaryDirectory() as tmpdir: + project_dir = os.path.join(tmpdir, 'a') + shutil.copytree(os.path.join(self.unit_test_dir, '35 dist script'), + project_dir) + _git_init(project_dir) + self.init(project_dir) + self.build('dist') + + self.new_builddir() + self.init(project_dir, extra_args=['-Dsub:broken_dist_script=false']) + self._run(self.meson_command + ['dist', '--include-subprojects'], workdir=self.builddir) + except PermissionError: + # When run under Windows CI, something (virus scanner?) + # holds on to the git files so cleaning up the dir + # fails sometimes. + pass + + def create_dummy_subproject(self, project_dir, name): + path = os.path.join(project_dir, 'subprojects', name) + os.makedirs(path) + with open(os.path.join(path, 'meson.build'), 'w', encoding='utf-8') as ofile: + ofile.write(f"project('{name}', version: '1.0')") + return path + + def dist_impl(self, vcs_init, vcs_add_all=None, include_subprojects=True): + # Create this on the fly because having rogue .git directories inside + # the source tree leads to all kinds of trouble. 
+        with tempfile.TemporaryDirectory() as project_dir:
+            with open(os.path.join(project_dir, 'meson.build'), 'w', encoding='utf-8') as ofile:
+                ofile.write(textwrap.dedent('''\
+                    project('disttest', 'c', version : '1.4.3')
+                    e = executable('distexe', 'distexe.c')
+                    test('dist test', e)
+                    subproject('vcssub', required : false)
+                    subproject('tarballsub', required : false)
+                    subproject('samerepo', required : false)
+                    '''))
+            with open(os.path.join(project_dir, 'distexe.c'), 'w', encoding='utf-8') as ofile:
+                ofile.write(textwrap.dedent('''\
+                    #include <stdio.h>
+
+                    int main(int argc, char **argv) {
+                        printf("I am a distribution test.\\n");
+                        return 0;
+                    }
+                    '''))
+            xz_distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.xz')
+            xz_checksumfile = xz_distfile + '.sha256sum'
+            gz_distfile = os.path.join(self.distdir, 'disttest-1.4.3.tar.gz')
+            gz_checksumfile = gz_distfile + '.sha256sum'
+            zip_distfile = os.path.join(self.distdir, 'disttest-1.4.3.zip')
+            zip_checksumfile = zip_distfile + '.sha256sum'
+            vcs_init(project_dir)
+            if include_subprojects:
+                vcs_init(self.create_dummy_subproject(project_dir, 'vcssub'))
+                self.create_dummy_subproject(project_dir, 'tarballsub')
+                self.create_dummy_subproject(project_dir, 'unusedsub')
+            if vcs_add_all:
+                vcs_add_all(self.create_dummy_subproject(project_dir, 'samerepo'))
+            self.init(project_dir)
+            self.build('dist')
+            self.assertPathExists(xz_distfile)
+            self.assertPathExists(xz_checksumfile)
+            self.assertPathDoesNotExist(gz_distfile)
+            self.assertPathDoesNotExist(gz_checksumfile)
+            self.assertPathDoesNotExist(zip_distfile)
+            self.assertPathDoesNotExist(zip_checksumfile)
+            self._run(self.meson_command + ['dist', '--formats', 'gztar'],
+                      workdir=self.builddir)
+            self.assertPathExists(gz_distfile)
+            self.assertPathExists(gz_checksumfile)
+            self._run(self.meson_command + ['dist', '--formats', 'zip'],
+                      workdir=self.builddir)
+            self.assertPathExists(zip_distfile)
+            self.assertPathExists(zip_checksumfile)
+            os.remove(xz_distfile)
+            os.remove(xz_checksumfile)
+            os.remove(gz_distfile)
+            os.remove(gz_checksumfile)
+            os.remove(zip_distfile)
+            os.remove(zip_checksumfile)
+            self._run(self.meson_command + ['dist', '--formats', 'xztar,gztar,zip'],
+                      workdir=self.builddir)
+            self.assertPathExists(xz_distfile)
+            self.assertPathExists(xz_checksumfile)
+            self.assertPathExists(gz_distfile)
+            self.assertPathExists(gz_checksumfile)
+            self.assertPathExists(zip_distfile)
+            self.assertPathExists(zip_checksumfile)
+
+            if include_subprojects:
+                # Verify that without --include-subprojects we have files from
+                # the main project and also files from subprojects part of the
+                # main vcs repository.
+                z = zipfile.ZipFile(zip_distfile)
+                expected = ['disttest-1.4.3/',
+                            'disttest-1.4.3/meson.build',
+                            'disttest-1.4.3/distexe.c']
+                if vcs_add_all:
+                    expected += ['disttest-1.4.3/subprojects/',
+                                 'disttest-1.4.3/subprojects/samerepo/',
+                                 'disttest-1.4.3/subprojects/samerepo/meson.build']
+                self.assertEqual(sorted(expected),
+                                 sorted(z.namelist()))
+                # Verify that with --include-subprojects we now also have files
+                # from tarball and separate vcs subprojects. But not files from
+                # unused subprojects.
+ self._run(self.meson_command + ['dist', '--formats', 'zip', '--include-subprojects'], + workdir=self.builddir) + z = zipfile.ZipFile(zip_distfile) + expected += ['disttest-1.4.3/subprojects/tarballsub/', + 'disttest-1.4.3/subprojects/tarballsub/meson.build', + 'disttest-1.4.3/subprojects/vcssub/', + 'disttest-1.4.3/subprojects/vcssub/meson.build'] + self.assertEqual(sorted(expected), + sorted(z.namelist())) + if vcs_add_all: + # Verify we can distribute separately subprojects in the same vcs + # repository as the main project. + subproject_dir = os.path.join(project_dir, 'subprojects', 'samerepo') + self.new_builddir() + self.init(subproject_dir) + self.build('dist') + xz_distfile = os.path.join(self.distdir, 'samerepo-1.0.tar.xz') + xz_checksumfile = xz_distfile + '.sha256sum' + self.assertPathExists(xz_distfile) + self.assertPathExists(xz_checksumfile) + tar = tarfile.open(xz_distfile, "r:xz") # [ignore encoding] + self.assertEqual(sorted(['samerepo-1.0', + 'samerepo-1.0/meson.build']), + sorted([i.name for i in tar])) + + def test_rpath_uses_ORIGIN(self): + ''' + Test that built targets use $ORIGIN in rpath, which ensures that they + are relocatable and ensures that builds are reproducible since the + build directory won't get embedded into the built binaries. + ''' + if is_windows() or is_cygwin(): + raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') + testdir = os.path.join(self.common_test_dir, '39 library chain') + self.init(testdir) + self.build() + for each in ('prog', 'subdir/liblib1.so', ): + rpath = get_rpath(os.path.join(self.builddir, each)) + self.assertTrue(rpath, f'Rpath could not be determined for {each}.') + if is_dragonflybsd(): + # DragonflyBSD will prepend /usr/lib/gccVERSION to the rpath, + # so ignore that. + self.assertTrue(rpath.startswith('/usr/lib/gcc')) + rpaths = rpath.split(':')[1:] + else: + rpaths = rpath.split(':') + for path in rpaths: + self.assertTrue(path.startswith('$ORIGIN'), msg=(each, path)) + # These two don't link to anything else, so they do not need an rpath entry. + for each in ('subdir/subdir2/liblib2.so', 'subdir/subdir3/liblib3.so'): + rpath = get_rpath(os.path.join(self.builddir, each)) + if is_dragonflybsd(): + # The rpath should be equal to /usr/lib/gccVERSION + self.assertTrue(rpath.startswith('/usr/lib/gcc')) + self.assertEqual(len(rpath.split(':')), 1) + else: + self.assertTrue(rpath is None) + + def test_dash_d_dedup(self): + testdir = os.path.join(self.unit_test_dir, '9 d dedup') + self.init(testdir) + cmd = self.get_compdb()[0]['command'] + self.assertTrue('-D FOO -D BAR' in cmd or + '"-D" "FOO" "-D" "BAR"' in cmd or + '/D FOO /D BAR' in cmd or + '"/D" "FOO" "/D" "BAR"' in cmd) + + def test_all_forbidden_targets_tested(self): + ''' + Test that all forbidden targets are tested in the '150 reserved targets' + test. Needs to be a unit test because it accesses Meson internals. + ''' + testdir = os.path.join(self.common_test_dir, '150 reserved targets') + targets = mesonbuild.coredata.FORBIDDEN_TARGET_NAMES + # We don't actually define a target with this name + targets.pop('build.ninja') + # Remove this to avoid multiple entries with the same name + # but different case. 
+ targets.pop('PHONY') + for i in targets: + self.assertPathExists(os.path.join(testdir, i)) + + def detect_prebuild_env(self): + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + stlinker = detect_static_linker(env, cc) + if mesonbuild.mesonlib.is_windows(): + object_suffix = 'obj' + shared_suffix = 'dll' + elif mesonbuild.mesonlib.is_cygwin(): + object_suffix = 'o' + shared_suffix = 'dll' + elif mesonbuild.mesonlib.is_osx(): + object_suffix = 'o' + shared_suffix = 'dylib' + else: + object_suffix = 'o' + shared_suffix = 'so' + return (cc, stlinker, object_suffix, shared_suffix) + + def pbcompile(self, compiler, source, objectfile, extra_args=None): + cmd = compiler.get_exelist() + extra_args = extra_args or [] + if compiler.get_argument_syntax() == 'msvc': + cmd += ['/nologo', '/Fo' + objectfile, '/c', source] + extra_args + else: + cmd += ['-c', source, '-o', objectfile] + extra_args + subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + + def test_prebuilt_object(self): + (compiler, _, object_suffix, _) = self.detect_prebuild_env() + tdir = os.path.join(self.unit_test_dir, '15 prebuilt object') + source = os.path.join(tdir, 'source.c') + objectfile = os.path.join(tdir, 'prebuilt.' + object_suffix) + self.pbcompile(compiler, source, objectfile) + try: + self.init(tdir) + self.build() + self.run_tests() + finally: + os.unlink(objectfile) + + def build_static_lib(self, compiler, linker, source, objectfile, outfile, extra_args=None): + if extra_args is None: + extra_args = [] + if compiler.get_argument_syntax() == 'msvc': + link_cmd = ['lib', '/NOLOGO', '/OUT:' + outfile, objectfile] + else: + link_cmd = ['ar', 'csr', outfile, objectfile] + link_cmd = linker.get_exelist() + link_cmd += linker.get_always_args() + link_cmd += linker.get_std_link_args() + link_cmd += linker.get_output_args(outfile) + link_cmd += [objectfile] + self.pbcompile(compiler, source, objectfile, extra_args=extra_args) + try: + subprocess.check_call(link_cmd) + finally: + os.unlink(objectfile) + + def test_prebuilt_static_lib(self): + (cc, stlinker, object_suffix, _) = self.detect_prebuild_env() + tdir = os.path.join(self.unit_test_dir, '16 prebuilt static') + source = os.path.join(tdir, 'libdir/best.c') + objectfile = os.path.join(tdir, 'libdir/best.' 
+ object_suffix) + stlibfile = os.path.join(tdir, 'libdir/libbest.a') + self.build_static_lib(cc, stlinker, source, objectfile, stlibfile) + # Run the test + try: + self.init(tdir) + self.build() + self.run_tests() + finally: + os.unlink(stlibfile) + + def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra_args=None): + if extra_args is None: + extra_args = [] + if compiler.get_argument_syntax() == 'msvc': + link_cmd = compiler.get_linker_exelist() + [ + '/NOLOGO', '/DLL', '/DEBUG', '/IMPLIB:' + impfile, + '/OUT:' + outfile, objectfile] + else: + if not (compiler.info.is_windows() or compiler.info.is_cygwin() or compiler.info.is_darwin()): + extra_args += ['-fPIC'] + link_cmd = compiler.get_exelist() + ['-shared', '-o', outfile, objectfile] + if not mesonbuild.mesonlib.is_osx(): + link_cmd += ['-Wl,-soname=' + os.path.basename(outfile)] + self.pbcompile(compiler, source, objectfile, extra_args=extra_args) + try: + subprocess.check_call(link_cmd) + finally: + os.unlink(objectfile) + + def test_prebuilt_shared_lib(self): + (cc, _, object_suffix, shared_suffix) = self.detect_prebuild_env() + tdir = os.path.join(self.unit_test_dir, '17 prebuilt shared') + source = os.path.join(tdir, 'alexandria.c') + objectfile = os.path.join(tdir, 'alexandria.' + object_suffix) + impfile = os.path.join(tdir, 'alexandria.lib') + if cc.get_argument_syntax() == 'msvc': + shlibfile = os.path.join(tdir, 'alexandria.' + shared_suffix) + elif is_cygwin(): + shlibfile = os.path.join(tdir, 'cygalexandria.' + shared_suffix) + else: + shlibfile = os.path.join(tdir, 'libalexandria.' + shared_suffix) + self.build_shared_lib(cc, source, objectfile, shlibfile, impfile) + # Run the test + try: + self.init(tdir) + self.build() + self.run_tests() + finally: + os.unlink(shlibfile) + if mesonbuild.mesonlib.is_windows(): + # Clean up all the garbage MSVC writes in the + # source tree. + for fname in glob(os.path.join(tdir, 'alexandria.*')): + if os.path.splitext(fname)[1] not in ['.c', '.h']: + os.unlink(fname) + + @skipIfNoPkgconfig + def test_pkgconfig_static(self): + ''' + Test that the we prefer static libraries when `static: true` is + passed to dependency() with pkg-config. Can't be an ordinary test + because we need to build libs and try to find them from meson.build + + Also test that it's not a hard error to have unsatisfiable library deps + since system libraries -lm will never be found statically. + https://github.com/mesonbuild/meson/issues/2785 + ''' + (cc, stlinker, objext, shext) = self.detect_prebuild_env() + testdir = os.path.join(self.unit_test_dir, '18 pkgconfig static') + source = os.path.join(testdir, 'foo.c') + objectfile = os.path.join(testdir, 'foo.' + objext) + stlibfile = os.path.join(testdir, 'libfoo.a') + impfile = os.path.join(testdir, 'foo.lib') + if cc.get_argument_syntax() == 'msvc': + shlibfile = os.path.join(testdir, 'foo.' + shext) + elif is_cygwin(): + shlibfile = os.path.join(testdir, 'cygfoo.' + shext) + else: + shlibfile = os.path.join(testdir, 'libfoo.' + shext) + # Build libs + self.build_static_lib(cc, stlinker, source, objectfile, stlibfile, extra_args=['-DFOO_STATIC']) + self.build_shared_lib(cc, source, objectfile, shlibfile, impfile) + # Run test + try: + self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': self.builddir}) + self.build() + self.run_tests() + finally: + os.unlink(stlibfile) + os.unlink(shlibfile) + if mesonbuild.mesonlib.is_windows(): + # Clean up all the garbage MSVC writes in the + # source tree. 
+ for fname in glob(os.path.join(testdir, 'foo.*')): + if os.path.splitext(fname)[1] not in ['.c', '.h', '.in']: + os.unlink(fname) + + @skipIfNoPkgconfig + @mock.patch.dict(os.environ) + def test_pkgconfig_gen_escaping(self): + testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen') + prefix = '/usr/with spaces' + libdir = 'lib' + self.init(testdir, extra_args=['--prefix=' + prefix, + '--libdir=' + libdir]) + # Find foo dependency + os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir + env = get_fake_env(testdir, self.builddir, self.prefix) + kwargs = {'required': True, 'silent': True} + foo_dep = PkgConfigDependency('libfoo', env, kwargs) + # Ensure link_args are properly quoted + libdir = PurePath(prefix) / PurePath(libdir) + link_args = ['-L' + libdir.as_posix(), '-lfoo'] + self.assertEqual(foo_dep.get_link_args(), link_args) + # Ensure include args are properly quoted + incdir = PurePath(prefix) / PurePath('include') + cargs = ['-I' + incdir.as_posix(), '-DLIBFOO'] + # pkg-config and pkgconf does not respect the same order + self.assertEqual(sorted(foo_dep.get_compile_args()), sorted(cargs)) + + def test_array_option_change(self): + def get_opt(): + opts = self.introspect('--buildoptions') + for x in opts: + if x.get('name') == 'list': + return x + raise Exception(opts) + + expected = { + 'name': 'list', + 'description': 'list', + 'section': 'user', + 'type': 'array', + 'value': ['foo', 'bar'], + 'choices': ['foo', 'bar', 'oink', 'boink'], + 'machine': 'any', + } + tdir = os.path.join(self.unit_test_dir, '19 array option') + self.init(tdir) + original = get_opt() + self.assertDictEqual(original, expected) + + expected['value'] = ['oink', 'boink'] + self.setconf('-Dlist=oink,boink') + changed = get_opt() + self.assertEqual(changed, expected) + + def test_array_option_bad_change(self): + def get_opt(): + opts = self.introspect('--buildoptions') + for x in opts: + if x.get('name') == 'list': + return x + raise Exception(opts) + + expected = { + 'name': 'list', + 'description': 'list', + 'section': 'user', + 'type': 'array', + 'value': ['foo', 'bar'], + 'choices': ['foo', 'bar', 'oink', 'boink'], + 'machine': 'any', + } + tdir = os.path.join(self.unit_test_dir, '19 array option') + self.init(tdir) + original = get_opt() + self.assertDictEqual(original, expected) + with self.assertRaises(subprocess.CalledProcessError): + self.setconf('-Dlist=bad') + changed = get_opt() + self.assertDictEqual(changed, expected) + + def test_array_option_empty_equivalents(self): + """Array options treat -Dopt=[] and -Dopt= as equivalent.""" + def get_opt(): + opts = self.introspect('--buildoptions') + for x in opts: + if x.get('name') == 'list': + return x + raise Exception(opts) + + expected = { + 'name': 'list', + 'description': 'list', + 'section': 'user', + 'type': 'array', + 'value': [], + 'choices': ['foo', 'bar', 'oink', 'boink'], + 'machine': 'any', + } + tdir = os.path.join(self.unit_test_dir, '19 array option') + self.init(tdir, extra_args='-Dlist=') + original = get_opt() + self.assertDictEqual(original, expected) + + def opt_has(self, name, value): + res = self.introspect('--buildoptions') + found = False + for i in res: + if i['name'] == name: + self.assertEqual(i['value'], value) + found = True + break + self.assertTrue(found, "Array option not found in introspect data.") + + def test_free_stringarray_setting(self): + testdir = os.path.join(self.common_test_dir, '40 options') + self.init(testdir) + self.opt_has('free_array_opt', []) + self.setconf('-Dfree_array_opt=foo,bar', 
will_build=False) + self.opt_has('free_array_opt', ['foo', 'bar']) + self.setconf("-Dfree_array_opt=['a,b', 'c,d']", will_build=False) + self.opt_has('free_array_opt', ['a,b', 'c,d']) + + # When running under Travis Mac CI, the file updates seem to happen + # too fast so the timestamps do not get properly updated. + # Call this method before file operations in appropriate places + # to make things work. + def mac_ci_delay(self): + if is_osx() and is_ci(): + import time + time.sleep(1) + + def test_options_with_choices_changing(self) -> None: + """Detect when options like arrays or combos have their choices change.""" + testdir = Path(os.path.join(self.unit_test_dir, '84 change option choices')) + options1 = str(testdir / 'meson_options.1.txt') + options2 = str(testdir / 'meson_options.2.txt') + + # Test that old options are changed to the new defaults if they are not valid + real_options = str(testdir / 'meson_options.txt') + self.addCleanup(os.unlink, real_options) + + shutil.copy(options1, real_options) + self.init(str(testdir)) + self.mac_ci_delay() + shutil.copy(options2, real_options) + + self.build() + opts = self.introspect('--buildoptions') + for item in opts: + if item['name'] == 'combo': + self.assertEqual(item['value'], 'b') + self.assertEqual(item['choices'], ['b', 'c', 'd']) + elif item['name'] == 'array': + self.assertEqual(item['value'], ['b']) + self.assertEqual(item['choices'], ['b', 'c', 'd']) + + self.wipe() + self.mac_ci_delay() + + # When the old options are valid they should remain + shutil.copy(options1, real_options) + self.init(str(testdir), extra_args=['-Dcombo=c', '-Darray=b,c']) + self.mac_ci_delay() + shutil.copy(options2, real_options) + self.build() + opts = self.introspect('--buildoptions') + for item in opts: + if item['name'] == 'combo': + self.assertEqual(item['value'], 'c') + self.assertEqual(item['choices'], ['b', 'c', 'd']) + elif item['name'] == 'array': + self.assertEqual(item['value'], ['b', 'c']) + self.assertEqual(item['choices'], ['b', 'c', 'd']) + + def test_subproject_promotion(self): + testdir = os.path.join(self.unit_test_dir, '12 promote') + workdir = os.path.join(self.builddir, 'work') + shutil.copytree(testdir, workdir) + spdir = os.path.join(workdir, 'subprojects') + s3dir = os.path.join(spdir, 's3') + scommondir = os.path.join(spdir, 'scommon') + self.assertFalse(os.path.isdir(s3dir)) + subprocess.check_call(self.wrap_command + ['promote', 's3'], + cwd=workdir, + stdout=subprocess.DEVNULL) + self.assertTrue(os.path.isdir(s3dir)) + self.assertFalse(os.path.isdir(scommondir)) + self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'scommon'], + cwd=workdir, + stderr=subprocess.DEVNULL), 0) + self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'invalid/path/to/scommon'], + cwd=workdir, + stderr=subprocess.DEVNULL), 0) + self.assertFalse(os.path.isdir(scommondir)) + subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/scommon'], cwd=workdir) + self.assertTrue(os.path.isdir(scommondir)) + promoted_wrap = os.path.join(spdir, 'athing.wrap') + self.assertFalse(os.path.isfile(promoted_wrap)) + subprocess.check_call(self.wrap_command + ['promote', 'athing'], cwd=workdir) + self.assertTrue(os.path.isfile(promoted_wrap)) + self.init(workdir) + self.build() + + def test_subproject_promotion_wrap(self): + testdir = os.path.join(self.unit_test_dir, '44 promote wrap') + workdir = os.path.join(self.builddir, 'work') + shutil.copytree(testdir, workdir) + spdir = os.path.join(workdir, 
'subprojects') + + ambiguous_wrap = os.path.join(spdir, 'ambiguous.wrap') + self.assertNotEqual(subprocess.call(self.wrap_command + ['promote', 'ambiguous'], + cwd=workdir, + stderr=subprocess.DEVNULL), 0) + self.assertFalse(os.path.isfile(ambiguous_wrap)) + subprocess.check_call(self.wrap_command + ['promote', 'subprojects/s2/subprojects/ambiguous.wrap'], cwd=workdir) + self.assertTrue(os.path.isfile(ambiguous_wrap)) + + def test_warning_location(self): + tdir = os.path.join(self.unit_test_dir, '22 warning location') + out = self.init(tdir) + for expected in [ + r'meson.build:4: WARNING: Keyword argument "link_with" defined multiple times.', + r'sub' + os.path.sep + r'meson.build:3: WARNING: Keyword argument "link_with" defined multiple times.', + r'meson.build:6: WARNING: a warning of some sort', + r'sub' + os.path.sep + r'meson.build:4: WARNING: subdir warning', + r'meson.build:7: WARNING: Module unstable-simd has no backwards or forwards compatibility and might not exist in future releases.', + r"meson.build:11: WARNING: The variable(s) 'MISSING' in the input file 'conf.in' are not present in the given configuration data.", + r'meson.build:1: WARNING: Passed invalid keyword argument "invalid".', + ]: + self.assertRegex(out, re.escape(expected)) + + for wd in [ + self.src_root, + self.builddir, + os.getcwd(), + ]: + self.new_builddir() + out = self.init(tdir, workdir=wd) + expected = os.path.join(relpath(tdir, self.src_root), 'meson.build') + relwd = relpath(self.src_root, wd) + if relwd != '.': + expected = os.path.join(relwd, expected) + expected = '\n' + expected + ':' + self.assertIn(expected, out) + + def test_error_location_path(self): + '''Test locations in meson errors contain correct paths''' + # this list contains errors from all the different steps in the + # lexer/parser/interpreter we have tests for. + for (t, f) in [ + ('10 out of bounds', 'meson.build'), + ('18 wrong plusassign', 'meson.build'), + ('60 bad option argument', 'meson_options.txt'), + ('98 subdir parse error', os.path.join('subdir', 'meson.build')), + ('99 invalid option file', 'meson_options.txt'), + ]: + tdir = os.path.join(self.src_root, 'test cases', 'failing', t) + + for wd in [ + self.src_root, + self.builddir, + os.getcwd(), + ]: + try: + self.init(tdir, workdir=wd) + except subprocess.CalledProcessError as e: + expected = os.path.join('test cases', 'failing', t, f) + relwd = relpath(self.src_root, wd) + if relwd != '.': + expected = os.path.join(relwd, expected) + expected = '\n' + expected + ':' + self.assertIn(expected, e.output) + else: + self.fail('configure unexpectedly succeeded') + + def test_permitted_method_kwargs(self): + tdir = os.path.join(self.unit_test_dir, '25 non-permitted kwargs') + out = self.init(tdir) + for expected in [ + r'WARNING: Passed invalid keyword argument "prefixxx".', + r'WARNING: Passed invalid keyword argument "argsxx".', + r'WARNING: Passed invalid keyword argument "invalidxx".', + ]: + self.assertRegex(out, re.escape(expected)) + + def test_templates(self): + ninja = detect_ninja() + if ninja is None: + raise unittest.SkipTest('This test currently requires ninja. 
Fix this once "meson build" works.') + + langs = ['c'] + env = get_fake_env() + for l in ['cpp', 'cs', 'd', 'java', 'cuda', 'fortran', 'objc', 'objcpp', 'rust']: + try: + comp = mesonbuild.compilers.detect_compiler_for(env, l, MachineChoice.HOST) + with tempfile.TemporaryDirectory() as d: + comp.sanity_check(d, env) + langs.append(l) + except EnvironmentException: + pass + + # The D template fails under mac CI and we don't know why. + # Patches welcome + if is_osx(): + langs = [l for l in langs if l != 'd'] + + for lang in langs: + for target_type in ('executable', 'library'): + # test empty directory + with tempfile.TemporaryDirectory() as tmpdir: + self._run(self.meson_command + ['init', '--language', lang, '--type', target_type], + workdir=tmpdir) + self._run(self.setup_command + ['--backend=ninja', 'builddir'], + workdir=tmpdir) + self._run(ninja, + workdir=os.path.join(tmpdir, 'builddir')) + # test directory with existing code file + if lang in {'c', 'cpp', 'd'}: + with tempfile.TemporaryDirectory() as tmpdir: + with open(os.path.join(tmpdir, 'foo.' + lang), 'w', encoding='utf-8') as f: + f.write('int main(void) {}') + self._run(self.meson_command + ['init', '-b'], workdir=tmpdir) + elif lang in {'java'}: + with tempfile.TemporaryDirectory() as tmpdir: + with open(os.path.join(tmpdir, 'Foo.' + lang), 'w', encoding='utf-8') as f: + f.write('public class Foo { public static void main() {} }') + self._run(self.meson_command + ['init', '-b'], workdir=tmpdir) + + def test_compiler_run_command(self): + ''' + The test checks that the compiler object can be passed to + run_command(). + ''' + testdir = os.path.join(self.unit_test_dir, '24 compiler run_command') + self.init(testdir) + + def test_identical_target_name_in_subproject_flat_layout(self): + ''' + Test that identical targets in different subprojects do not collide + if layout is flat. + ''' + testdir = os.path.join(self.common_test_dir, '172 identical target name in subproject flat layout') + self.init(testdir, extra_args=['--layout=flat']) + self.build() + + def test_identical_target_name_in_subdir_flat_layout(self): + ''' + Test that identical targets in different subdirs do not collide + if layout is flat. + ''' + testdir = os.path.join(self.common_test_dir, '181 same target name flat layout') + self.init(testdir, extra_args=['--layout=flat']) + self.build() + + def test_flock(self): + exception_raised = False + with tempfile.TemporaryDirectory() as tdir: + os.mkdir(os.path.join(tdir, 'meson-private')) + with BuildDirLock(tdir): + try: + with BuildDirLock(tdir): + pass + except MesonException: + exception_raised = True + self.assertTrue(exception_raised, 'Double locking did not raise exception.') + + @unittest.skipIf(is_osx(), 'Test not applicable to OSX') + def test_check_module_linking(self): + """ + Test that link_with: a shared module issues a warning + https://github.com/mesonbuild/meson/issues/2865 + (That an error is raised on OSX is exercised by test failing/78) + """ + tdir = os.path.join(self.unit_test_dir, '30 shared_mod linking') + out = self.init(tdir) + msg = ('WARNING: target links against shared modules. 
This is not ' + 'recommended as it is not supported on some platforms') + self.assertIn(msg, out) + + def test_ndebug_if_release_disabled(self): + testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release') + self.init(testdir, extra_args=['--buildtype=release', '-Db_ndebug=if-release']) + self.build() + exe = os.path.join(self.builddir, 'main') + self.assertEqual(b'NDEBUG=1', subprocess.check_output(exe).strip()) + + def test_ndebug_if_release_enabled(self): + testdir = os.path.join(self.unit_test_dir, '28 ndebug if-release') + self.init(testdir, extra_args=['--buildtype=debugoptimized', '-Db_ndebug=if-release']) + self.build() + exe = os.path.join(self.builddir, 'main') + self.assertEqual(b'NDEBUG=0', subprocess.check_output(exe).strip()) + + def test_guessed_linker_dependencies(self): + ''' + Test that meson adds dependencies for libraries based on the final + linker command line. + ''' + testdirbase = os.path.join(self.unit_test_dir, '29 guessed linker dependencies') + testdirlib = os.path.join(testdirbase, 'lib') + + extra_args = None + libdir_flags = ['-L'] + env = get_fake_env(testdirlib, self.builddir, self.prefix) + if detect_c_compiler(env, MachineChoice.HOST).get_id() in {'msvc', 'clang-cl', 'intel-cl'}: + # msvc-like compiler, also test it with msvc-specific flags + libdir_flags += ['/LIBPATH:', '-LIBPATH:'] + else: + # static libraries are not linkable with -l with msvc because meson installs them + # as .a files which unix_args_to_native will not know as it expects libraries to use + # .lib as extension. For a DLL the import library is installed as .lib. Thus for msvc + # this tests needs to use shared libraries to test the path resolving logic in the + # dependency generation code path. + extra_args = ['--default-library', 'static'] + + initial_builddir = self.builddir + initial_installdir = self.installdir + + for libdir_flag in libdir_flags: + # build library + self.new_builddir() + self.init(testdirlib, extra_args=extra_args) + self.build() + self.install() + libbuilddir = self.builddir + installdir = self.installdir + libdir = os.path.join(self.installdir, self.prefix.lstrip('/').lstrip('\\'), 'lib') + + # build user of library + self.new_builddir() + # replace is needed because meson mangles platform paths passed via LDFLAGS + self.init(os.path.join(testdirbase, 'exe'), + override_envvars={"LDFLAGS": '{}{}'.format(libdir_flag, libdir.replace('\\', '/'))}) + self.build() + self.assertBuildIsNoop() + + # rebuild library + exebuilddir = self.builddir + self.installdir = installdir + self.builddir = libbuilddir + # Microsoft's compiler is quite smart about touching import libs on changes, + # so ensure that there is actually a change in symbols. 
+ self.setconf('-Dmore_exports=true') + self.build() + self.install() + # no ensure_backend_detects_changes needed because self.setconf did that already + + # assert user of library will be rebuild + self.builddir = exebuilddir + self.assertRebuiltTarget('app') + + # restore dirs for the next test case + self.installdir = initial_builddir + self.builddir = initial_installdir + + def test_conflicting_d_dash_option(self): + testdir = os.path.join(self.unit_test_dir, '37 mixed command line args') + with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as e: + self.init(testdir, extra_args=['-Dbindir=foo', '--bindir=bar']) + # Just to ensure that we caught the correct error + self.assertIn('as both', e.stderr) + + def _test_same_option_twice(self, arg, args): + testdir = os.path.join(self.unit_test_dir, '37 mixed command line args') + self.init(testdir, extra_args=args) + opts = self.introspect('--buildoptions') + for item in opts: + if item['name'] == arg: + self.assertEqual(item['value'], 'bar') + return + raise Exception(f'Missing {arg} value?') + + def test_same_dash_option_twice(self): + self._test_same_option_twice('bindir', ['--bindir=foo', '--bindir=bar']) + + def test_same_d_option_twice(self): + self._test_same_option_twice('bindir', ['-Dbindir=foo', '-Dbindir=bar']) + + def test_same_project_d_option_twice(self): + self._test_same_option_twice('one', ['-Done=foo', '-Done=bar']) + + def _test_same_option_twice_configure(self, arg, args): + testdir = os.path.join(self.unit_test_dir, '37 mixed command line args') + self.init(testdir) + self.setconf(args) + opts = self.introspect('--buildoptions') + for item in opts: + if item['name'] == arg: + self.assertEqual(item['value'], 'bar') + return + raise Exception(f'Missing {arg} value?') + + def test_same_dash_option_twice_configure(self): + self._test_same_option_twice_configure( + 'bindir', ['--bindir=foo', '--bindir=bar']) + + def test_same_d_option_twice_configure(self): + self._test_same_option_twice_configure( + 'bindir', ['-Dbindir=foo', '-Dbindir=bar']) + + def test_same_project_d_option_twice_configure(self): + self._test_same_option_twice_configure( + 'one', ['-Done=foo', '-Done=bar']) + + def test_command_line(self): + testdir = os.path.join(self.unit_test_dir, '34 command line') + + # Verify default values when passing no args that affect the + # configuration, and as a bonus, test that --profile-self works. 
+ out = self.init(testdir, extra_args=['--profile-self', '--fatal-meson-warnings']) + self.assertNotIn('[default: true]', out) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('default_library')].value, 'static') + self.assertEqual(obj.options[OptionKey('warning_level')].value, '1') + self.assertEqual(obj.options[OptionKey('set_sub_opt')].value, True) + self.assertEqual(obj.options[OptionKey('subp_opt', 'subp')].value, 'default3') + self.wipe() + + # warning_level is special, it's --warnlevel instead of --warning-level + # for historical reasons + self.init(testdir, extra_args=['--warnlevel=2', '--fatal-meson-warnings']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '2') + self.setconf('--warnlevel=3') + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '3') + self.wipe() + + # But when using -D syntax, it should be 'warning_level' + self.init(testdir, extra_args=['-Dwarning_level=2', '--fatal-meson-warnings']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '2') + self.setconf('-Dwarning_level=3') + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '3') + self.wipe() + + # Mixing --option and -Doption is forbidden + with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm: + self.init(testdir, extra_args=['--warnlevel=1', '-Dwarning_level=3']) + if isinstance(cm.exception, subprocess.CalledProcessError): + self.assertNotEqual(0, cm.exception.returncode) + self.assertIn('as both', cm.exception.output) + else: + self.assertIn('as both', str(cm.exception)) + self.init(testdir) + with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm: + self.setconf(['--warnlevel=1', '-Dwarning_level=3']) + if isinstance(cm.exception, subprocess.CalledProcessError): + self.assertNotEqual(0, cm.exception.returncode) + self.assertIn('as both', cm.exception.output) + else: + self.assertIn('as both', str(cm.exception)) + self.wipe() + + # --default-library should override default value from project() + self.init(testdir, extra_args=['--default-library=both', '--fatal-meson-warnings']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('default_library')].value, 'both') + self.setconf('--default-library=shared') + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('default_library')].value, 'shared') + if self.backend is Backend.ninja: + # reconfigure target works only with ninja backend + self.build('reconfigure') + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('default_library')].value, 'shared') + self.wipe() + + # Should warn on unknown options + out = self.init(testdir, extra_args=['-Dbad=1', '-Dfoo=2', '-Dwrong_link_args=foo']) + self.assertIn('Unknown options: "bad, foo, wrong_link_args"', out) + self.wipe() + + # Should fail on malformed option + msg = "Option 'foo' must have a value separated by equals sign." 
+ with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm: + self.init(testdir, extra_args=['-Dfoo']) + if isinstance(cm.exception, subprocess.CalledProcessError): + self.assertNotEqual(0, cm.exception.returncode) + self.assertIn(msg, cm.exception.output) + else: + self.assertIn(msg, str(cm.exception)) + self.init(testdir) + with self.assertRaises((subprocess.CalledProcessError, RuntimeError)) as cm: + self.setconf('-Dfoo') + if isinstance(cm.exception, subprocess.CalledProcessError): + self.assertNotEqual(0, cm.exception.returncode) + self.assertIn(msg, cm.exception.output) + else: + self.assertIn(msg, str(cm.exception)) + self.wipe() + + # It is not an error to set wrong option for unknown subprojects or + # language because we don't have control on which one will be selected. + self.init(testdir, extra_args=['-Dc_wrong=1', '-Dwrong:bad=1', '-Db_wrong=1']) + self.wipe() + + # Test we can set subproject option + self.init(testdir, extra_args=['-Dsubp:subp_opt=foo', '--fatal-meson-warnings']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('subp_opt', 'subp')].value, 'foo') + self.wipe() + + # c_args value should be parsed with split_args + self.init(testdir, extra_args=['-Dc_args=-Dfoo -Dbar "-Dthird=one two"', '--fatal-meson-warnings']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dfoo', '-Dbar', '-Dthird=one two']) + + self.setconf('-Dc_args="foo bar" one two') + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['foo bar', 'one', 'two']) + self.wipe() + + self.init(testdir, extra_args=['-Dset_percent_opt=myoption%', '--fatal-meson-warnings']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('set_percent_opt')].value, 'myoption%') + self.wipe() + + # Setting a 2nd time the same option should override the first value + try: + self.init(testdir, extra_args=['--bindir=foo', '--bindir=bar', + '-Dbuildtype=plain', '-Dbuildtype=release', + '-Db_sanitize=address', '-Db_sanitize=thread', + '-Dc_args=-Dfoo', '-Dc_args=-Dbar', + '-Db_lundef=false', '--fatal-meson-warnings']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('bindir')].value, 'bar') + self.assertEqual(obj.options[OptionKey('buildtype')].value, 'release') + self.assertEqual(obj.options[OptionKey('b_sanitize')].value, 'thread') + self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dbar']) + self.setconf(['--bindir=bar', '--bindir=foo', + '-Dbuildtype=release', '-Dbuildtype=plain', + '-Db_sanitize=thread', '-Db_sanitize=address', + '-Dc_args=-Dbar', '-Dc_args=-Dfoo']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('bindir')].value, 'foo') + self.assertEqual(obj.options[OptionKey('buildtype')].value, 'plain') + self.assertEqual(obj.options[OptionKey('b_sanitize')].value, 'address') + self.assertEqual(obj.options[OptionKey('args', lang='c')].value, ['-Dfoo']) + self.wipe() + except KeyError: + # Ignore KeyError, it happens on CI for compilers that does not + # support b_sanitize. We have to test with a base option because + # they used to fail this test with Meson 0.46 an earlier versions. 
+ pass + + def test_warning_level_0(self): + testdir = os.path.join(self.common_test_dir, '207 warning level 0') + + # Verify default values when passing no args + self.init(testdir) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '0') + self.wipe() + + # verify we can override w/ --warnlevel + self.init(testdir, extra_args=['--warnlevel=1']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '1') + self.setconf('--warnlevel=0') + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '0') + self.wipe() + + # verify we can override w/ -Dwarning_level + self.init(testdir, extra_args=['-Dwarning_level=1']) + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '1') + self.setconf('-Dwarning_level=0') + obj = mesonbuild.coredata.load(self.builddir) + self.assertEqual(obj.options[OptionKey('warning_level')].value, '0') + self.wipe() + + def test_feature_check_usage_subprojects(self): + testdir = os.path.join(self.unit_test_dir, '41 featurenew subprojects') + out = self.init(testdir) + # Parent project warns correctly + self.assertRegex(out, "WARNING: Project targeting '>=0.45'.*'0.47.0': dict") + # Subprojects warn correctly + self.assertRegex(out, r"\| WARNING: Project targeting '>=0.40'.*'0.44.0': disabler") + self.assertRegex(out, r"\| WARNING: Project targeting '!=0.40'.*'0.44.0': disabler") + # Subproject has a new-enough meson_version, no warning + self.assertNotRegex(out, "WARNING: Project targeting.*Python") + # Ensure a summary is printed in the subproject and the outer project + self.assertRegex(out, r"\| WARNING: Project specifies a minimum meson_version '>=0.40'") + self.assertRegex(out, r"\| \* 0.44.0: {'disabler'}") + self.assertRegex(out, "WARNING: Project specifies a minimum meson_version '>=0.45'") + self.assertRegex(out, " * 0.47.0: {'dict'}") + + def test_configure_file_warnings(self): + testdir = os.path.join(self.common_test_dir, "14 configure file") + out = self.init(testdir) + self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*") + self.assertRegex(out, "WARNING:.*'FOO_BAR'.*nosubst-nocopy2.txt.in.*not present.*") + self.assertRegex(out, "WARNING:.*'empty'.*config.h.in.*not present.*") + self.assertRegex(out, "WARNING:.*empty configuration_data.*test.py.in") + # Warnings for configuration files that are overwritten. 
+ self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites") + self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites") + self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites") + self.assertNotRegex(out, "WARNING:.*@BASENAME@.*overwrites") + self.assertRegex(out, "WARNING:.*\"sameafterbasename\".*overwrites") + # No warnings about empty configuration data objects passed to files with substitutions + self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in") + self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in") + with open(os.path.join(self.builddir, 'nosubst-nocopy1.txt'), 'rb') as f: + self.assertEqual(f.read().strip(), b'/* #undef FOO_BAR */') + with open(os.path.join(self.builddir, 'nosubst-nocopy2.txt'), 'rb') as f: + self.assertEqual(f.read().strip(), b'') + self.assertRegex(out, r"DEPRECATION:.*\['array'\] is invalid.*dict") + + def test_dirs(self): + with tempfile.TemporaryDirectory() as containing: + with tempfile.TemporaryDirectory(dir=containing) as srcdir: + mfile = os.path.join(srcdir, 'meson.build') + of = open(mfile, 'w', encoding='utf-8') + of.write("project('foobar', 'c')\n") + of.close() + pc = subprocess.run(self.setup_command, + cwd=srcdir, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL) + self.assertIn(b'Must specify at least one directory name', pc.stdout) + with tempfile.TemporaryDirectory(dir=srcdir) as builddir: + subprocess.run(self.setup_command, + check=True, + cwd=builddir, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + + def get_opts_as_dict(self): + result = {} + for i in self.introspect('--buildoptions'): + result[i['name']] = i['value'] + return result + + def test_buildtype_setting(self): + testdir = os.path.join(self.common_test_dir, '1 trivial') + self.init(testdir) + opts = self.get_opts_as_dict() + self.assertEqual(opts['buildtype'], 'debug') + self.assertEqual(opts['debug'], True) + self.setconf('-Ddebug=false') + opts = self.get_opts_as_dict() + self.assertEqual(opts['debug'], False) + self.assertEqual(opts['buildtype'], 'debug') + self.assertEqual(opts['optimization'], '0') + self.setconf('-Doptimization=g') + opts = self.get_opts_as_dict() + self.assertEqual(opts['debug'], False) + self.assertEqual(opts['buildtype'], 'debug') + self.assertEqual(opts['optimization'], 'g') + + @skipIfNoPkgconfig + @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows') + def test_native_dep_pkgconfig(self): + testdir = os.path.join(self.unit_test_dir, + '46 native dep pkgconfig var') + with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile: + crossfile.write(textwrap.dedent( + '''[binaries] + pkgconfig = '{}' + + [properties] + + [host_machine] + system = 'linux' + cpu_family = 'arm' + cpu = 'armv7' + endian = 'little' + '''.format(os.path.join(testdir, 'cross_pkgconfig.py')))) + crossfile.flush() + self.meson_cross_file = crossfile.name + + env = {'PKG_CONFIG_LIBDIR': os.path.join(testdir, + 'native_pkgconfig')} + self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env) + self.wipe() + self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env) + + @skipIfNoPkgconfig + @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows') + def test_pkg_config_libdir(self): + testdir = os.path.join(self.unit_test_dir, + '46 native dep pkgconfig var') + with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile: + crossfile.write(textwrap.dedent( 
+ '''[binaries] + pkgconfig = 'pkg-config' + + [properties] + pkg_config_libdir = ['{}'] + + [host_machine] + system = 'linux' + cpu_family = 'arm' + cpu = 'armv7' + endian = 'little' + '''.format(os.path.join(testdir, 'cross_pkgconfig')))) + crossfile.flush() + self.meson_cross_file = crossfile.name + + env = {'PKG_CONFIG_LIBDIR': os.path.join(testdir, + 'native_pkgconfig')} + self.init(testdir, extra_args=['-Dstart_native=false'], override_envvars=env) + self.wipe() + self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env) + + def __reconfigure(self, change_minor=False): + # Set an older version to force a reconfigure from scratch + filename = os.path.join(self.privatedir, 'coredata.dat') + with open(filename, 'rb') as f: + obj = pickle.load(f) + if change_minor: + v = mesonbuild.coredata.version.split('.') + obj.version = '.'.join(v[0:2] + [str(int(v[2]) + 1)]) + else: + obj.version = '0.47.0' + with open(filename, 'wb') as f: + pickle.dump(obj, f) + + def test_reconfigure(self): + testdir = os.path.join(self.unit_test_dir, '48 reconfigure') + self.init(testdir, extra_args=['-Dopt1=val1', '-Dsub1:werror=true']) + self.setconf('-Dopt2=val2') + + self.__reconfigure() + + out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3']) + self.assertRegex(out, 'Regenerating configuration from scratch') + self.assertRegex(out, 'opt1 val1') + self.assertRegex(out, 'opt2 val2') + self.assertRegex(out, 'opt3 val3') + self.assertRegex(out, 'opt4 default4') + self.assertRegex(out, 'sub1:werror True') + self.build() + self.run_tests() + + # Create a file in builddir and verify wipe command removes it + filename = os.path.join(self.builddir, 'something') + open(filename, 'w', encoding='utf-8').close() + self.assertTrue(os.path.exists(filename)) + out = self.init(testdir, extra_args=['--wipe', '-Dopt4=val4']) + self.assertFalse(os.path.exists(filename)) + self.assertRegex(out, 'opt1 val1') + self.assertRegex(out, 'opt2 val2') + self.assertRegex(out, 'opt3 val3') + self.assertRegex(out, 'opt4 val4') + self.assertRegex(out, 'sub1:werror True') + self.assertTrue(Path(self.builddir, '.gitignore').exists()) + self.build() + self.run_tests() + + def test_wipe_from_builddir(self): + testdir = os.path.join(self.common_test_dir, '157 custom target subdir depend files') + self.init(testdir) + self.__reconfigure() + + with Path(self.builddir): + self.init(testdir, extra_args=['--wipe']) + + def test_minor_version_does_not_reconfigure_wipe(self): + testdir = os.path.join(self.unit_test_dir, '48 reconfigure') + self.init(testdir, extra_args=['-Dopt1=val1']) + self.setconf('-Dopt2=val2') + + self.__reconfigure(change_minor=True) + + out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3']) + self.assertNotRegex(out, 'Regenerating configuration from scratch') + self.assertRegex(out, 'opt1 val1') + self.assertRegex(out, 'opt2 val2') + self.assertRegex(out, 'opt3 val3') + self.assertRegex(out, 'opt4 default4') + self.build() + self.run_tests() + + def test_target_construct_id_from_path(self): + # This id is stable but not guessable. + # The test is supposed to prevent unintentional + # changes of target ID generation. 
+ target_id = Target.construct_id_from_path('some/obscure/subdir', + 'target-id', '@suffix') + self.assertEqual('5e002d3@@target-id@suffix', target_id) + target_id = Target.construct_id_from_path('subproject/foo/subdir/bar', + 'target2-id', '@other') + self.assertEqual('81d46d1@@target2-id@other', target_id) + + def test_introspect_projectinfo_without_configured_build(self): + testfile = os.path.join(self.common_test_dir, '33 run program', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), {'meson.build'}) + self.assertEqual(res['version'], 'undefined') + self.assertEqual(res['descriptive_name'], 'run command') + self.assertEqual(res['subprojects'], []) + + testfile = os.path.join(self.common_test_dir, '40 options', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), {'meson_options.txt', 'meson.build'}) + self.assertEqual(res['version'], 'undefined') + self.assertEqual(res['descriptive_name'], 'options') + self.assertEqual(res['subprojects'], []) + + testfile = os.path.join(self.common_test_dir, '43 subproject options', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), {'meson_options.txt', 'meson.build'}) + self.assertEqual(res['version'], 'undefined') + self.assertEqual(res['descriptive_name'], 'suboptions') + self.assertEqual(len(res['subprojects']), 1) + subproject_files = {f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files']} + self.assertEqual(subproject_files, {'subprojects/subproject/meson_options.txt', 'subprojects/subproject/meson.build'}) + self.assertEqual(res['subprojects'][0]['name'], 'subproject') + self.assertEqual(res['subprojects'][0]['version'], 'undefined') + self.assertEqual(res['subprojects'][0]['descriptive_name'], 'subproject') + + def test_introspect_projectinfo_subprojects(self): + testdir = os.path.join(self.common_test_dir, '98 subproject subdir') + self.init(testdir) + res = self.introspect('--projectinfo') + expected = { + 'descriptive_name': 'proj', + 'version': 'undefined', + 'subproject_dir': 'subprojects', + 'subprojects': [ + { + 'descriptive_name': 'sub', + 'name': 'sub', + 'version': '1.0' + }, + { + 'descriptive_name': 'sub_implicit', + 'name': 'sub_implicit', + 'version': '1.0', + }, + { + 'descriptive_name': 'sub-novar', + 'name': 'sub_novar', + 'version': '1.0', + }, + { + 'descriptive_name': 'subsub', + 'name': 'subsub', + 'version': 'undefined' + }, + { + 'descriptive_name': 'subsubsub', + 'name': 'subsubsub', + 'version': 'undefined' + }, + ] + } + res['subprojects'] = sorted(res['subprojects'], key=lambda i: i['name']) + self.assertDictEqual(expected, res) + + def test_introspection_target_subproject(self): + testdir = os.path.join(self.common_test_dir, '42 subproject') + self.init(testdir) + res = self.introspect('--targets') + + expected = { + 'sublib': 'sublib', + 'simpletest': 'sublib', + 'user': None + } + + for entry in res: + name = entry['name'] + self.assertEqual(entry['subproject'], expected[name]) + + def test_introspect_projectinfo_subproject_dir(self): + testdir = os.path.join(self.common_test_dir, '75 custom subproject dir') + self.init(testdir) + res = self.introspect('--projectinfo') + + self.assertEqual(res['subproject_dir'], 'custom_subproject_dir') + + def test_introspect_projectinfo_subproject_dir_from_source(self): + testfile = os.path.join(self.common_test_dir, '75 custom subproject 
dir', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + + self.assertEqual(res['subproject_dir'], 'custom_subproject_dir') + + @skipIfNoExecutable('clang-format') + def test_clang_format(self): + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'Clang-format is for now only supported on Ninja, not {self.backend.name}') + testdir = os.path.join(self.unit_test_dir, '54 clang-format') + testfile = os.path.join(testdir, 'prog.c') + badfile = os.path.join(testdir, 'prog_orig_c') + goodfile = os.path.join(testdir, 'prog_expected_c') + testheader = os.path.join(testdir, 'header.h') + badheader = os.path.join(testdir, 'header_orig_h') + goodheader = os.path.join(testdir, 'header_expected_h') + try: + shutil.copyfile(badfile, testfile) + shutil.copyfile(badheader, testheader) + self.init(testdir) + self.assertNotEqual(Path(testfile).read_text(encoding='utf-8'), + Path(goodfile).read_text(encoding='utf-8')) + self.assertNotEqual(Path(testheader).read_text(encoding='utf-8'), + Path(goodheader).read_text(encoding='utf-8')) + self.run_target('clang-format') + self.assertEqual(Path(testheader).read_text(encoding='utf-8'), + Path(goodheader).read_text(encoding='utf-8')) + finally: + if os.path.exists(testfile): + os.unlink(testfile) + if os.path.exists(testheader): + os.unlink(testheader) + + @skipIfNoExecutable('clang-tidy') + def test_clang_tidy(self): + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'Clang-tidy is for now only supported on Ninja, not {self.backend.name}') + if shutil.which('c++') is None: + raise unittest.SkipTest('Clang-tidy breaks when ccache is used and "c++" not in path.') + if is_osx(): + raise unittest.SkipTest('Apple ships a broken clang-tidy that chokes on -pipe.') + testdir = os.path.join(self.unit_test_dir, '69 clang-tidy') + dummydir = os.path.join(testdir, 'dummydir.h') + self.init(testdir, override_envvars={'CXX': 'c++'}) + out = self.run_target('clang-tidy') + self.assertIn('cttest.cpp:4:20', out) + self.assertNotIn(dummydir, out) + + def test_identity_cross(self): + testdir = os.path.join(self.unit_test_dir, '70 cross') + # Do a build to generate a cross file where the host is this target + self.init(testdir, extra_args=['-Dgenerate=true']) + self.meson_cross_file = os.path.join(self.builddir, "crossfile") + self.assertTrue(os.path.exists(self.meson_cross_file)) + # Now verify that this is detected as cross + self.new_builddir() + self.init(testdir) + + def test_introspect_buildoptions_without_configured_build(self): + testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions') + testfile = os.path.join(testdir, 'meson.build') + res_nb = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args) + self.init(testdir, default_args=False) + res_wb = self.introspect('--buildoptions') + self.maxDiff = None + # XXX: These now generate in a different order, is that okay? 
+ self.assertListEqual(sorted(res_nb, key=lambda x: x['name']), sorted(res_wb, key=lambda x: x['name'])) + + def test_meson_configure_from_source_does_not_crash(self): + testdir = os.path.join(self.unit_test_dir, '59 introspect buildoptions') + self._run(self.mconf_command + [testdir]) + + def test_introspect_buildoptions_cross_only(self): + testdir = os.path.join(self.unit_test_dir, '83 cross only introspect') + testfile = os.path.join(testdir, 'meson.build') + res = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args) + optnames = [o['name'] for o in res] + self.assertIn('c_args', optnames) + self.assertNotIn('build.c_args', optnames) + + def test_introspect_json_flat(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + out = self.init(testdir, extra_args=['-Dlayout=flat']) + infodir = os.path.join(self.builddir, 'meson-info') + self.assertPathExists(infodir) + + with open(os.path.join(infodir, 'intro-targets.json'), encoding='utf-8') as fp: + targets = json.load(fp) + + for i in targets: + for out in i['filename']: + assert(os.path.relpath(out, self.builddir).startswith('meson-out')) + + def test_introspect_json_dump(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + self.init(testdir) + infodir = os.path.join(self.builddir, 'meson-info') + self.assertPathExists(infodir) + + def assertKeyTypes(key_type_list, obj, strict: bool = True): + for i in key_type_list: + if isinstance(i[1], (list, tuple)) and None in i[1]: + i = (i[0], tuple([x for x in i[1] if x is not None])) + if i[0] not in obj or obj[i[0]] is None: + continue + self.assertIn(i[0], obj) + self.assertIsInstance(obj[i[0]], i[1]) + if strict: + for k in obj.keys(): + found = False + for i in key_type_list: + if k == i[0]: + found = True + break + self.assertTrue(found, f'Key "{k}" not in expected list') + + root_keylist = [ + ('benchmarks', list), + ('buildoptions', list), + ('buildsystem_files', list), + ('dependencies', list), + ('installed', dict), + ('projectinfo', dict), + ('targets', list), + ('tests', list), + ] + + test_keylist = [ + ('cmd', list), + ('env', dict), + ('name', str), + ('timeout', int), + ('suite', list), + ('is_parallel', bool), + ('protocol', str), + ('depends', list), + ('workdir', (str, None)), + ('priority', int), + ] + + buildoptions_keylist = [ + ('name', str), + ('section', str), + ('type', str), + ('description', str), + ('machine', str), + ('choices', (list, None)), + ('value', (str, int, bool, list)), + ] + + buildoptions_typelist = [ + ('combo', str, [('choices', list)]), + ('string', str, []), + ('boolean', bool, []), + ('integer', int, []), + ('array', list, []), + ] + + buildoptions_sections = ['core', 'backend', 'base', 'compiler', 'directory', 'user', 'test'] + buildoptions_machines = ['any', 'build', 'host'] + + dependencies_typelist = [ + ('name', str), + ('version', str), + ('compile_args', list), + ('link_args', list), + ] + + targets_typelist = [ + ('name', str), + ('id', str), + ('type', str), + ('defined_in', str), + ('filename', list), + ('build_by_default', bool), + ('target_sources', list), + ('extra_files', list), + ('subproject', (str, None)), + ('install_filename', (list, None)), + ('installed', bool), + ] + + targets_sources_typelist = [ + ('language', str), + ('compiler', list), + ('parameters', list), + ('sources', list), + ('generated_sources', list), + ] + + # First load all files + res = {} + for i in root_keylist: + curr = os.path.join(infodir, 'intro-{}.json'.format(i[0])) + self.assertPathExists(curr) + 
with open(curr, encoding='utf-8') as fp: + res[i[0]] = json.load(fp) + + assertKeyTypes(root_keylist, res) + + # Match target ids to input and output files for ease of reference + src_to_id = {} + out_to_id = {} + name_to_out = {} + for i in res['targets']: + print(json.dump(i, sys.stdout)) + out_to_id.update({os.path.relpath(out, self.builddir): i['id'] + for out in i['filename']}) + name_to_out.update({i['name']: i['filename']}) + for group in i['target_sources']: + src_to_id.update({os.path.relpath(src, testdir): i['id'] + for src in group['sources']}) + + # Check Tests and benchmarks + tests_to_find = ['test case 1', 'test case 2', 'benchmark 1'] + deps_to_find = {'test case 1': [src_to_id['t1.cpp']], + 'test case 2': [src_to_id['t2.cpp'], src_to_id['t3.cpp']], + 'benchmark 1': [out_to_id['file2'], out_to_id['file3'], out_to_id['file4'], src_to_id['t3.cpp']]} + for i in res['benchmarks'] + res['tests']: + assertKeyTypes(test_keylist, i) + if i['name'] in tests_to_find: + tests_to_find.remove(i['name']) + self.assertEqual(sorted(i['depends']), + sorted(deps_to_find[i['name']])) + self.assertListEqual(tests_to_find, []) + + # Check buildoptions + buildopts_to_find = {'cpp_std': 'c++11'} + for i in res['buildoptions']: + assertKeyTypes(buildoptions_keylist, i) + valid_type = False + for j in buildoptions_typelist: + if i['type'] == j[0]: + self.assertIsInstance(i['value'], j[1]) + assertKeyTypes(j[2], i, strict=False) + valid_type = True + break + + self.assertIn(i['section'], buildoptions_sections) + self.assertIn(i['machine'], buildoptions_machines) + self.assertTrue(valid_type) + if i['name'] in buildopts_to_find: + self.assertEqual(i['value'], buildopts_to_find[i['name']]) + buildopts_to_find.pop(i['name'], None) + self.assertDictEqual(buildopts_to_find, {}) + + # Check buildsystem_files + bs_files = ['meson.build', 'meson_options.txt', 'sharedlib/meson.build', 'staticlib/meson.build'] + bs_files = [os.path.join(testdir, x) for x in bs_files] + self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files))) + + # Check dependencies + dependencies_to_find = ['threads'] + for i in res['dependencies']: + assertKeyTypes(dependencies_typelist, i) + if i['name'] in dependencies_to_find: + dependencies_to_find.remove(i['name']) + self.assertListEqual(dependencies_to_find, []) + + # Check projectinfo + self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subproject_dir': 'subprojects', 'subprojects': []}) + + # Check targets + targets_to_find = { + 'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build', + [os.path.join(testdir, 'sharedlib', 'shared.cpp')]), + 'staticTestLib': ('static library', True, False, 'staticlib/meson.build', + [os.path.join(testdir, 'staticlib', 'static.c')]), + 'custom target test 1': ('custom', False, False, 'meson.build', + [os.path.join(testdir, 'cp.py')]), + 'custom target test 2': ('custom', False, False, 'meson.build', + name_to_out['custom target test 1']), + 'test1': ('executable', True, True, 'meson.build', + [os.path.join(testdir, 't1.cpp')]), + 'test2': ('executable', True, False, 'meson.build', + [os.path.join(testdir, 't2.cpp')]), + 'test3': ('executable', True, False, 'meson.build', + [os.path.join(testdir, 't3.cpp')]), + 'custom target test 3': ('custom', False, False, 'meson.build', + name_to_out['test3']), + } + for i in res['targets']: + assertKeyTypes(targets_typelist, i) + if i['name'] in targets_to_find: + tgt = targets_to_find[i['name']] + 
self.assertEqual(i['type'], tgt[0]) + self.assertEqual(i['build_by_default'], tgt[1]) + self.assertEqual(i['installed'], tgt[2]) + self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3])) + targets_to_find.pop(i['name'], None) + for j in i['target_sources']: + assertKeyTypes(targets_sources_typelist, j) + self.assertEqual(j['sources'], [os.path.normpath(f) for f in tgt[4]]) + self.assertDictEqual(targets_to_find, {}) + + def test_introspect_file_dump_equals_all(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + self.init(testdir) + res_all = self.introspect('--all') + res_file = {} + + root_keylist = [ + 'benchmarks', + 'buildoptions', + 'buildsystem_files', + 'dependencies', + 'installed', + 'projectinfo', + 'targets', + 'tests', + ] + + infodir = os.path.join(self.builddir, 'meson-info') + self.assertPathExists(infodir) + for i in root_keylist: + curr = os.path.join(infodir, f'intro-{i}.json') + self.assertPathExists(curr) + with open(curr, encoding='utf-8') as fp: + res_file[i] = json.load(fp) + + self.assertEqual(res_all, res_file) + + def test_introspect_meson_info(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + introfile = os.path.join(self.builddir, 'meson-info', 'meson-info.json') + self.init(testdir) + self.assertPathExists(introfile) + with open(introfile, encoding='utf-8') as fp: + res1 = json.load(fp) + + for i in ['meson_version', 'directories', 'introspection', 'build_files_updated', 'error']: + self.assertIn(i, res1) + + self.assertEqual(res1['error'], False) + self.assertEqual(res1['build_files_updated'], True) + + def test_introspect_config_update(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + introfile = os.path.join(self.builddir, 'meson-info', 'intro-buildoptions.json') + self.init(testdir) + self.assertPathExists(introfile) + with open(introfile, encoding='utf-8') as fp: + res1 = json.load(fp) + + for i in res1: + if i['name'] == 'cpp_std': + i['value'] = 'c++14' + if i['name'] == 'build.cpp_std': + i['value'] = 'c++14' + if i['name'] == 'buildtype': + i['value'] = 'release' + if i['name'] == 'optimization': + i['value'] = '3' + if i['name'] == 'debug': + i['value'] = False + + self.setconf('-Dcpp_std=c++14') + self.setconf('-Dbuildtype=release') + + with open(introfile, encoding='utf-8') as fp: + res2 = json.load(fp) + + self.assertListEqual(res1, res2) + + def test_introspect_targets_from_source(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + testfile = os.path.join(testdir, 'meson.build') + introfile = os.path.join(self.builddir, 'meson-info', 'intro-targets.json') + self.init(testdir) + self.assertPathExists(introfile) + with open(introfile, encoding='utf-8') as fp: + res_wb = json.load(fp) + + res_nb = self.introspect_directory(testfile, ['--targets'] + self.meson_args) + + # Account for differences in output + res_wb = [i for i in res_wb if i['type'] != 'custom'] + for i in res_wb: + i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']] + if 'install_filename' in i: + del i['install_filename'] + + sources = [] + for j in i['target_sources']: + sources += j['sources'] + i['target_sources'] = [{ + 'language': 'unknown', + 'compiler': [], + 'parameters': [], + 'sources': sources, + 'generated_sources': [] + }] + + self.maxDiff = None + self.assertListEqual(res_nb, res_wb) + + def test_introspect_ast_source(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + testfile = os.path.join(testdir, 'meson.build') + 
res_nb = self.introspect_directory(testfile, ['--ast'] + self.meson_args) + + node_counter = {} + + def accept_node(json_node): + self.assertIsInstance(json_node, dict) + for i in ['lineno', 'colno', 'end_lineno', 'end_colno']: + self.assertIn(i, json_node) + self.assertIsInstance(json_node[i], int) + self.assertIn('node', json_node) + n = json_node['node'] + self.assertIsInstance(n, str) + self.assertIn(n, nodes) + if n not in node_counter: + node_counter[n] = 0 + node_counter[n] = node_counter[n] + 1 + for nodeDesc in nodes[n]: + key = nodeDesc[0] + func = nodeDesc[1] + self.assertIn(key, json_node) + if func is None: + tp = nodeDesc[2] + self.assertIsInstance(json_node[key], tp) + continue + func(json_node[key]) + + def accept_node_list(node_list): + self.assertIsInstance(node_list, list) + for i in node_list: + accept_node(i) + + def accept_kwargs(kwargs): + self.assertIsInstance(kwargs, list) + for i in kwargs: + self.assertIn('key', i) + self.assertIn('val', i) + accept_node(i['key']) + accept_node(i['val']) + + nodes = { + 'BooleanNode': [('value', None, bool)], + 'IdNode': [('value', None, str)], + 'NumberNode': [('value', None, int)], + 'StringNode': [('value', None, str)], + 'FormatStringNode': [('value', None, str)], + 'ContinueNode': [], + 'BreakNode': [], + 'ArgumentNode': [('positional', accept_node_list), ('kwargs', accept_kwargs)], + 'ArrayNode': [('args', accept_node)], + 'DictNode': [('args', accept_node)], + 'EmptyNode': [], + 'OrNode': [('left', accept_node), ('right', accept_node)], + 'AndNode': [('left', accept_node), ('right', accept_node)], + 'ComparisonNode': [('left', accept_node), ('right', accept_node), ('ctype', None, str)], + 'ArithmeticNode': [('left', accept_node), ('right', accept_node), ('op', None, str)], + 'NotNode': [('right', accept_node)], + 'CodeBlockNode': [('lines', accept_node_list)], + 'IndexNode': [('object', accept_node), ('index', accept_node)], + 'MethodNode': [('object', accept_node), ('args', accept_node), ('name', None, str)], + 'FunctionNode': [('args', accept_node), ('name', None, str)], + 'AssignmentNode': [('value', accept_node), ('var_name', None, str)], + 'PlusAssignmentNode': [('value', accept_node), ('var_name', None, str)], + 'ForeachClauseNode': [('items', accept_node), ('block', accept_node), ('varnames', None, list)], + 'IfClauseNode': [('ifs', accept_node_list), ('else', accept_node)], + 'IfNode': [('condition', accept_node), ('block', accept_node)], + 'UMinusNode': [('right', accept_node)], + 'TernaryNode': [('condition', accept_node), ('true', accept_node), ('false', accept_node)], + } + + accept_node(res_nb) + + for n, c in [('ContinueNode', 2), ('BreakNode', 1), ('NotNode', 3)]: + self.assertIn(n, node_counter) + self.assertEqual(node_counter[n], c) + + def test_introspect_dependencies_from_source(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + testfile = os.path.join(testdir, 'meson.build') + res_nb = self.introspect_directory(testfile, ['--scan-dependencies'] + self.meson_args) + expected = [ + { + 'name': 'threads', + 'required': True, + 'version': [], + 'has_fallback': False, + 'conditional': False + }, + { + 'name': 'zlib', + 'required': False, + 'version': [], + 'has_fallback': False, + 'conditional': False + }, + { + 'name': 'bugDep1', + 'required': True, + 'version': [], + 'has_fallback': False, + 'conditional': False + }, + { + 'name': 'somethingthatdoesnotexist', + 'required': True, + 'version': ['>=1.2.3'], + 'has_fallback': False, + 'conditional': True + }, + { + 'name': 
'look_i_have_a_fallback', + 'required': True, + 'version': ['>=1.0.0', '<=99.9.9'], + 'has_fallback': True, + 'conditional': True + } + ] + self.maxDiff = None + self.assertListEqual(res_nb, expected) + + def test_unstable_coredata(self): + testdir = os.path.join(self.common_test_dir, '1 trivial') + self.init(testdir) + # just test that the command does not fail (e.g. because it throws an exception) + self._run([*self.meson_command, 'unstable-coredata', self.builddir]) + + @skip_if_no_cmake + def test_cmake_prefix_path(self): + testdir = os.path.join(self.unit_test_dir, '63 cmake_prefix_path') + self.init(testdir, extra_args=['-Dcmake_prefix_path=' + os.path.join(testdir, 'prefix')]) + + @skip_if_no_cmake + def test_cmake_parser(self): + testdir = os.path.join(self.unit_test_dir, '64 cmake parser') + self.init(testdir, extra_args=['-Dcmake_prefix_path=' + os.path.join(testdir, 'prefix')]) + + def test_alias_target(self): + if self.backend is Backend.vs: + # FIXME: This unit test is broken with vs backend, needs investigation + raise unittest.SkipTest(f'Skipping alias_target test with {self.backend.name} backend') + testdir = os.path.join(self.unit_test_dir, '65 alias target') + self.init(testdir) + self.build() + self.assertPathDoesNotExist(os.path.join(self.builddir, 'prog' + exe_suffix)) + self.assertPathDoesNotExist(os.path.join(self.builddir, 'hello.txt')) + self.run_target('build-all') + self.assertPathExists(os.path.join(self.builddir, 'prog' + exe_suffix)) + self.assertPathExists(os.path.join(self.builddir, 'hello.txt')) + + def test_configure(self): + testdir = os.path.join(self.common_test_dir, '2 cpp') + self.init(testdir) + self._run(self.mconf_command + [self.builddir]) + + def test_summary(self): + testdir = os.path.join(self.unit_test_dir, '72 summary') + out = self.init(testdir) + expected = textwrap.dedent(r''' + Some Subproject 2.0 + + string : bar + integer: 1 + boolean: True + + subsub undefined + + Something: Some value + + My Project 1.0 + + Configuration + Some boolean : False + Another boolean: True + Some string : Hello World + A list : string + 1 + True + empty list : + enabled_opt : enabled + A number : 1 + yes : YES + no : NO + coma list : a, b, c + + Stuff + missing prog : NO + existing prog : ''' + sys.executable + ''' + missing dep : NO + external dep : YES 1.2.3 + internal dep : YES + + Plugins + long coma list : alpha, alphacolor, apetag, audiofx, audioparsers, auparse, + autodetect, avi + + Subprojects + sub : YES + sub2 : NO Problem encountered: This subproject failed + subsub : YES + ''') + expected_lines = expected.split('\n')[1:] + out_start = out.find(expected_lines[0]) + out_lines = out[out_start:].split('\n')[:len(expected_lines)] + if sys.version_info < (3, 7, 0): + # Dictionary order is not stable in Python <3.7, so sort the lines + # while comparing + expected_lines = sorted(expected_lines) + out_lines = sorted(out_lines) + for e, o in zip(expected_lines, out_lines): + if e.startswith(' external dep'): + self.assertRegex(o, r'^ external dep : (YES [0-9.]*|NO)$') + else: + self.assertEqual(o, e) + + def test_meson_compile(self): + """Test the meson compile command.""" + + def get_exe_name(basename: str) -> str: + if is_windows(): + return f'{basename}.exe' + else: + return basename + + def get_shared_lib_name(basename: str) -> str: + if mesonbuild.environment.detect_msys2_arch(): + return f'lib{basename}.dll' + elif is_windows(): + return f'{basename}.dll' + elif is_cygwin(): + return f'cyg{basename}.dll' + elif is_osx(): + return 
f'lib{basename}.dylib' + else: + return f'lib{basename}.so' + + def get_static_lib_name(basename: str) -> str: + return f'lib{basename}.a' + + # Base case (no targets or additional arguments) + + testdir = os.path.join(self.common_test_dir, '1 trivial') + self.init(testdir) + + self._run([*self.meson_command, 'compile', '-C', self.builddir]) + self.assertPathExists(os.path.join(self.builddir, get_exe_name('trivialprog'))) + + # `--clean` + + self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean']) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog'))) + + # Target specified in a project with unique names + + testdir = os.path.join(self.common_test_dir, '6 linkshared') + self.init(testdir, extra_args=['--wipe']) + # Multiple targets and target type specified + self._run([*self.meson_command, 'compile', '-C', self.builddir, 'mylib', 'mycpplib:shared_library']) + # Check that we have a shared lib, but not an executable, i.e. check that target actually worked + self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mylib'))) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('prog'))) + self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mycpplib'))) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('cppprog'))) + + # Target specified in a project with non unique names + + testdir = os.path.join(self.common_test_dir, '185 same target name') + self.init(testdir, extra_args=['--wipe']) + self._run([*self.meson_command, 'compile', '-C', self.builddir, './foo']) + self.assertPathExists(os.path.join(self.builddir, get_static_lib_name('foo'))) + self._run([*self.meson_command, 'compile', '-C', self.builddir, 'sub/foo']) + self.assertPathExists(os.path.join(self.builddir, 'sub', get_static_lib_name('foo'))) + + # run_target + + testdir = os.path.join(self.common_test_dir, '51 run target') + self.init(testdir, extra_args=['--wipe']) + out = self._run([*self.meson_command, 'compile', '-C', self.builddir, 'py3hi']) + self.assertIn('I am Python3.', out) + + # `--$BACKEND-args` + + testdir = os.path.join(self.common_test_dir, '1 trivial') + if self.backend is Backend.ninja: + self.init(testdir, extra_args=['--wipe']) + # Dry run - should not create a program + self._run([*self.meson_command, 'compile', '-C', self.builddir, '--ninja-args=-n']) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog'))) + elif self.backend is Backend.vs: + self.init(testdir, extra_args=['--wipe']) + self._run([*self.meson_command, 'compile', '-C', self.builddir]) + # Explicitly clean the target through msbuild interface + self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', get_exe_name('trivialprog')))]) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog'))) + + def test_spurious_reconfigure_built_dep_file(self): + testdir = os.path.join(self.unit_test_dir, '74 dep files') + + # Regression test: Spurious reconfigure was happening when build + # directory is inside source directory. + # See https://gitlab.freedesktop.org/gstreamer/gst-build/-/issues/85. + srcdir = os.path.join(self.builddir, 'srctree') + shutil.copytree(testdir, srcdir) + builddir = os.path.join(srcdir, '_build') + self.change_builddir(builddir) + + self.init(srcdir) + self.build() + + # During first configure the file did not exist so no dependency should + # have been set. 
A rebuild should not trigger a reconfigure. + self.clean() + out = self.build() + self.assertNotIn('Project configured', out) + + self.init(srcdir, extra_args=['--reconfigure']) + + # During the reconfigure the file did exist, but is inside build + # directory, so no dependency should have been set. A rebuild should not + # trigger a reconfigure. + self.clean() + out = self.build() + self.assertNotIn('Project configured', out) + + def _test_junit(self, case: str) -> None: + try: + import lxml.etree as et + except ImportError: + raise unittest.SkipTest('lxml required, but not found.') + + schema = et.XMLSchema(et.parse(str(Path(__file__).parent / 'data' / 'schema.xsd'))) + + self.init(case) + self.run_tests() + + junit = et.parse(str(Path(self.builddir) / 'meson-logs' / 'testlog.junit.xml')) + try: + schema.assertValid(junit) + except et.DocumentInvalid as e: + self.fail(e.error_log) + + def test_junit_valid_tap(self): + self._test_junit(os.path.join(self.common_test_dir, '206 tap tests')) + + def test_junit_valid_exitcode(self): + self._test_junit(os.path.join(self.common_test_dir, '41 test args')) + + def test_junit_valid_gtest(self): + self._test_junit(os.path.join(self.framework_test_dir, '2 gtest')) + + def test_link_language_linker(self): + # TODO: there should be some way to query how we're linking things + # without resorting to reading the ninja.build file + if self.backend is not Backend.ninja: + raise unittest.SkipTest('This test reads the ninja file') + + testdir = os.path.join(self.common_test_dir, '225 link language') + self.init(testdir) + + build_ninja = os.path.join(self.builddir, 'build.ninja') + with open(build_ninja, encoding='utf-8') as f: + contents = f.read() + + self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER') + self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER') + + def test_commands_documented(self): + ''' + Test that all listed meson commands are documented in Commands.md. + ''' + + # The docs directory is not in release tarballs. + if not os.path.isdir('docs'): + raise unittest.SkipTest('Doc directory does not exist.') + doc_path = 'docs/markdown/Commands.md' + + md = None + with open(doc_path, encoding='utf-8') as f: + md = f.read() + self.assertIsNotNone(md) + + ## Get command sections + + section_pattern = re.compile(r'^### (.+)$', re.MULTILINE) + md_command_section_matches = [i for i in section_pattern.finditer(md)] + md_command_sections = dict() + for i, s in enumerate(md_command_section_matches): + section_end = len(md) if i == len(md_command_section_matches) - 1 else md_command_section_matches[i + 1].start() + md_command_sections[s.group(1)] = (s.start(), section_end) + + ## Validate commands + + md_commands = {k for k,v in md_command_sections.items()} + + help_output = self._run(self.meson_command + ['--help']) + help_commands = {c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(',')} + + self.assertEqual(md_commands | {'help'}, help_commands, f'Doc file: `{doc_path}`') + + ## Validate that each section has proper placeholders + + def get_data_pattern(command): + return re.compile( + r'{{ ' + command + r'_usage.inc }}[\r\n]' + r'.*?' + r'{{ ' + command + r'_arguments.inc }}[\r\n]', + flags = re.MULTILINE|re.DOTALL) + + for command in md_commands: + m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1]) + self.assertIsNotNone(m, f'Command `{command}` is missing placeholders for dynamic data. 
Doc file: `{doc_path}`') + + def _check_coverage_files(self, types=('text', 'xml', 'html')): + covdir = Path(self.builddir) / 'meson-logs' + files = [] + if 'text' in types: + files.append('coverage.txt') + if 'xml' in types: + files.append('coverage.xml') + if 'html' in types: + files.append('coveragereport/index.html') + for f in files: + self.assertTrue((covdir / f).is_file(), msg=f'{f} is not a file') + + def test_coverage(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage') + self._check_coverage_files() + + def test_coverage_complex(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '105 generatorcustom') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage') + self._check_coverage_files() + + def test_coverage_html(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage-html') + self._check_coverage_files(['html']) + + def test_coverage_text(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise 
unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage-text') + self._check_coverage_files(['text']) + + def test_coverage_xml(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage-xml') + self._check_coverage_files(['xml']) + + def test_coverage_escaping(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '243 escape++') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage') + self._check_coverage_files() + + def test_cross_file_constants(self): + with temp_filename() as crossfile1, temp_filename() as crossfile2: + with open(crossfile1, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent( + ''' + [constants] + compiler = 'gcc' + ''')) + with open(crossfile2, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent( + ''' + [constants] + toolchain = '/toolchain/' + common_flags = ['--sysroot=' + toolchain / 'sysroot'] + + [properties] + c_args = common_flags + ['-DSOMETHING'] + cpp_args = c_args + ['-DSOMETHING_ELSE'] + + [binaries] + c = toolchain / compiler + ''')) + + values = mesonbuild.coredata.parse_machine_files([crossfile1, crossfile2]) + self.assertEqual(values['binaries']['c'], '/toolchain/gcc') + self.assertEqual(values['properties']['c_args'], + ['--sysroot=/toolchain/sysroot', '-DSOMETHING']) + self.assertEqual(values['properties']['cpp_args'], + ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE']) + + @unittest.skipIf(is_windows(), 'Directory cleanup fails for some reason') + def test_wrap_git(self): + with tempfile.TemporaryDirectory() as tmpdir: + srcdir = os.path.join(tmpdir, 'src') + shutil.copytree(os.path.join(self.unit_test_dir, '81 wrap-git'), srcdir) + upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream') + upstream_uri = Path(upstream).as_uri() + _git_init(upstream) + with open(os.path.join(srcdir, 'subprojects', 'wrap_git.wrap'), 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(''' + [wrap-git] + url = {} + patch_directory = wrap_git_builddef + revision 
= master + '''.format(upstream_uri))) + self.init(srcdir) + self.build() + self.run_tests() + + def test_multi_output_custom_target_no_warning(self): + testdir = os.path.join(self.common_test_dir, '228 custom_target source') + + out = self.init(testdir) + self.assertNotRegex(out, 'WARNING:.*Using the first one.') + self.build() + self.run_tests() + + @unittest.skipUnless(is_linux() and (re.search('^i.86$|^x86$|^x64$|^x86_64$|^amd64$', platform.processor()) is not None), + 'Requires ASM compiler for x86 or x86_64 platform currently only available on Linux CI runners') + def test_nostdlib(self): + testdir = os.path.join(self.unit_test_dir, '78 nostdlib') + machinefile = os.path.join(self.builddir, 'machine.txt') + with open(machinefile, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(''' + [properties] + c_stdlib = 'mylibc' + ''')) + + # Test native C stdlib + self.meson_native_file = machinefile + self.init(testdir) + self.build() + + # Test cross C stdlib + self.new_builddir() + self.meson_native_file = None + self.meson_cross_file = machinefile + self.init(testdir) + self.build() + + def test_meson_version_compare(self): + testdir = os.path.join(self.unit_test_dir, '82 meson version compare') + out = self.init(testdir) + self.assertNotRegex(out, r'WARNING') + + def test_wrap_redirect(self): + redirect_wrap = os.path.join(self.builddir, 'redirect.wrap') + real_wrap = os.path.join(self.builddir, 'foo/subprojects/real.wrap') + os.makedirs(os.path.dirname(real_wrap)) + + # Invalid redirect, filename must have .wrap extension + with open(redirect_wrap, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(''' + [wrap-redirect] + filename = foo/subprojects/real.wrapper + ''')) + with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be a .wrap file'): + PackageDefinition(redirect_wrap) + + # Invalid redirect, filename cannot be in parent directory + with open(redirect_wrap, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(''' + [wrap-redirect] + filename = ../real.wrap + ''')) + with self.assertRaisesRegex(WrapException, 'wrap-redirect filename cannot contain ".."'): + PackageDefinition(redirect_wrap) + + # Invalid redirect, filename must be in foo/subprojects/real.wrap + with open(redirect_wrap, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(''' + [wrap-redirect] + filename = foo/real.wrap + ''')) + with self.assertRaisesRegex(WrapException, 'wrap-redirect filename must be in the form foo/subprojects/bar.wrap'): + wrap = PackageDefinition(redirect_wrap) + + # Correct redirect + with open(redirect_wrap, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(''' + [wrap-redirect] + filename = foo/subprojects/real.wrap + ''')) + with open(real_wrap, 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(''' + [wrap-git] + url = http://invalid + ''')) + wrap = PackageDefinition(redirect_wrap) + self.assertEqual(wrap.get('url'), 'http://invalid') + + @skip_if_no_cmake + def test_nested_cmake_rebuild(self) -> None: + # This checks a bug where if a non-meson project is used as a third + # level (or deeper) subproject it doesn't cause a rebuild if the build + # files for that project are changed + testdir = os.path.join(self.unit_test_dir, '85 nested subproject regenerate depends') + cmakefile = Path(testdir) / 'subprojects' / 'sub2' / 'CMakeLists.txt' + self.init(testdir) + self.build() + with cmakefile.open('a', encoding='utf-8') as f: + os.utime(str(cmakefile)) + self.assertReconfiguredBuildIsNoop() + + def test_version_file(self): + srcdir = 
os.path.join(self.common_test_dir, '2 cpp') + self.init(srcdir) + projinfo = self.introspect('--projectinfo') + self.assertEqual(projinfo['version'], '1.0.0') + + def test_cflags_cppflags(self): + envs = {'CPPFLAGS': '-DCPPFLAG', + 'CFLAGS': '-DCFLAG', + 'CXXFLAGS': '-DCXXFLAG'} + srcdir = os.path.join(self.unit_test_dir, '89 multiple envvars') + self.init(srcdir, override_envvars=envs) + self.build() + + def test_build_b_options(self) -> None: + # Currently (0.57) these do nothing, but they've always been allowed + srcdir = os.path.join(self.common_test_dir, '2 cpp') + self.init(srcdir, extra_args=['-Dbuild.b_lto=true']) + + def test_install_skip_subprojects(self): + testdir = os.path.join(self.unit_test_dir, '92 install skip subprojects') + self.init(testdir) + self.build() + + main_expected = [ + '', + 'share', + 'include', + 'foo', + 'bin', + 'share/foo', + 'share/foo/foo.dat', + 'include/foo.h', + 'foo/foofile', + 'bin/foo' + exe_suffix, + ] + bar_expected = [ + 'bar', + 'share/foo/bar.dat', + 'include/bar.h', + 'bin/bar' + exe_suffix, + 'bar/barfile' + ] + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_argument_syntax() == 'msvc': + main_expected.append('bin/foo.pdb') + bar_expected.append('bin/bar.pdb') + prefix = destdir_join(self.installdir, self.prefix) + main_expected = [Path(prefix, p) for p in main_expected] + bar_expected = [Path(prefix, p) for p in bar_expected] + all_expected = main_expected + bar_expected + + def check_installed_files(extra_args, expected): + args = ['install', '--destdir', self.installdir] + extra_args + self._run(self.meson_command + args, workdir=self.builddir) + all_files = [p for p in Path(self.installdir).rglob('*')] + self.assertEqual(sorted(expected), sorted(all_files)) + windows_proof_rmtree(self.installdir) + + check_installed_files([], all_expected) + check_installed_files(['--skip-subprojects'], main_expected) + check_installed_files(['--skip-subprojects', 'bar'], main_expected) + check_installed_files(['--skip-subprojects', 'another'], all_expected) + + def test_adding_subproject_to_configure_project(self) -> None: + srcdir = os.path.join(self.unit_test_dir, '93 new subproject in configured project') + self.init(srcdir) + self.build() + self.setconf('-Duse-sub=true') + self.build() + + def test_devenv(self): + testdir = os.path.join(self.unit_test_dir, '91 devenv') + self.init(testdir) + self.build() + + cmd = self.meson_command + ['devenv', '-C', self.builddir] + script = os.path.join(testdir, 'test-devenv.py') + app = os.path.join(self.builddir, 'app') + self._run(cmd + python_command + [script]) + self.assertEqual('This is text.', self._run(cmd + [app]).strip()) + + def test_clang_format_check(self): + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'Skipping clang-format tests with {self.backend.name} backend') + if not shutil.which('clang-format'): + raise unittest.SkipTest('clang-format not found') + + testdir = os.path.join(self.unit_test_dir, '94 clangformat') + newdir = os.path.join(self.builddir, 'testdir') + shutil.copytree(testdir, newdir) + self.new_builddir() + self.init(newdir) + + # Should reformat 1 file but not return error + output = self.build('clang-format') + self.assertEqual(1, output.count('File reformatted:')) + + # Reset source tree then try again with clang-format-check, it should + # return an error code this time. 
+ windows_proof_rmtree(newdir) + shutil.copytree(testdir, newdir) + with self.assertRaises(subprocess.CalledProcessError): + output = self.build('clang-format-check') + self.assertEqual(1, output.count('File reformatted:')) + + # The check format should not touch any files. Thus + # running format again has some work to do. + output = self.build('clang-format') + self.assertEqual(1, output.count('File reformatted:')) + self.build('clang-format-check') + + def test_custom_target_implicit_include(self): + testdir = os.path.join(self.unit_test_dir, '95 custominc') + self.init(testdir) + self.build() + compdb = self.get_compdb() + matches = 0 + for c in compdb: + if 'prog.c' in c['file']: + self.assertNotIn('easytogrepfor', c['command']) + matches += 1 + self.assertEqual(matches, 1) + matches = 0 + for c in compdb: + if 'prog2.c' in c['file']: + self.assertIn('easytogrepfor', c['command']) + matches += 1 + self.assertEqual(matches, 1) + + def test_env_flags_to_linker(self) -> None: + # Compilers that act as drivers should add their compiler flags to the + # linker, those that do not shouldn't + with mock.patch.dict(os.environ, {'CFLAGS': '-DCFLAG', 'LDFLAGS': '-flto'}): + env = get_fake_env() + + # Get the compiler so we know which compiler class to mock. + cc = mesonbuild.compilers.detect_compiler_for(env, 'c', MachineChoice.HOST) + cc_type = type(cc) + + # Test a compiler that acts as a linker + with mock.patch.object(cc_type, 'INVOKES_LINKER', True): + cc = mesonbuild.compilers.detect_compiler_for(env, 'c', MachineChoice.HOST) + link_args = env.coredata.get_external_link_args(cc.for_machine, cc.language) + self.assertEqual(sorted(link_args), sorted(['-DCFLAG', '-flto'])) + + # And one that doesn't + with mock.patch.object(cc_type, 'INVOKES_LINKER', False): + cc = mesonbuild.compilers.detect_compiler_for(env, 'c', MachineChoice.HOST) + link_args = env.coredata.get_external_link_args(cc.for_machine, cc.language) + self.assertEqual(sorted(link_args), sorted(['-flto'])) + + @skip_if_not_language('rust') + def test_rust_rlib_linkage(self) -> None: + if self.backend is not Backend.ninja: + raise unittest.SkipTest('Rust is only supported with ninja currently') + template = textwrap.dedent('''\ + use std::process::exit; + + pub fn fun() {{ + exit({}); + }} + ''') + + testdir = os.path.join(self.unit_test_dir, '100 rlib linkage') + gen_file = os.path.join(testdir, 'lib.rs') + with open(gen_file, 'w', encoding='utf-8') as f: + f.write(template.format(0)) + self.addCleanup(windows_proof_rm, gen_file) + + self.init(testdir) + self.build() + self.run_tests() + + with open(gen_file, 'w', encoding='utf-8') as f: + f.write(template.format(39)) + + self.build() + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.run_tests() + self.assertEqual(cm.exception.returncode, 1) + self.assertIn('exit status 39', cm.exception.stdout) + +class FailureTests(BasePlatformTests): + ''' + Tests that test failure conditions. Build files here should be dynamically + generated and static tests should go into `test cases/failing*`. + This is useful because there can be many ways in which a particular + function can fail, and creating failing tests for all of them is tedious + and slows down testing. + ''' + dnf = "[Dd]ependency.*not found(:.*)?" 
+ nopkg = '[Pp]kg-config.*not found' + + def setUp(self): + super().setUp() + self.srcdir = os.path.realpath(tempfile.mkdtemp()) + self.mbuild = os.path.join(self.srcdir, 'meson.build') + self.moptions = os.path.join(self.srcdir, 'meson_options.txt') + + def tearDown(self): + super().tearDown() + windows_proof_rmtree(self.srcdir) + + def assertMesonRaises(self, contents, match, *, + extra_args=None, + langs=None, + meson_version=None, + options=None, + override_envvars=None): + ''' + Assert that running meson configure on the specified @contents raises + an error message matching regex @match. + ''' + if langs is None: + langs = [] + with open(self.mbuild, 'w', encoding='utf-8') as f: + f.write("project('failure test', 'c', 'cpp'") + if meson_version: + f.write(f", meson_version: '{meson_version}'") + f.write(")\n") + for lang in langs: + f.write(f"add_languages('{lang}', required : false)\n") + f.write(contents) + if options is not None: + with open(self.moptions, 'w', encoding='utf-8') as f: + f.write(options) + o = {'MESON_FORCE_BACKTRACE': '1'} + if override_envvars is None: + override_envvars = o + else: + override_envvars.update(o) + # Force tracebacks so we can detect them properly + with self.assertRaisesRegex(MesonException, match, msg=contents): + # Must run in-process or we'll get a generic CalledProcessError + self.init(self.srcdir, extra_args=extra_args, + inprocess=True, + override_envvars = override_envvars) + + def obtainMesonOutput(self, contents, match, extra_args, langs, meson_version=None): + if langs is None: + langs = [] + with open(self.mbuild, 'w', encoding='utf-8') as f: + f.write("project('output test', 'c', 'cpp'") + if meson_version: + f.write(f", meson_version: '{meson_version}'") + f.write(")\n") + for lang in langs: + f.write(f"add_languages('{lang}', required : false)\n") + f.write(contents) + # Run in-process for speed and consistency with assertMesonRaises + return self.init(self.srcdir, extra_args=extra_args, inprocess=True) + + def assertMesonOutputs(self, contents, match, extra_args=None, langs=None, meson_version=None): + ''' + Assert that running meson configure on the specified @contents outputs + something that matches regex @match. + ''' + out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version) + self.assertRegex(out, match) + + def assertMesonDoesNotOutput(self, contents, match, extra_args=None, langs=None, meson_version=None): + ''' + Assert that running meson configure on the specified @contents does not output + something that matches regex @match. 
+ ''' + out = self.obtainMesonOutput(contents, match, extra_args, langs, meson_version) + self.assertNotRegex(out, match) + + @skipIfNoPkgconfig + def test_dependency(self): + if subprocess.call(['pkg-config', '--exists', 'zlib']) != 0: + raise unittest.SkipTest('zlib not found with pkg-config') + a = (("dependency('zlib', method : 'fail')", "'fail' is invalid"), + ("dependency('zlib', static : '1')", "[Ss]tatic.*boolean"), + ("dependency('zlib', version : 1)", "Item must be a list or one of "), + ("dependency('zlib', required : 1)", "[Rr]equired.*boolean"), + ("dependency('zlib', method : 1)", "[Mm]ethod.*string"), + ("dependency('zlibfail')", self.dnf),) + for contents, match in a: + self.assertMesonRaises(contents, match) + + def test_apple_frameworks_dependency(self): + if not is_osx(): + raise unittest.SkipTest('only run on macOS') + self.assertMesonRaises("dependency('appleframeworks')", + "requires at least one module") + + def test_extraframework_dependency_method(self): + code = "dependency('python', method : 'extraframework')" + if not is_osx(): + self.assertMesonRaises(code, self.dnf) + else: + # Python2 framework is always available on macOS + self.assertMesonOutputs(code, '[Dd]ependency.*python.*found.*YES') + + def test_sdl2_notfound_dependency(self): + # Want to test failure, so skip if available + if shutil.which('sdl2-config'): + raise unittest.SkipTest('sdl2-config found') + self.assertMesonRaises("dependency('sdl2', method : 'sdlconfig')", self.dnf) + if shutil.which('pkg-config'): + self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", self.dnf) + with no_pkgconfig(): + # Look for pkg-config, cache it, then + # Use cached pkg-config without erroring out, then + # Use cached pkg-config to error out + code = "dependency('foobarrr', method : 'pkg-config', required : false)\n" \ + "dependency('foobarrr2', method : 'pkg-config', required : false)\n" \ + "dependency('sdl2', method : 'pkg-config')" + self.assertMesonRaises(code, self.nopkg) + + def test_gnustep_notfound_dependency(self): + # Want to test failure, so skip if available + if shutil.which('gnustep-config'): + raise unittest.SkipTest('gnustep-config found') + self.assertMesonRaises("dependency('gnustep')", + f"(requires a Objc compiler|{self.dnf})", + langs = ['objc']) + + def test_wx_notfound_dependency(self): + # Want to test failure, so skip if available + if shutil.which('wx-config-3.0') or shutil.which('wx-config') or shutil.which('wx-config-gtk3'): + raise unittest.SkipTest('wx-config, wx-config-3.0 or wx-config-gtk3 found') + self.assertMesonRaises("dependency('wxwidgets')", self.dnf) + self.assertMesonOutputs("dependency('wxwidgets', required : false)", + "Run-time dependency .*WxWidgets.* found: .*NO.*") + + def test_wx_dependency(self): + if not shutil.which('wx-config-3.0') and not shutil.which('wx-config') and not shutil.which('wx-config-gtk3'): + raise unittest.SkipTest('Neither wx-config, wx-config-3.0 nor wx-config-gtk3 found') + self.assertMesonRaises("dependency('wxwidgets', modules : 1)", + "module argument is not a string") + + def test_llvm_dependency(self): + self.assertMesonRaises("dependency('llvm', modules : 'fail')", + f"(required.*fail|{self.dnf})") + + def test_boost_notfound_dependency(self): + # Can be run even if Boost is found or not + self.assertMesonRaises("dependency('boost', modules : 1)", + "module.*not a string") + self.assertMesonRaises("dependency('boost', modules : 'fail')", + f"(fail.*not found|{self.dnf})") + + def test_boost_BOOST_ROOT_dependency(self): + # 
Test BOOST_ROOT; can be run even if Boost is found or not + self.assertMesonRaises("dependency('boost')", + f"(boost_root.*absolute|{self.dnf})", + override_envvars = {'BOOST_ROOT': 'relative/path'}) + + def test_dependency_invalid_method(self): + code = '''zlib_dep = dependency('zlib', required : false) + zlib_dep.get_configtool_variable('foo') + ''' + self.assertMesonRaises(code, ".* is not a config-tool dependency") + code = '''zlib_dep = dependency('zlib', required : false) + dep = declare_dependency(dependencies : zlib_dep) + dep.get_pkgconfig_variable('foo') + ''' + self.assertMesonRaises(code, "Method.*pkgconfig.*is invalid.*internal") + code = '''zlib_dep = dependency('zlib', required : false) + dep = declare_dependency(dependencies : zlib_dep) + dep.get_configtool_variable('foo') + ''' + self.assertMesonRaises(code, "Method.*configtool.*is invalid.*internal") + + def test_objc_cpp_detection(self): + ''' + Test that when we can't detect objc or objcpp, we fail gracefully. + ''' + env = get_fake_env() + try: + detect_objc_compiler(env, MachineChoice.HOST) + detect_objcpp_compiler(env, MachineChoice.HOST) + except EnvironmentException: + code = "add_languages('objc')\nadd_languages('objcpp')" + self.assertMesonRaises(code, "Unknown compiler") + return + raise unittest.SkipTest("objc and objcpp found, can't test detection failure") + + def test_subproject_variables(self): + ''' + Test that: + 1. The correct message is outputted when a not-required dep is not + found and the fallback subproject is also not found. + 2. A not-required fallback dependency is not found because the + subproject failed to parse. + 3. A not-found not-required dep with a fallback subproject outputs the + correct message when the fallback subproject is found but the + variable inside it is not. + 4. A fallback dependency is found from the subproject parsed in (3) + 5. A wrap file from a subproject is used but fails because it does not + contain required keys. 
+ ''' + tdir = os.path.join(self.unit_test_dir, '20 subproj dep variables') + stray_file = os.path.join(tdir, 'subprojects/subsubproject.wrap') + if os.path.exists(stray_file): + windows_proof_rm(stray_file) + out = self.init(tdir, inprocess=True) + self.assertRegex(out, r"Neither a subproject directory nor a .*nosubproj.wrap.* file was found") + self.assertRegex(out, r'Function does not take positional arguments.') + self.assertRegex(out, r'Dependency .*somenotfounddep.* from subproject .*subprojects/somesubproj.* found: .*NO.*') + self.assertRegex(out, r'Dependency .*zlibproxy.* from subproject .*subprojects.*somesubproj.* found: .*YES.*') + self.assertRegex(out, r'Missing key .*source_filename.* in subsubproject.wrap') + windows_proof_rm(stray_file) + + def test_exception_exit_status(self): + ''' + Test exit status on python exception + ''' + tdir = os.path.join(self.unit_test_dir, '21 exit status') + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(tdir, inprocess=False, override_envvars = {'MESON_UNIT_TEST': '1', 'MESON_FORCE_BACKTRACE': ''}) + self.assertEqual(cm.exception.returncode, 2) + self.wipe() + + def test_dict_requires_key_value_pairs(self): + self.assertMesonRaises("dict = {3, 'foo': 'bar'}", + 'Only key:value pairs are valid in dict construction.') + self.assertMesonRaises("{'foo': 'bar', 3}", + 'Only key:value pairs are valid in dict construction.') + + def test_dict_forbids_duplicate_keys(self): + self.assertMesonRaises("dict = {'a': 41, 'a': 42}", + 'Duplicate dictionary key: a.*') + + def test_dict_forbids_integer_key(self): + self.assertMesonRaises("dict = {3: 'foo'}", + 'Key must be a string.*') + + def test_using_too_recent_feature(self): + # Here we use a dict, which was introduced in 0.47.0 + self.assertMesonOutputs("dict = {}", + ".*WARNING.*Project targeting.*but.*", + meson_version='>= 0.46.0') + + def test_using_recent_feature(self): + # Same as above, except the meson version is now appropriate + self.assertMesonDoesNotOutput("dict = {}", + ".*WARNING.*Project targeting.*but.*", + meson_version='>= 0.47') + + def test_using_too_recent_feature_dependency(self): + self.assertMesonOutputs("dependency('pcap', required: false)", + ".*WARNING.*Project targeting.*but.*", + meson_version='>= 0.41.0') + + def test_vcs_tag_featurenew_build_always_stale(self): + 'https://github.com/mesonbuild/meson/issues/3904' + vcs_tag = '''version_data = configuration_data() + version_data.set('PROJVER', '@VCS_TAG@') + vf = configure_file(output : 'version.h.in', configuration: version_data) + f = vcs_tag(input : vf, output : 'version.h') + ''' + msg = '.*WARNING:.*feature.*build_always_stale.*custom_target.*' + self.assertMesonDoesNotOutput(vcs_tag, msg, meson_version='>=0.43') + + def test_missing_subproject_not_required_and_required(self): + self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" + + "sub2 = subproject('not-found-subproject', required: true)", + """.*Subproject "subprojects/not-found-subproject" required but not found.*""") + + def test_get_variable_on_not_found_project(self): + self.assertMesonRaises("sub1 = subproject('not-found-subproject', required: false)\n" + + "sub1.get_variable('naaa')", + """Subproject "subprojects/not-found-subproject" disabled can't get_variable on it.""") + + def test_version_checked_before_parsing_options(self): + ''' + https://github.com/mesonbuild/meson/issues/5281 + ''' + options = "option('some-option', type: 'foo', value: '')" + match = 'Meson version is.*but project requires 
>=2000' + self.assertMesonRaises("", match, meson_version='>=2000', options=options) + + def test_assert_default_message(self): + self.assertMesonRaises("k1 = 'a'\n" + + "assert({\n" + + " k1: 1,\n" + + "}['a'] == 2)\n", + r"Assert failed: {k1 : 1}\['a'\] == 2") + + def test_wrap_nofallback(self): + self.assertMesonRaises("dependency('notfound', fallback : ['foo', 'foo_dep'])", + r"Dependency 'notfound' is required but not found.", + extra_args=['--wrap-mode=nofallback']) + + def test_message(self): + self.assertMesonOutputs("message('Array:', ['a', 'b'])", + r"Message:.* Array: \['a', 'b'\]") + + def test_warning(self): + self.assertMesonOutputs("warning('Array:', ['a', 'b'])", + r"WARNING:.* Array: \['a', 'b'\]") + + def test_override_dependency_twice(self): + self.assertMesonRaises("meson.override_dependency('foo', declare_dependency())\n" + + "meson.override_dependency('foo', declare_dependency())", + """Tried to override dependency 'foo' which has already been resolved or overridden""") + + @unittest.skipIf(is_windows(), 'zlib is not available on Windows') + def test_override_resolved_dependency(self): + self.assertMesonRaises("dependency('zlib')\n" + + "meson.override_dependency('zlib', declare_dependency())", + """Tried to override dependency 'zlib' which has already been resolved or overridden""") + + def test_error_func(self): + self.assertMesonRaises("error('a', 'b', ['c', ['d', {'e': 'f'}]], 'g')", + r"Problem encountered: a b \['c', \['d', {'e' : 'f'}\]\] g") + + +@unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)") +class WindowsTests(BasePlatformTests): + ''' + Tests that should run on Cygwin, MinGW, and MSVC + ''' + + def setUp(self): + super().setUp() + self.platform_test_dir = os.path.join(self.src_root, 'test cases/windows') + + @unittest.skipIf(is_cygwin(), 'Test only applicable to Windows') + @mock.patch.dict(os.environ) + def test_find_program(self): + ''' + Test that Windows-specific edge-cases in find_program are functioning + correctly. Cannot be an ordinary test because it involves manipulating + PATH to point to a directory with Python scripts. 
+ ''' + testdir = os.path.join(self.platform_test_dir, '8 find program') + # Find `cmd` and `cmd.exe` + prog1 = ExternalProgram('cmd') + self.assertTrue(prog1.found(), msg='cmd not found') + prog2 = ExternalProgram('cmd.exe') + self.assertTrue(prog2.found(), msg='cmd.exe not found') + self.assertPathEqual(prog1.get_path(), prog2.get_path()) + # Find cmd.exe with args without searching + prog = ExternalProgram('cmd', command=['cmd', '/C']) + self.assertTrue(prog.found(), msg='cmd not found with args') + self.assertPathEqual(prog.get_command()[0], 'cmd') + # Find cmd with an absolute path that's missing the extension + cmd_path = prog2.get_path()[:-4] + prog = ExternalProgram(cmd_path) + self.assertTrue(prog.found(), msg=f'{cmd_path!r} not found') + # Finding a script with no extension inside a directory works + prog = ExternalProgram(os.path.join(testdir, 'test-script')) + self.assertTrue(prog.found(), msg='test-script not found') + # Finding a script with an extension inside a directory works + prog = ExternalProgram(os.path.join(testdir, 'test-script-ext.py')) + self.assertTrue(prog.found(), msg='test-script-ext.py not found') + # Finding a script in PATH + os.environ['PATH'] += os.pathsep + testdir + # If `.PY` is in PATHEXT, scripts can be found as programs + if '.PY' in [ext.upper() for ext in os.environ['PATHEXT'].split(';')]: + # Finding a script in PATH w/o extension works and adds the interpreter + prog = ExternalProgram('test-script-ext') + self.assertTrue(prog.found(), msg='test-script-ext not found in PATH') + self.assertPathEqual(prog.get_command()[0], python_command[0]) + self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py') + # Finding a script in PATH with extension works and adds the interpreter + prog = ExternalProgram('test-script-ext.py') + self.assertTrue(prog.found(), msg='test-script-ext.py not found in PATH') + self.assertPathEqual(prog.get_command()[0], python_command[0]) + self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py') + # Using a script with an extension directly via command= works and adds the interpreter + prog = ExternalProgram('test-script-ext.py', command=[os.path.join(testdir, 'test-script-ext.py'), '--help']) + self.assertTrue(prog.found(), msg='test-script-ext.py with full path not picked up via command=') + self.assertPathEqual(prog.get_command()[0], python_command[0]) + self.assertPathEqual(prog.get_command()[2], '--help') + self.assertPathBasenameEqual(prog.get_path(), 'test-script-ext.py') + # Using a script without an extension directly via command= works and adds the interpreter + prog = ExternalProgram('test-script', command=[os.path.join(testdir, 'test-script'), '--help']) + self.assertTrue(prog.found(), msg='test-script with full path not picked up via command=') + self.assertPathEqual(prog.get_command()[0], python_command[0]) + self.assertPathEqual(prog.get_command()[2], '--help') + self.assertPathBasenameEqual(prog.get_path(), 'test-script') + # Ensure that WindowsApps gets removed from PATH + path = os.environ['PATH'] + if 'WindowsApps' not in path: + username = os.environ['USERNAME'] + appstore_dir = fr'C:\Users\{username}\AppData\Local\Microsoft\WindowsApps' + path = os.pathsep + appstore_dir + path = ExternalProgram._windows_sanitize_path(path) + self.assertNotIn('WindowsApps', path) + + def test_ignore_libs(self): + ''' + Test that find_library on libs that are to be ignored returns an empty + array of arguments. Must be a unit test because we cannot inspect + ExternalLibraryHolder from build files. 
+ ''' + testdir = os.path.join(self.platform_test_dir, '1 basic') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_argument_syntax() != 'msvc': + raise unittest.SkipTest('Not using MSVC') + # To force people to update this test, and also test + self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt', 'execinfo'}) + for l in cc.ignore_libs: + self.assertEqual(cc.find_library(l, env, []), []) + + def test_rc_depends_files(self): + testdir = os.path.join(self.platform_test_dir, '5 resources') + + # resource compiler depfile generation is not yet implemented for msvc + env = get_fake_env(testdir, self.builddir, self.prefix) + depfile_works = detect_c_compiler(env, MachineChoice.HOST).get_id() not in {'msvc', 'clang-cl', 'intel-cl'} + + self.init(testdir) + self.build() + # Immediately rebuilding should not do anything + self.assertBuildIsNoop() + # Test compile_resources(depend_file:) + # Changing mtime of sample.ico should rebuild prog + self.utime(os.path.join(testdir, 'res', 'sample.ico')) + self.assertRebuiltTarget('prog') + # Test depfile generation by compile_resources + # Changing mtime of resource.h should rebuild myres.rc and then prog + if depfile_works: + self.utime(os.path.join(testdir, 'inc', 'resource', 'resource.h')) + self.assertRebuiltTarget('prog') + self.wipe() + + if depfile_works: + testdir = os.path.join(self.platform_test_dir, '12 resources with custom targets') + self.init(testdir) + self.build() + # Immediately rebuilding should not do anything + self.assertBuildIsNoop() + # Changing mtime of resource.h should rebuild myres_1.rc and then prog_1 + self.utime(os.path.join(testdir, 'res', 'resource.h')) + self.assertRebuiltTarget('prog_1') + + def test_msvc_cpp17(self): + testdir = os.path.join(self.unit_test_dir, '45 vscpp17') + + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_argument_syntax() != 'msvc': + raise unittest.SkipTest('Test only applies to MSVC-like compilers') + + try: + self.init(testdir) + except subprocess.CalledProcessError: + # According to Python docs, output is only stored when + # using check_output. We don't use it, so we can't check + # that the output is correct (i.e. that it failed due + # to the right reason). + return + self.build() + + def test_install_pdb_introspection(self): + testdir = os.path.join(self.platform_test_dir, '1 basic') + + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_argument_syntax() != 'msvc': + raise unittest.SkipTest('Test only applies to MSVC-like compilers') + + self.init(testdir) + installed = self.introspect('--installed') + files = [os.path.basename(path) for path in installed.values()] + + self.assertTrue('prog.pdb' in files) + + def _check_ld(self, name: str, lang: str, expected: str) -> None: + if not shutil.which(name): + raise unittest.SkipTest(f'Could not find {name}.') + envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP[f'{lang}_ld']] + + # Also test a deprecated variable if there is one. 
+ if f'{lang}_ld' in mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP: + envvars.append( + mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP[f'{lang}_ld']) + + for envvar in envvars: + with mock.patch.dict(os.environ, {envvar: name}): + env = get_fake_env() + try: + comp = compiler_from_language(env, lang, MachineChoice.HOST) + except EnvironmentException: + raise unittest.SkipTest(f'Could not find a compiler for {lang}') + self.assertEqual(comp.linker.id, expected) + + def test_link_environment_variable_lld_link(self): + env = get_fake_env() + comp = detect_c_compiler(env, MachineChoice.HOST) + if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler): + raise unittest.SkipTest('GCC cannot be used with link compatible linkers.') + self._check_ld('lld-link', 'c', 'lld-link') + + def test_link_environment_variable_link(self): + env = get_fake_env() + comp = detect_c_compiler(env, MachineChoice.HOST) + if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler): + raise unittest.SkipTest('GCC cannot be used with link compatible linkers.') + self._check_ld('link', 'c', 'link') + + def test_link_environment_variable_optlink(self): + env = get_fake_env() + comp = detect_c_compiler(env, MachineChoice.HOST) + if isinstance(comp, mesonbuild.compilers.GnuLikeCompiler): + raise unittest.SkipTest('GCC cannot be used with link compatible linkers.') + self._check_ld('optlink', 'c', 'optlink') + + @skip_if_not_language('rust') + def test_link_environment_variable_rust(self): + self._check_ld('link', 'rust', 'link') + + @skip_if_not_language('d') + def test_link_environment_variable_d(self): + env = get_fake_env() + comp = detect_d_compiler(env, MachineChoice.HOST) + if comp.id == 'dmd': + raise unittest.SkipTest('meson cannot reliably make DMD use a different linker.') + self._check_ld('lld-link', 'd', 'lld-link') + + def test_pefile_checksum(self): + try: + import pefile + except ImportError: + if is_ci(): + raise + raise unittest.SkipTest('pefile module not found') + testdir = os.path.join(self.common_test_dir, '6 linkshared') + self.init(testdir, extra_args=['--buildtype=release']) + self.build() + # Test that binaries have a non-zero checksum + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + cc_id = cc.get_id() + ld_id = cc.get_linker_id() + dll = glob(os.path.join(self.builddir, '*mycpplib.dll'))[0] + exe = os.path.join(self.builddir, 'cppprog.exe') + for f in (dll, exe): + pe = pefile.PE(f) + msg = f'PE file: {f!r}, compiler: {cc_id!r}, linker: {ld_id!r}' + if cc_id == 'clang-cl': + # Latest clang-cl tested (7.0) does not write checksums out + self.assertFalse(pe.verify_checksum(), msg=msg) + else: + # Verify that a valid checksum was written by all other compilers + self.assertTrue(pe.verify_checksum(), msg=msg) + + def test_qt5dependency_vscrt(self): + ''' + Test that qt5 dependencies use the debug module suffix when b_vscrt is + set to 'mdd' + ''' + # Verify that the `b_vscrt` option is available + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + if OptionKey('b_vscrt') not in cc.base_options: + raise unittest.SkipTest('Compiler does not support setting the VS CRT') + # Verify that qmake is for Qt5 + if not shutil.which('qmake-qt5'): + if not shutil.which('qmake') and not is_ci(): + raise unittest.SkipTest('QMake not found') + output = subprocess.getoutput('qmake --version') + if 'Qt version 5' not in output and not is_ci(): + raise unittest.SkipTest('Qmake found, but it is not for Qt 5.') + # Setup with /MDd + testdir = 
os.path.join(self.framework_test_dir, '4 qt') + self.init(testdir, extra_args=['-Db_vscrt=mdd']) + # Verify that we're linking to the debug versions of Qt DLLs + build_ninja = os.path.join(self.builddir, 'build.ninja') + with open(build_ninja, encoding='utf-8') as f: + contents = f.read() + m = re.search('build qt5core.exe: cpp_LINKER.*Qt5Cored.lib', contents) + self.assertIsNotNone(m, msg=contents) + + def test_compiler_checks_vscrt(self): + ''' + Test that the correct VS CRT is used when running compiler checks + ''' + # Verify that the `b_vscrt` option is available + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + if OptionKey('b_vscrt') not in cc.base_options: + raise unittest.SkipTest('Compiler does not support setting the VS CRT') + + def sanitycheck_vscrt(vscrt): + checks = self.get_meson_log_sanitychecks() + self.assertTrue(len(checks) > 0) + for check in checks: + self.assertIn(vscrt, check) + + testdir = os.path.join(self.common_test_dir, '1 trivial') + self.init(testdir) + sanitycheck_vscrt('/MDd') + + self.new_builddir() + self.init(testdir, extra_args=['-Dbuildtype=debugoptimized']) + sanitycheck_vscrt('/MD') + + self.new_builddir() + self.init(testdir, extra_args=['-Dbuildtype=release']) + sanitycheck_vscrt('/MD') + + self.new_builddir() + self.init(testdir, extra_args=['-Db_vscrt=md']) + sanitycheck_vscrt('/MD') + + self.new_builddir() + self.init(testdir, extra_args=['-Db_vscrt=mdd']) + sanitycheck_vscrt('/MDd') + + self.new_builddir() + self.init(testdir, extra_args=['-Db_vscrt=mt']) + sanitycheck_vscrt('/MT') + + self.new_builddir() + self.init(testdir, extra_args=['-Db_vscrt=mtd']) + sanitycheck_vscrt('/MTd') + + def test_modules(self): + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'C++ modules only work with the Ninja backend (not {self.backend.name}).') + if 'VSCMD_VER' not in os.environ: + raise unittest.SkipTest('C++ modules is only supported with Visual Studio.') + if version_compare(os.environ['VSCMD_VER'], '<16.10.0'): + raise unittest.SkipTest('C++ modules are only supported with VS 2019 Preview or newer.') + self.init(os.path.join(self.unit_test_dir, '86 cpp modules')) + self.build() + + +@unittest.skipUnless(is_osx(), "requires Darwin") +class DarwinTests(BasePlatformTests): + ''' + Tests that should run on macOS + ''' + + def setUp(self): + super().setUp() + self.platform_test_dir = os.path.join(self.src_root, 'test cases/osx') + + def test_apple_bitcode(self): + ''' + Test that -fembed-bitcode is correctly added while compiling and + -bitcode_bundle is added while linking when b_bitcode is true and not + when it is false. This can't be an ordinary test case because we need + to inspect the compiler database. 
+ ''' + testdir = os.path.join(self.platform_test_dir, '7 bitcode') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.id != 'clang': + raise unittest.SkipTest('Not using Clang on OSX') + # Try with bitcode enabled + out = self.init(testdir, extra_args='-Db_bitcode=true') + # Warning was printed + self.assertRegex(out, 'WARNING:.*b_bitcode') + # Compiler options were added + for compdb in self.get_compdb(): + if 'module' in compdb['file']: + self.assertNotIn('-fembed-bitcode', compdb['command']) + else: + self.assertIn('-fembed-bitcode', compdb['command']) + build_ninja = os.path.join(self.builddir, 'build.ninja') + # Linker options were added + with open(build_ninja, encoding='utf-8') as f: + contents = f.read() + m = re.search('LINK_ARGS =.*-bitcode_bundle', contents) + self.assertIsNotNone(m, msg=contents) + # Try with bitcode disabled + self.setconf('-Db_bitcode=false') + # Regenerate build + self.build() + for compdb in self.get_compdb(): + self.assertNotIn('-fembed-bitcode', compdb['command']) + build_ninja = os.path.join(self.builddir, 'build.ninja') + with open(build_ninja, encoding='utf-8') as f: + contents = f.read() + m = re.search('LINK_ARGS =.*-bitcode_bundle', contents) + self.assertIsNone(m, msg=contents) + + def test_apple_bitcode_modules(self): + ''' + Same as above, just for shared_module() + ''' + testdir = os.path.join(self.common_test_dir, '148 shared module resolving symbol in executable') + # Ensure that it builds even with bitcode enabled + self.init(testdir, extra_args='-Db_bitcode=true') + self.build() + self.run_tests() + + def _get_darwin_versions(self, fname): + fname = os.path.join(self.builddir, fname) + out = subprocess.check_output(['otool', '-L', fname], universal_newlines=True) + m = re.match(r'.*version (.*), current version (.*)\)', out.split('\n')[1]) + self.assertIsNotNone(m, msg=out) + return m.groups() + + @skipIfNoPkgconfig + def test_library_versioning(self): + ''' + Ensure that compatibility_version and current_version are set correctly + ''' + testdir = os.path.join(self.platform_test_dir, '2 library versions') + self.init(testdir) + self.build() + targets = {} + for t in self.introspect('--targets'): + targets[t['name']] = t['filename'][0] if isinstance(t['filename'], list) else t['filename'] + self.assertEqual(self._get_darwin_versions(targets['some']), ('7.0.0', '7.0.0')) + self.assertEqual(self._get_darwin_versions(targets['noversion']), ('0.0.0', '0.0.0')) + self.assertEqual(self._get_darwin_versions(targets['onlyversion']), ('1.0.0', '1.0.0')) + self.assertEqual(self._get_darwin_versions(targets['onlysoversion']), ('5.0.0', '5.0.0')) + self.assertEqual(self._get_darwin_versions(targets['intver']), ('2.0.0', '2.0.0')) + self.assertEqual(self._get_darwin_versions(targets['stringver']), ('2.3.0', '2.3.0')) + self.assertEqual(self._get_darwin_versions(targets['stringlistver']), ('2.4.0', '2.4.0')) + self.assertEqual(self._get_darwin_versions(targets['intstringver']), ('1111.0.0', '2.5.0')) + self.assertEqual(self._get_darwin_versions(targets['stringlistvers']), ('2.6.0', '2.6.1')) + + def test_duplicate_rpath(self): + testdir = os.path.join(self.unit_test_dir, '10 build_rpath') + # We purposely pass a duplicate rpath to Meson, in order + # to ascertain that Meson does not call install_name_tool + # with duplicate -delete_rpath arguments, which would + # lead to erroring out on installation + env = {"LDFLAGS": "-Wl,-rpath,/foo/bar"} + self.init(testdir, override_envvars=env) + 
self.build() + self.install() + + def test_removing_unused_linker_args(self): + testdir = os.path.join(self.common_test_dir, '104 has arg') + env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic -framework Foundation'} + self.init(testdir, override_envvars=env) + + def test_objc_versions(self): + # Objective-C always uses the C standard version. + # Objecttive-C++ always uses the C++ standard version. + # This is what most people seem to want and in addition + # it is the only setup supported by Xcode. + testdir = os.path.join(self.objc_test_dir, '1 simple') + self.init(testdir) + self.assertIn('-std=c99', self.get_compdb()[0]['command']) + self.wipe() + testdir = os.path.join(self.objcpp_test_dir, '1 simple') + self.init(testdir) + self.assertIn('-std=c++14', self.get_compdb()[0]['command']) + +@unittest.skipUnless(not is_windows(), "requires something Unix-like") +class LinuxlikeTests(BasePlatformTests): + ''' + Tests that should run on Linux, macOS, and *BSD + ''' + + def test_basic_soname(self): + ''' + Test that the soname is set correctly for shared libraries. This can't + be an ordinary test case because we need to run `readelf` and actually + check the soname. + https://github.com/mesonbuild/meson/issues/785 + ''' + testdir = os.path.join(self.common_test_dir, '4 shared') + self.init(testdir) + self.build() + lib1 = os.path.join(self.builddir, 'libmylib.so') + soname = get_soname(lib1) + self.assertEqual(soname, 'libmylib.so') + + def test_custom_soname(self): + ''' + Test that the soname is set correctly for shared libraries when + a custom prefix and/or suffix is used. This can't be an ordinary test + case because we need to run `readelf` and actually check the soname. + https://github.com/mesonbuild/meson/issues/785 + ''' + testdir = os.path.join(self.common_test_dir, '24 library versions') + self.init(testdir) + self.build() + lib1 = os.path.join(self.builddir, 'prefixsomelib.suffix') + soname = get_soname(lib1) + self.assertEqual(soname, 'prefixsomelib.suffix') + + def test_pic(self): + ''' + Test that -fPIC is correctly added to static libraries when b_staticpic + is true and not when it is false. This can't be an ordinary test case + because we need to inspect the compiler database. + ''' + if is_windows() or is_cygwin() or is_osx(): + raise unittest.SkipTest('PIC not relevant') + + testdir = os.path.join(self.common_test_dir, '3 static') + self.init(testdir) + compdb = self.get_compdb() + self.assertIn('-fPIC', compdb[0]['command']) + self.setconf('-Db_staticpic=false') + # Regenerate build + self.build() + compdb = self.get_compdb() + self.assertNotIn('-fPIC', compdb[0]['command']) + + @mock.patch.dict(os.environ) + def test_pkgconfig_gen(self): + ''' + Test that generated pkg-config files can be found and have the correct + version and link args. This can't be an ordinary test case because we + need to run pkg-config outside of a Meson build file. 
+ https://github.com/mesonbuild/meson/issues/889 + ''' + testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen') + self.init(testdir) + env = get_fake_env(testdir, self.builddir, self.prefix) + kwargs = {'required': True, 'silent': True} + os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir + foo_dep = PkgConfigDependency('libfoo', env, kwargs) + self.assertTrue(foo_dep.found()) + self.assertEqual(foo_dep.get_version(), '1.0') + self.assertIn('-lfoo', foo_dep.get_link_args()) + self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar') + self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data') + + libhello_nolib = PkgConfigDependency('libhello_nolib', env, kwargs) + self.assertTrue(libhello_nolib.found()) + self.assertEqual(libhello_nolib.get_link_args(), []) + self.assertEqual(libhello_nolib.get_compile_args(), []) + self.assertEqual(libhello_nolib.get_pkgconfig_variable('foo', {}), 'bar') + self.assertEqual(libhello_nolib.get_pkgconfig_variable('prefix', {}), self.prefix) + self.assertEqual(libhello_nolib.get_pkgconfig_variable('escaped_var', {}), r'hello\ world') + self.assertEqual(libhello_nolib.get_pkgconfig_variable('unescaped_var', {}), 'hello world') + + cc = detect_c_compiler(env, MachineChoice.HOST) + if cc.get_id() in {'gcc', 'clang'}: + for name in {'ct', 'ct0'}: + ct_dep = PkgConfigDependency(name, env, kwargs) + self.assertTrue(ct_dep.found()) + self.assertIn('-lct', ct_dep.get_link_args()) + + def test_pkgconfig_gen_deps(self): + ''' + Test that generated pkg-config files correctly handle dependencies + ''' + testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen') + self.init(testdir) + privatedir1 = self.privatedir + + self.new_builddir() + testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen', 'dependencies') + self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': privatedir1}) + privatedir2 = self.privatedir + + env = { + 'PKG_CONFIG_LIBDIR': os.pathsep.join([privatedir1, privatedir2]), + 'PKG_CONFIG_SYSTEM_LIBRARY_PATH': '/usr/lib', + } + self._run(['pkg-config', 'dependency-test', '--validate'], override_envvars=env) + + # pkg-config strips some duplicated flags so we have to parse the + # generated file ourself. 
+ expected = { + 'Requires': 'libexposed', + 'Requires.private': 'libfoo >= 1.0', + 'Libs': '-L${libdir} -llibmain -pthread -lcustom', + 'Libs.private': '-lcustom2 -L${libdir} -llibinternal', + 'Cflags': '-I${includedir} -pthread -DCUSTOM', + } + if is_osx() or is_haiku(): + expected['Cflags'] = expected['Cflags'].replace('-pthread ', '') + with open(os.path.join(privatedir2, 'dependency-test.pc'), encoding='utf-8') as f: + matched_lines = 0 + for line in f: + parts = line.split(':', 1) + if parts[0] in expected: + key = parts[0] + val = parts[1].strip() + expected_val = expected[key] + self.assertEqual(expected_val, val) + matched_lines += 1 + self.assertEqual(len(expected), matched_lines) + + cmd = ['pkg-config', 'requires-test'] + out = self._run(cmd + ['--print-requires'], override_envvars=env).strip().split('\n') + if not is_openbsd(): + self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello'])) + else: + self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello'])) + + cmd = ['pkg-config', 'requires-private-test'] + out = self._run(cmd + ['--print-requires-private'], override_envvars=env).strip().split('\n') + if not is_openbsd(): + self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello'])) + else: + self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello'])) + + cmd = ['pkg-config', 'pub-lib-order'] + out = self._run(cmd + ['--libs'], override_envvars=env).strip().split() + self.assertEqual(out, ['-llibmain2', '-llibinternal']) + + # See common/44 pkgconfig-gen/meson.build for description of the case this test + with open(os.path.join(privatedir1, 'simple2.pc'), encoding='utf-8') as f: + content = f.read() + self.assertIn('Libs: -L${libdir} -lsimple2 -lsimple1', content) + self.assertIn('Libs.private: -lz', content) + + with open(os.path.join(privatedir1, 'simple3.pc'), encoding='utf-8') as f: + content = f.read() + self.assertEqual(1, content.count('-lsimple3')) + + with open(os.path.join(privatedir1, 'simple5.pc'), encoding='utf-8') as f: + content = f.read() + self.assertNotIn('-lstat2', content) + + @mock.patch.dict(os.environ) + def test_pkgconfig_uninstalled(self): + testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen') + self.init(testdir) + self.build() + + os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(self.builddir, 'meson-uninstalled') + if is_cygwin(): + os.environ['PATH'] += os.pathsep + self.builddir + + self.new_builddir() + testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen', 'dependencies') + self.init(testdir) + self.build() + self.run_tests() + + def test_pkg_unfound(self): + testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig') + self.init(testdir) + with open(os.path.join(self.privatedir, 'somename.pc'), encoding='utf-8') as f: + pcfile = f.read() + self.assertFalse('blub_blob_blib' in pcfile) + + def test_symlink_builddir(self): + ''' + Test using a symlink as either the builddir for "setup" or + the argument for "-C". + ''' + testdir = os.path.join(self.common_test_dir, '1 trivial') + os.symlink(self.builddir, self.builddir + '-symlink') + self.change_builddir(self.builddir + '-symlink') + self.init(testdir) + self.build() + self._run(self.mtest_command) + + def test_vala_c_warnings(self): + ''' + Test that no warnings are emitted for C code generated by Vala. This + can't be an ordinary test case because we need to inspect the compiler + database. 
+ https://github.com/mesonbuild/meson/issues/864 + ''' + if not shutil.which('valac'): + raise unittest.SkipTest('valac not installed.') + testdir = os.path.join(self.vala_test_dir, '5 target glib') + self.init(testdir) + compdb = self.get_compdb() + vala_command = None + c_command = None + for each in compdb: + if each['file'].endswith('GLib.Thread.c'): + vala_command = each['command'] + elif each['file'].endswith('GLib.Thread.vala'): + continue + elif each['file'].endswith('retcode.c'): + c_command = each['command'] + else: + m = 'Unknown file {!r} in vala_c_warnings test'.format(each['file']) + raise AssertionError(m) + self.assertIsNotNone(vala_command) + self.assertIsNotNone(c_command) + # -w suppresses all warnings, should be there in Vala but not in C + self.assertIn(" -w ", vala_command) + self.assertNotIn(" -w ", c_command) + # -Wall enables all warnings, should be there in C but not in Vala + self.assertNotIn(" -Wall ", vala_command) + self.assertIn(" -Wall ", c_command) + # -Werror converts warnings to errors, should always be there since it's + # injected by an unrelated piece of code and the project has werror=true + self.assertIn(" -Werror ", vala_command) + self.assertIn(" -Werror ", c_command) + + @skipIfNoPkgconfig + def test_qtdependency_pkgconfig_detection(self): + ''' + Test that qt4 and qt5 detection with pkgconfig works. + ''' + # Verify Qt4 or Qt5 can be found with pkg-config + qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore']) + qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core']) + testdir = os.path.join(self.framework_test_dir, '4 qt') + self.init(testdir, extra_args=['-Dmethod=pkg-config']) + # Confirm that the dependency was found with pkg-config + mesonlog = self.get_meson_log() + if qt4 == 0: + self.assertRegex('\n'.join(mesonlog), + r'Run-time dependency qt4 \(modules: Core\) found: YES 4.* \(pkg-config\)') + if qt5 == 0: + self.assertRegex('\n'.join(mesonlog), + r'Run-time dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)') + + @skip_if_not_base_option('b_sanitize') + def test_generate_gir_with_address_sanitizer(self): + if is_cygwin(): + raise unittest.SkipTest('asan not available on Cygwin') + if is_openbsd(): + raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD') + + testdir = os.path.join(self.framework_test_dir, '7 gnome') + self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false']) + self.build() + + def test_qt5dependency_qmake_detection(self): + ''' + Test that qt5 detection with qmake works. This can't be an ordinary + test case because it involves setting the environment. + ''' + # Verify that qmake is for Qt5 + if not shutil.which('qmake-qt5'): + if not shutil.which('qmake'): + raise unittest.SkipTest('QMake not found') + output = subprocess.getoutput('qmake --version') + if 'Qt version 5' not in output: + raise unittest.SkipTest('Qmake found, but it is not for Qt 5.') + # Disable pkg-config codepath and force searching with qmake/qmake-qt5 + testdir = os.path.join(self.framework_test_dir, '4 qt') + self.init(testdir, extra_args=['-Dmethod=qmake']) + # Confirm that the dependency was found with qmake + mesonlog = self.get_meson_log() + self.assertRegex('\n'.join(mesonlog), + r'Run-time dependency qt5 \(modules: Core\) found: YES .* \(qmake\)\n') + + def test_qt6dependency_qmake_detection(self): + ''' + Test that qt6 detection with qmake works. This can't be an ordinary + test case because it involves setting the environment. 
+ '''
+ # Verify that qmake is for Qt 6
+ if not shutil.which('qmake-qt6'):
+ if not shutil.which('qmake'):
+ raise unittest.SkipTest('QMake not found')
+ output = subprocess.getoutput('qmake --version')
+ if 'Qt version 6' not in output:
+ raise unittest.SkipTest('Qmake found, but it is not for Qt 6.')
+ # Disable pkg-config codepath and force searching with qmake/qmake-qt6
+ testdir = os.path.join(self.framework_test_dir, '4 qt')
+ self.init(testdir, extra_args=['-Dmethod=qmake'])
+ # Confirm that the dependency was found with qmake
+ mesonlog = self.get_meson_log()
+ self.assertRegex('\n'.join(mesonlog),
+ r'Run-time dependency qt6 \(modules: Core\) found: YES .* \(qmake\)\n')
+
+ def glob_sofiles_without_privdir(self, g):
+ files = glob(g)
+ return [f for f in files if not f.endswith('.p')]
+
+ def _test_soname_impl(self, libpath, install):
+ if is_cygwin() or is_osx():
+ raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames')
+
+ testdir = os.path.join(self.unit_test_dir, '1 soname')
+ self.init(testdir)
+ self.build()
+ if install:
+ self.install()
+
+ # File without aliases set.
+ nover = os.path.join(libpath, 'libnover.so')
+ self.assertPathExists(nover)
+ self.assertFalse(os.path.islink(nover))
+ self.assertEqual(get_soname(nover), 'libnover.so')
+ self.assertEqual(len(self.glob_sofiles_without_privdir(nover[:-3] + '*')), 1)
+
+ # File with version set
+ verset = os.path.join(libpath, 'libverset.so')
+ self.assertPathExists(verset + '.4.5.6')
+ self.assertEqual(os.readlink(verset), 'libverset.so.4')
+ self.assertEqual(get_soname(verset), 'libverset.so.4')
+ self.assertEqual(len(self.glob_sofiles_without_privdir(verset[:-3] + '*')), 3)
+
+ # File with soversion set
+ soverset = os.path.join(libpath, 'libsoverset.so')
+ self.assertPathExists(soverset + '.1.2.3')
+ self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
+ self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
+ self.assertEqual(len(self.glob_sofiles_without_privdir(soverset[:-3] + '*')), 2)
+
+ # File with version and soversion set to same values
+ settosame = os.path.join(libpath, 'libsettosame.so')
+ self.assertPathExists(settosame + '.7.8.9')
+ self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
+ self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
+ self.assertEqual(len(self.glob_sofiles_without_privdir(settosame[:-3] + '*')), 2)
+
+ # File with version and soversion set to different values
+ bothset = os.path.join(libpath, 'libbothset.so')
+ self.assertPathExists(bothset + '.1.2.3')
+ self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
+ self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
+ self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
+ self.assertEqual(len(self.glob_sofiles_without_privdir(bothset[:-3] + '*')), 3)
+
+ def test_soname(self):
+ self._test_soname_impl(self.builddir, False)
+
+ def test_installed_soname(self):
+ libdir = self.installdir + os.path.join(self.prefix, self.libdir)
+ self._test_soname_impl(libdir, True)
+
+ def test_compiler_check_flags_order(self):
+ '''
+ Test that compiler check flags override all other flags. This can't be
+ an ordinary test case because it needs the environment to be set.
+ ''' + testdir = os.path.join(self.common_test_dir, '36 has function') + env = get_fake_env(testdir, self.builddir, self.prefix) + cpp = detect_cpp_compiler(env, MachineChoice.HOST) + Oflag = '-O3' + OflagCPP = Oflag + if cpp.get_id() in ('clang', 'gcc'): + # prevent developers from adding "int main(int argc, char **argv)" + # to small Meson checks unless these parameters are actually used + OflagCPP += ' -Werror=unused-parameter' + env = {'CFLAGS': Oflag, + 'CXXFLAGS': OflagCPP} + self.init(testdir, override_envvars=env) + cmds = self.get_meson_log_compiler_checks() + for cmd in cmds: + if cmd[0] == 'ccache': + cmd = cmd[1:] + # Verify that -I flags from the `args` kwarg are first + # This is set in the '36 has function' test case + self.assertEqual(cmd[1], '-I/tmp') + # Verify that -O3 set via the environment is overridden by -O0 + Oargs = [arg for arg in cmd if arg.startswith('-O')] + self.assertEqual(Oargs, [Oflag, '-O0']) + + def _test_stds_impl(self, testdir: str, compiler: 'Compiler') -> None: + has_cpp17 = (compiler.get_id() not in {'clang', 'gcc'} or + compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=5.0.0', '>=9.1') or + compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=5.0.0')) + has_cpp2a_c17 = (compiler.get_id() not in {'clang', 'gcc'} or + compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=6.0.0', '>=10.0') or + compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0')) + has_cpp20 = (compiler.get_id() not in {'clang', 'gcc'} or + compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=10.0.0', None) or + compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=10.0.0')) + has_c18 = (compiler.get_id() not in {'clang', 'gcc'} or + compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=8.0.0', '>=11.0') or + compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0')) + # Check that all the listed -std=xxx options for this compiler work just fine when used + # https://en.wikipedia.org/wiki/Xcode#Latest_versions + # https://www.gnu.org/software/gcc/projects/cxx-status.html + key = OptionKey('std', lang=compiler.language) + for v in compiler.get_options()[key].choices: + # we do it like this to handle gnu++17,c++17 and gnu17,c17 cleanly + # thus, C++ first + if '++17' in v and not has_cpp17: + continue + elif '++2a' in v and not has_cpp2a_c17: # https://en.cppreference.com/w/cpp/compiler_support + continue + elif '++20' in v and not has_cpp20: + continue + # now C + elif '17' in v and not has_cpp2a_c17: + continue + elif '18' in v and not has_c18: + continue + self.init(testdir, extra_args=[f'-D{key!s}={v}']) + cmd = self.get_compdb()[0]['command'] + # c++03 and gnu++03 are not understood by ICC, don't try to look for them + skiplist = frozenset([ + ('intel', 'c++03'), + ('intel', 'gnu++03')]) + if v != 'none' and not (compiler.get_id(), v) in skiplist: + cmd_std = f" -std={v} " + self.assertIn(cmd_std, cmd) + try: + self.build() + except Exception: + print(f'{key!s} was {v!r}') + raise + self.wipe() + # Check that an invalid std option in CFLAGS/CPPFLAGS fails + # Needed because by default ICC ignores invalid options + cmd_std = '-std=FAIL' + if compiler.language == 'c': + env_flag_name = 'CFLAGS' + elif compiler.language == 'cpp': + env_flag_name = 'CXXFLAGS' + else: + raise NotImplementedError(f'Language {compiler.language} not defined.') + env = {} + env[env_flag_name] = cmd_std + with self.assertRaises((subprocess.CalledProcessError, mesonbuild.mesonlib.EnvironmentException), 
+ msg='C compiler should have failed with -std=FAIL'): + self.init(testdir, override_envvars = env) + # ICC won't fail in the above because additional flags are needed to + # make unknown -std=... options errors. + self.build() + + def test_compiler_c_stds(self): + ''' + Test that C stds specified for this compiler can all be used. Can't be + an ordinary test because it requires passing options to meson. + ''' + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = detect_c_compiler(env, MachineChoice.HOST) + self._test_stds_impl(testdir, cc) + + def test_compiler_cpp_stds(self): + ''' + Test that C++ stds specified for this compiler can all be used. Can't + be an ordinary test because it requires passing options to meson. + ''' + testdir = os.path.join(self.common_test_dir, '2 cpp') + env = get_fake_env(testdir, self.builddir, self.prefix) + cpp = detect_cpp_compiler(env, MachineChoice.HOST) + self._test_stds_impl(testdir, cpp) + + def test_unity_subproj(self): + testdir = os.path.join(self.common_test_dir, '42 subproject') + self.init(testdir, extra_args='--unity=subprojects') + pdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/simpletest*.p')) + self.assertEqual(len(pdirs), 1) + self.assertPathExists(os.path.join(pdirs[0], 'simpletest-unity0.c')) + sdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/*sublib*.p')) + self.assertEqual(len(sdirs), 1) + self.assertPathExists(os.path.join(sdirs[0], 'sublib-unity0.c')) + self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c')) + self.build() + + def test_installed_modes(self): + ''' + Test that files installed by these tests have the correct permissions. + Can't be an ordinary test because our installed_files.txt is very basic. 
+ ''' + # Test file modes + testdir = os.path.join(self.common_test_dir, '12 data') + self.init(testdir) + self.install() + + f = os.path.join(self.installdir, 'etc', 'etcfile.dat') + found_mode = stat.filemode(os.stat(f).st_mode) + want_mode = 'rw------T' + self.assertEqual(want_mode, found_mode[1:]) + + f = os.path.join(self.installdir, 'usr', 'bin', 'runscript.sh') + statf = os.stat(f) + found_mode = stat.filemode(statf.st_mode) + want_mode = 'rwxr-sr-x' + self.assertEqual(want_mode, found_mode[1:]) + if os.getuid() == 0: + # The chown failed nonfatally if we're not root + self.assertEqual(0, statf.st_uid) + self.assertEqual(0, statf.st_gid) + + f = os.path.join(self.installdir, 'usr', 'share', 'progname', + 'fileobject_datafile.dat') + orig = os.path.join(testdir, 'fileobject_datafile.dat') + statf = os.stat(f) + statorig = os.stat(orig) + found_mode = stat.filemode(statf.st_mode) + orig_mode = stat.filemode(statorig.st_mode) + self.assertEqual(orig_mode[1:], found_mode[1:]) + self.assertEqual(os.getuid(), statf.st_uid) + if os.getuid() == 0: + # The chown failed nonfatally if we're not root + self.assertEqual(0, statf.st_gid) + + self.wipe() + # Test directory modes + testdir = os.path.join(self.common_test_dir, '59 install subdir') + self.init(testdir) + self.install() + + f = os.path.join(self.installdir, 'usr', 'share', 'sub1', 'second.dat') + statf = os.stat(f) + found_mode = stat.filemode(statf.st_mode) + want_mode = 'rwxr-x--t' + self.assertEqual(want_mode, found_mode[1:]) + if os.getuid() == 0: + # The chown failed nonfatally if we're not root + self.assertEqual(0, statf.st_uid) + + def test_installed_modes_extended(self): + ''' + Test that files are installed with correct permissions using install_mode. + ''' + testdir = os.path.join(self.common_test_dir, '190 install_mode') + self.init(testdir) + self.build() + self.install() + + for fsobj, want_mode in [ + ('bin', 'drwxr-x---'), + ('bin/runscript.sh', '-rwxr-sr-x'), + ('bin/trivialprog', '-rwxr-sr-x'), + ('include', 'drwxr-x---'), + ('include/config.h', '-rw-rwSr--'), + ('include/rootdir.h', '-r--r--r-T'), + ('lib', 'drwxr-x---'), + ('lib/libstat.a', '-rw---Sr--'), + ('share', 'drwxr-x---'), + ('share/man', 'drwxr-x---'), + ('share/man/man1', 'drwxr-x---'), + ('share/man/man1/foo.1', '-r--r--r-T'), + ('share/sub1', 'drwxr-x---'), + ('share/sub1/second.dat', '-rwxr-x--t'), + ('subdir', 'drwxr-x---'), + ('subdir/data.dat', '-rw-rwSr--'), + ]: + f = os.path.join(self.installdir, 'usr', *fsobj.split('/')) + found_mode = stat.filemode(os.stat(f).st_mode) + self.assertEqual(want_mode, found_mode, + msg=('Expected file %s to have mode %s but found %s instead.' % + (fsobj, want_mode, found_mode))) + # Ensure that introspect --installed works on all types of files + # FIXME: also verify the files list + self.introspect('--installed') + + def test_install_umask(self): + ''' + Test that files are installed with correct permissions using default + install umask of 022, regardless of the umask at time the worktree + was checked out or the build was executed. + ''' + # Copy source tree to a temporary directory and change permissions + # there to simulate a checkout with umask 002. + orig_testdir = os.path.join(self.unit_test_dir, '26 install umask') + # Create a new testdir under tmpdir. 
+ tmpdir = os.path.realpath(tempfile.mkdtemp()) + self.addCleanup(windows_proof_rmtree, tmpdir) + testdir = os.path.join(tmpdir, '26 install umask') + # Copy the tree using shutil.copyfile, which will use the current umask + # instead of preserving permissions of the old tree. + save_umask = os.umask(0o002) + self.addCleanup(os.umask, save_umask) + shutil.copytree(orig_testdir, testdir, copy_function=shutil.copyfile) + # Preserve the executable status of subdir/sayhello though. + os.chmod(os.path.join(testdir, 'subdir', 'sayhello'), 0o775) + self.init(testdir) + # Run the build under a 027 umask now. + os.umask(0o027) + self.build() + # And keep umask 027 for the install step too. + self.install() + + for executable in [ + 'bin/prog', + 'share/subdir/sayhello', + ]: + f = os.path.join(self.installdir, 'usr', *executable.split('/')) + found_mode = stat.filemode(os.stat(f).st_mode) + want_mode = '-rwxr-xr-x' + self.assertEqual(want_mode, found_mode, + msg=('Expected file %s to have mode %s but found %s instead.' % + (executable, want_mode, found_mode))) + + for directory in [ + 'usr', + 'usr/bin', + 'usr/include', + 'usr/share', + 'usr/share/man', + 'usr/share/man/man1', + 'usr/share/subdir', + ]: + f = os.path.join(self.installdir, *directory.split('/')) + found_mode = stat.filemode(os.stat(f).st_mode) + want_mode = 'drwxr-xr-x' + self.assertEqual(want_mode, found_mode, + msg=('Expected directory %s to have mode %s but found %s instead.' % + (directory, want_mode, found_mode))) + + for datafile in [ + 'include/sample.h', + 'share/datafile.cat', + 'share/file.dat', + 'share/man/man1/prog.1', + 'share/subdir/datafile.dog', + ]: + f = os.path.join(self.installdir, 'usr', *datafile.split('/')) + found_mode = stat.filemode(os.stat(f).st_mode) + want_mode = '-rw-r--r--' + self.assertEqual(want_mode, found_mode, + msg=('Expected file %s to have mode %s but found %s instead.' % + (datafile, want_mode, found_mode))) + + def test_cpp_std_override(self): + testdir = os.path.join(self.unit_test_dir, '6 std override') + self.init(testdir) + compdb = self.get_compdb() + # Don't try to use -std=c++03 as a check for the + # presence of a compiler flag, as ICC does not + # support it. + for i in compdb: + if 'prog98' in i['file']: + c98_comp = i['command'] + if 'prog11' in i['file']: + c11_comp = i['command'] + if 'progp' in i['file']: + plain_comp = i['command'] + self.assertNotEqual(len(plain_comp), 0) + self.assertIn('-std=c++98', c98_comp) + self.assertNotIn('-std=c++11', c98_comp) + self.assertIn('-std=c++11', c11_comp) + self.assertNotIn('-std=c++98', c11_comp) + self.assertNotIn('-std=c++98', plain_comp) + self.assertNotIn('-std=c++11', plain_comp) + # Now werror + self.assertIn('-Werror', plain_comp) + self.assertNotIn('-Werror', c98_comp) + + def test_run_installed(self): + if is_cygwin() or is_osx(): + raise unittest.SkipTest('LD_LIBRARY_PATH and RPATH not applicable') + + testdir = os.path.join(self.unit_test_dir, '7 run installed') + self.init(testdir) + self.build() + self.install() + installed_exe = os.path.join(self.installdir, 'usr/bin/prog') + installed_libdir = os.path.join(self.installdir, 'usr/foo') + installed_lib = os.path.join(installed_libdir, 'libfoo.so') + self.assertTrue(os.path.isfile(installed_exe)) + self.assertTrue(os.path.isdir(installed_libdir)) + self.assertTrue(os.path.isfile(installed_lib)) + # Must fail when run without LD_LIBRARY_PATH to ensure that + # rpath has been properly stripped rather than pointing to the builddir. 
+ self.assertNotEqual(subprocess.call(installed_exe, stderr=subprocess.DEVNULL), 0) + # When LD_LIBRARY_PATH is set it should start working. + # For some reason setting LD_LIBRARY_PATH in os.environ fails + # when all tests are run (but works when only this test is run), + # but doing this explicitly works. + env = os.environ.copy() + env['LD_LIBRARY_PATH'] = ':'.join([installed_libdir, env.get('LD_LIBRARY_PATH', '')]) + self.assertEqual(subprocess.call(installed_exe, env=env), 0) + # Ensure that introspect --installed works + installed = self.introspect('--installed') + for v in installed.values(): + self.assertTrue('prog' in v or 'foo' in v) + + @skipIfNoPkgconfig + def test_order_of_l_arguments(self): + testdir = os.path.join(self.unit_test_dir, '8 -L -l order') + self.init(testdir, override_envvars={'PKG_CONFIG_PATH': testdir}) + # NOTE: .pc file has -Lfoo -lfoo -Lbar -lbar but pkg-config reorders + # the flags before returning them to -Lfoo -Lbar -lfoo -lbar + # but pkgconf seems to not do that. Sigh. Support both. + expected_order = [('-L/me/first', '-lfoo1'), + ('-L/me/second', '-lfoo2'), + ('-L/me/first', '-L/me/second'), + ('-lfoo1', '-lfoo2'), + ('-L/me/second', '-L/me/third'), + ('-L/me/third', '-L/me/fourth',), + ('-L/me/third', '-lfoo3'), + ('-L/me/fourth', '-lfoo4'), + ('-lfoo3', '-lfoo4'), + ] + with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as ifile: + for line in ifile: + if expected_order[0][0] in line: + for first, second in expected_order: + self.assertLess(line.index(first), line.index(second)) + return + raise RuntimeError('Linker entries not found in the Ninja file.') + + def test_introspect_dependencies(self): + ''' + Tests that mesonintrospect --dependencies returns expected output. + ''' + testdir = os.path.join(self.framework_test_dir, '7 gnome') + self.init(testdir) + glib_found = False + gobject_found = False + deps = self.introspect('--dependencies') + self.assertIsInstance(deps, list) + for dep in deps: + self.assertIsInstance(dep, dict) + self.assertIn('name', dep) + self.assertIn('compile_args', dep) + self.assertIn('link_args', dep) + if dep['name'] == 'glib-2.0': + glib_found = True + elif dep['name'] == 'gobject-2.0': + gobject_found = True + self.assertTrue(glib_found) + self.assertTrue(gobject_found) + if subprocess.call(['pkg-config', '--exists', 'glib-2.0 >= 2.56.2']) != 0: + raise unittest.SkipTest('glib >= 2.56.2 needed for the rest') + targets = self.introspect('--targets') + docbook_target = None + for t in targets: + if t['name'] == 'generated-gdbus-docbook': + docbook_target = t + break + self.assertIsInstance(docbook_target, dict) + self.assertEqual(os.path.basename(t['filename'][0]), 'generated-gdbus-doc-' + os.path.basename(t['target_sources'][0]['sources'][0])) + + def test_introspect_installed(self): + testdir = os.path.join(self.linuxlike_test_dir, '7 library versions') + self.init(testdir) + + install = self.introspect('--installed') + install = {os.path.basename(k): v for k, v in install.items()} + print(install) + if is_osx(): + the_truth = { + 'libmodule.dylib': '/usr/lib/libmodule.dylib', + 'libnoversion.dylib': '/usr/lib/libnoversion.dylib', + 'libonlysoversion.5.dylib': '/usr/lib/libonlysoversion.5.dylib', + 'libonlysoversion.dylib': '/usr/lib/libonlysoversion.dylib', + 'libonlyversion.1.dylib': '/usr/lib/libonlyversion.1.dylib', + 'libonlyversion.dylib': '/usr/lib/libonlyversion.dylib', + 'libsome.0.dylib': '/usr/lib/libsome.0.dylib', + 'libsome.dylib': '/usr/lib/libsome.dylib', + } + the_truth_2 = 
{'/usr/lib/libsome.dylib', + '/usr/lib/libsome.0.dylib', + } + else: + the_truth = { + 'libmodule.so': '/usr/lib/libmodule.so', + 'libnoversion.so': '/usr/lib/libnoversion.so', + 'libonlysoversion.so': '/usr/lib/libonlysoversion.so', + 'libonlysoversion.so.5': '/usr/lib/libonlysoversion.so.5', + 'libonlyversion.so': '/usr/lib/libonlyversion.so', + 'libonlyversion.so.1': '/usr/lib/libonlyversion.so.1', + 'libonlyversion.so.1.4.5': '/usr/lib/libonlyversion.so.1.4.5', + 'libsome.so': '/usr/lib/libsome.so', + 'libsome.so.0': '/usr/lib/libsome.so.0', + 'libsome.so.1.2.3': '/usr/lib/libsome.so.1.2.3', + } + the_truth_2 = {'/usr/lib/libsome.so', + '/usr/lib/libsome.so.0', + '/usr/lib/libsome.so.1.2.3'} + self.assertDictEqual(install, the_truth) + + targets = self.introspect('--targets') + for t in targets: + if t['name'] != 'some': + continue + self.assertSetEqual(the_truth_2, set(t['install_filename'])) + + def test_build_rpath(self): + if is_cygwin(): + raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') + testdir = os.path.join(self.unit_test_dir, '10 build_rpath') + self.init(testdir) + self.build() + build_rpath = get_rpath(os.path.join(self.builddir, 'prog')) + self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar') + build_rpath = get_rpath(os.path.join(self.builddir, 'progcxx')) + self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar') + self.install() + install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog')) + self.assertEqual(install_rpath, '/baz') + install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx')) + self.assertEqual(install_rpath, 'baz') + + @skipIfNoPkgconfig + def test_build_rpath_pkgconfig(self): + ''' + Test that current build artefacts (libs) are found first on the rpath, + manually specified rpath comes second and additional rpath elements (from + pkg-config files) come last + ''' + if is_cygwin(): + raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') + testdir = os.path.join(self.unit_test_dir, '90 pkgconfig build rpath order') + self.init(testdir, override_envvars={'PKG_CONFIG_PATH': testdir}) + self.build() + build_rpath = get_rpath(os.path.join(self.builddir, 'prog')) + self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar:/foo/dummy') + build_rpath = get_rpath(os.path.join(self.builddir, 'progcxx')) + self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar:/foo/dummy') + self.install() + install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog')) + self.assertEqual(install_rpath, '/baz:/foo/dummy') + install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx')) + self.assertEqual(install_rpath, 'baz:/foo/dummy') + + def test_global_rpath(self): + if is_cygwin(): + raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') + if is_osx(): + raise unittest.SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)') + + testdir = os.path.join(self.unit_test_dir, '80 global-rpath') + oldinstalldir = self.installdir + + # Build and install an external library without DESTDIR. + # The external library generates a .pc file without an rpath. + yonder_dir = os.path.join(testdir, 'yonder') + yonder_prefix = os.path.join(oldinstalldir, 'yonder') + yonder_libdir = os.path.join(yonder_prefix, self.libdir) + self.prefix = yonder_prefix + self.installdir = yonder_prefix + self.init(yonder_dir) + self.build() + self.install(use_destdir=False) + + # Since rpath has multiple valid formats we need to + # test that they are all properly used. 
+ rpath_formats = [ + ('-Wl,-rpath=', False), + ('-Wl,-rpath,', False), + ('-Wl,--just-symbols=', True), + ('-Wl,--just-symbols,', True), + ('-Wl,-R', False), + ('-Wl,-R,', False) + ] + for rpath_format, exception in rpath_formats: + # Build an app that uses that installed library. + # Supply the rpath to the installed library via LDFLAGS + # (as systems like buildroot and guix are wont to do) + # and verify install preserves that rpath. + self.new_builddir() + env = {'LDFLAGS': rpath_format + yonder_libdir, + 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')} + if exception: + with self.assertRaises(subprocess.CalledProcessError): + self.init(testdir, override_envvars=env) + continue + self.init(testdir, override_envvars=env) + self.build() + self.install(use_destdir=False) + got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified')) + self.assertEqual(got_rpath, yonder_libdir, rpath_format) + + @skip_if_not_base_option('b_sanitize') + def test_pch_with_address_sanitizer(self): + if is_cygwin(): + raise unittest.SkipTest('asan not available on Cygwin') + if is_openbsd(): + raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD') + + testdir = os.path.join(self.common_test_dir, '13 pch') + self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false']) + self.build() + compdb = self.get_compdb() + for i in compdb: + self.assertIn("-fsanitize=address", i["command"]) + + def test_cross_find_program(self): + testdir = os.path.join(self.unit_test_dir, '11 cross prog') + crossfile = tempfile.NamedTemporaryFile(mode='w') + print(os.path.join(testdir, 'some_cross_tool.py')) + + tool_path = os.path.join(testdir, 'some_cross_tool.py') + + crossfile.write(textwrap.dedent(f'''\ + [binaries] + c = '{shutil.which('gcc' if is_sunos() else 'cc')}' + ar = '{shutil.which('ar')}' + strip = '{shutil.which('strip')}' + sometool.py = ['{tool_path}'] + someothertool.py = '{tool_path}' + + [properties] + + [host_machine] + system = 'linux' + cpu_family = 'arm' + cpu = 'armv7' # Not sure if correct. + endian = 'little' + ''')) + crossfile.flush() + self.meson_cross_file = crossfile.name + self.init(testdir) + + def test_reconfigure(self): + testdir = os.path.join(self.unit_test_dir, '13 reconfigure') + self.init(testdir, extra_args=['-Db_coverage=true'], default_args=False) + self.build('reconfigure') + + def test_vala_generated_source_buildir_inside_source_tree(self): + ''' + Test that valac outputs generated C files in the expected location when + the builddir is a subdir of the source tree. + ''' + if not shutil.which('valac'): + raise unittest.SkipTest('valac not installed.') + + testdir = os.path.join(self.vala_test_dir, '8 generated sources') + newdir = os.path.join(self.builddir, 'srctree') + shutil.copytree(testdir, newdir) + testdir = newdir + # New builddir + builddir = os.path.join(testdir, 'subdir/_build') + os.makedirs(builddir, exist_ok=True) + self.change_builddir(builddir) + self.init(testdir) + self.build() + + def test_old_gnome_module_codepaths(self): + ''' + A lot of code in the GNOME module is conditional on the version of the + glib tools that are installed, and breakages in the old code can slip + by once the CI has a newer glib version. So we force the GNOME module + to pretend that it's running on an ancient glib so the fallback code is + also tested. 
+ ''' + testdir = os.path.join(self.framework_test_dir, '7 gnome') + mesonbuild.modules.gnome.native_glib_version = '2.20' + env = {'MESON_UNIT_TEST_PRETEND_GLIB_OLD': "1"} + try: + self.init(testdir, + inprocess=True, + override_envvars=env) + self.build(override_envvars=env) + finally: + mesonbuild.modules.gnome.native_glib_version = None + + @skipIfNoPkgconfig + def test_pkgconfig_usage(self): + testdir1 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependency') + testdir2 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependee') + if subprocess.call(['pkg-config', '--cflags', 'glib-2.0'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) != 0: + raise unittest.SkipTest('Glib 2.0 dependency not available.') + with tempfile.TemporaryDirectory() as tempdirname: + self.init(testdir1, extra_args=['--prefix=' + tempdirname, '--libdir=lib'], default_args=False) + self.install(use_destdir=False) + shutil.rmtree(self.builddir) + os.mkdir(self.builddir) + pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig') + self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'libpkgdep.pc'))) + lib_dir = os.path.join(tempdirname, 'lib') + myenv = os.environ.copy() + myenv['PKG_CONFIG_PATH'] = pkg_dir + # Private internal libraries must not leak out. + pkg_out = subprocess.check_output(['pkg-config', '--static', '--libs', 'libpkgdep'], env=myenv) + self.assertFalse(b'libpkgdep-int' in pkg_out, 'Internal library leaked out.') + # Dependencies must not leak to cflags when building only a shared library. + pkg_out = subprocess.check_output(['pkg-config', '--cflags', 'libpkgdep'], env=myenv) + self.assertFalse(b'glib' in pkg_out, 'Internal dependency leaked to headers.') + # Test that the result is usable. + self.init(testdir2, override_envvars=myenv) + self.build(override_envvars=myenv) + myenv = os.environ.copy() + myenv['LD_LIBRARY_PATH'] = ':'.join([lib_dir, myenv.get('LD_LIBRARY_PATH', '')]) + if is_cygwin(): + bin_dir = os.path.join(tempdirname, 'bin') + myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH'] + self.assertTrue(os.path.isdir(lib_dir)) + test_exe = os.path.join(self.builddir, 'pkguser') + self.assertTrue(os.path.isfile(test_exe)) + subprocess.check_call(test_exe, env=myenv) + + @skipIfNoPkgconfig + def test_pkgconfig_relative_paths(self): + testdir = os.path.join(self.unit_test_dir, '62 pkgconfig relative paths') + pkg_dir = os.path.join(testdir, 'pkgconfig') + self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'librelativepath.pc'))) + + env = get_fake_env(testdir, self.builddir, self.prefix) + env.coredata.set_options({OptionKey('pkg_config_path'): pkg_dir}, subproject='') + kwargs = {'required': True, 'silent': True} + relative_path_dep = PkgConfigDependency('librelativepath', env, kwargs) + self.assertTrue(relative_path_dep.found()) + + # Ensure link_args are properly quoted + libpath = Path(self.builddir) / '../relativepath/lib' + link_args = ['-L' + libpath.as_posix(), '-lrelativepath'] + self.assertEqual(relative_path_dep.get_link_args(), link_args) + + @skipIfNoPkgconfig + def test_pkgconfig_duplicate_path_entries(self): + testdir = os.path.join(self.unit_test_dir, '111 pkgconfig duplicate path entries') + pkg_dir = os.path.join(testdir, 'pkgconfig') + + env = get_fake_env(testdir, self.builddir, self.prefix) + env.coredata.set_options({OptionKey('pkg_config_path'): pkg_dir}, subproject='') + + PkgConfigDependency.setup_env({}, env, MachineChoice.HOST, pkg_dir) + pkg_config_path = env.coredata.options[OptionKey('pkg_config_path')].value + 
self.assertTrue(len(pkg_config_path) == 1) + + @skipIfNoPkgconfig + def test_pkgconfig_internal_libraries(self): + ''' + ''' + with tempfile.TemporaryDirectory() as tempdirname: + # build library + testdirbase = os.path.join(self.unit_test_dir, '32 pkgconfig use libraries') + testdirlib = os.path.join(testdirbase, 'lib') + self.init(testdirlib, extra_args=['--prefix=' + tempdirname, + '--libdir=lib', + '--default-library=static'], default_args=False) + self.build() + self.install(use_destdir=False) + + # build user of library + pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig') + self.new_builddir() + self.init(os.path.join(testdirbase, 'app'), + override_envvars={'PKG_CONFIG_PATH': pkg_dir}) + self.build() + + @skipIfNoPkgconfig + def test_static_archive_stripping(self): + ''' + Check that Meson produces valid static archives with --strip enabled + ''' + with tempfile.TemporaryDirectory() as tempdirname: + testdirbase = os.path.join(self.unit_test_dir, '66 static archive stripping') + + # build lib + self.new_builddir() + testdirlib = os.path.join(testdirbase, 'lib') + testlibprefix = os.path.join(tempdirname, 'libprefix') + self.init(testdirlib, extra_args=['--prefix=' + testlibprefix, + '--libdir=lib', + '--default-library=static', + '--buildtype=debug', + '--strip'], default_args=False) + self.build() + self.install(use_destdir=False) + + # build executable (uses lib, fails if static archive has been stripped incorrectly) + pkg_dir = os.path.join(testlibprefix, 'lib/pkgconfig') + self.new_builddir() + self.init(os.path.join(testdirbase, 'app'), + override_envvars={'PKG_CONFIG_PATH': pkg_dir}) + self.build() + + @skipIfNoPkgconfig + def test_pkgconfig_formatting(self): + testdir = os.path.join(self.unit_test_dir, '38 pkgconfig format') + self.init(testdir) + myenv = os.environ.copy() + myenv['PKG_CONFIG_PATH'] = self.privatedir + stdo = subprocess.check_output(['pkg-config', '--libs-only-l', 'libsomething'], env=myenv) + deps = [b'-lgobject-2.0', b'-lgio-2.0', b'-lglib-2.0', b'-lsomething'] + if is_windows() or is_cygwin() or is_osx() or is_openbsd(): + # On Windows, libintl is a separate library + deps.append(b'-lintl') + self.assertEqual(set(deps), set(stdo.split())) + + @skipIfNoPkgconfig + @skip_if_not_language('cs') + def test_pkgconfig_csharp_library(self): + testdir = os.path.join(self.unit_test_dir, '50 pkgconfig csharp library') + self.init(testdir) + myenv = os.environ.copy() + myenv['PKG_CONFIG_PATH'] = self.privatedir + stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv) + + self.assertEqual("-r/usr/lib/libsomething.dll", str(stdo.decode('ascii')).strip()) + + @skipIfNoPkgconfig + def test_pkgconfig_link_order(self): + ''' + Test that libraries are listed before their dependencies. + ''' + testdir = os.path.join(self.unit_test_dir, '53 pkgconfig static link order') + self.init(testdir) + myenv = os.environ.copy() + myenv['PKG_CONFIG_PATH'] = self.privatedir + stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv) + deps = stdo.split() + self.assertTrue(deps.index(b'-lsomething') < deps.index(b'-ldependency')) + + def test_deterministic_dep_order(self): + ''' + Test that the dependencies are always listed in a deterministic order. 
+ ''' + testdir = os.path.join(self.unit_test_dir, '43 dep order') + self.init(testdir) + with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile: + for line in bfile: + if 'build myexe:' in line or 'build myexe.exe:' in line: + self.assertIn('liblib1.a liblib2.a', line) + return + raise RuntimeError('Could not find the build rule') + + def test_deterministic_rpath_order(self): + ''' + Test that the rpaths are always listed in a deterministic order. + ''' + if is_cygwin(): + raise unittest.SkipTest('rpath are not used on Cygwin') + testdir = os.path.join(self.unit_test_dir, '42 rpath order') + self.init(testdir) + if is_osx(): + rpathre = re.compile(r'-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2') + else: + rpathre = re.compile(r'-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2') + with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile: + for line in bfile: + if '-rpath' in line: + self.assertRegex(line, rpathre) + return + raise RuntimeError('Could not find the rpath') + + def test_override_with_exe_dep(self): + ''' + Test that we produce the correct dependencies when a program is overridden with an executable. + ''' + testdir = os.path.join(self.src_root, 'test cases', 'native', '9 override with exe') + self.init(testdir) + with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile: + for line in bfile: + if 'main1.c:' in line or 'main2.c:' in line: + self.assertIn('| subprojects/sub/foobar', line) + + @skipIfNoPkgconfig + def test_usage_external_library(self): + ''' + Test that uninstalled usage of an external library (from the system or + PkgConfigDependency) works. On macOS, this workflow works out of the + box. On Linux, BSDs, Windows, etc, you need to set extra arguments such + as LD_LIBRARY_PATH, etc, so this test is skipped. + + The system library is found with cc.find_library() and pkg-config deps. 
+ ''' + oldprefix = self.prefix + # Install external library so we can find it + testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'external library') + # install into installdir without using DESTDIR + installdir = self.installdir + self.prefix = installdir + self.init(testdir) + self.prefix = oldprefix + self.build() + self.install(use_destdir=False) + ## New builddir for the consumer + self.new_builddir() + env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir), + 'PKG_CONFIG_PATH': os.path.join(installdir, self.libdir, 'pkgconfig')} + testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'built library') + # install into installdir without using DESTDIR + self.prefix = self.installdir + self.init(testdir, override_envvars=env) + self.prefix = oldprefix + self.build(override_envvars=env) + # test uninstalled + self.run_tests(override_envvars=env) + if not (is_osx() or is_linux()): + return + # test running after installation + self.install(use_destdir=False) + prog = os.path.join(self.installdir, 'bin', 'prog') + self._run([prog]) + if not is_osx(): + # Rest of the workflow only works on macOS + return + out = self._run(['otool', '-L', prog]) + self.assertNotIn('@rpath', out) + ## New builddir for testing that DESTDIR is not added to install_name + self.new_builddir() + # install into installdir with DESTDIR + self.init(testdir, override_envvars=env) + self.build(override_envvars=env) + # test running after installation + self.install(override_envvars=env) + prog = self.installdir + os.path.join(self.prefix, 'bin', 'prog') + lib = self.installdir + os.path.join(self.prefix, 'lib', 'libbar_built.dylib') + for f in prog, lib: + out = self._run(['otool', '-L', f]) + # Ensure that the otool output does not contain self.installdir + self.assertNotRegex(out, self.installdir + '.*dylib ') + + @skipIfNoPkgconfig + def test_link_arg_fullname(self): + ''' + Test for support of -l:libfullname.a + see: https://github.com/mesonbuild/meson/issues/9000 + https://stackoverflow.com/questions/48532868/gcc-library-option-with-a-colon-llibevent-a + ''' + testdir = os.path.join(self.unit_test_dir, '97 link full name','libtestprovider') + oldprefix = self.prefix + # install into installdir without using DESTDIR + installdir = self.installdir + self.prefix = installdir + self.init(testdir) + self.prefix=oldprefix + self.build() + self.install(use_destdir=False) + + self.new_builddir() + env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir), + 'PKG_CONFIG_PATH': os.path.join(installdir, self.libdir, 'pkgconfig')} + testdir = os.path.join(self.unit_test_dir, '97 link full name','proguser') + self.init(testdir,override_envvars=env) + + # test for link with full path + with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile: + for line in bfile: + if 'build dprovidertest:' in line: + self.assertIn('/libtestprovider.a', line) + + if is_osx(): + # macOS's ld do not supports `--whole-archive`, skip build & run + return + + self.build(override_envvars=env) + + # skip test if pkg-config is too old. + # before v0.28, Libs flags like -Wl will not kept in context order with -l flags. 
+ # see https://gitlab.freedesktop.org/pkg-config/pkg-config/-/blob/master/NEWS + pkgconfigver = subprocess.check_output(['pkg-config', '--version']) + if b'0.28' > pkgconfigver: + raise unittest.SkipTest('pkg-config is too old to be correctly done this.') + self.run_tests() + + @skipIfNoPkgconfig + def test_usage_pkgconfig_prefixes(self): + ''' + Build and install two external libraries, to different prefixes, + then build and install a client program that finds them via pkgconfig, + and verify the installed client program runs. + ''' + oldinstalldir = self.installdir + + # Build and install both external libraries without DESTDIR + val1dir = os.path.join(self.unit_test_dir, '75 pkgconfig prefixes', 'val1') + val1prefix = os.path.join(oldinstalldir, 'val1') + self.prefix = val1prefix + self.installdir = val1prefix + self.init(val1dir) + self.build() + self.install(use_destdir=False) + self.new_builddir() + + env1 = {} + env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig') + val2dir = os.path.join(self.unit_test_dir, '75 pkgconfig prefixes', 'val2') + val2prefix = os.path.join(oldinstalldir, 'val2') + self.prefix = val2prefix + self.installdir = val2prefix + self.init(val2dir, override_envvars=env1) + self.build() + self.install(use_destdir=False) + self.new_builddir() + + # Build, install, and run the client program + env2 = {} + env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig') + testdir = os.path.join(self.unit_test_dir, '75 pkgconfig prefixes', 'client') + testprefix = os.path.join(oldinstalldir, 'client') + self.prefix = testprefix + self.installdir = testprefix + self.init(testdir, override_envvars=env2) + self.build() + self.install(use_destdir=False) + prog = os.path.join(self.installdir, 'bin', 'client') + env3 = {} + if is_cygwin(): + env3['PATH'] = os.path.join(val1prefix, 'bin') + \ + os.pathsep + \ + os.path.join(val2prefix, 'bin') + \ + os.pathsep + os.environ['PATH'] + out = self._run([prog], override_envvars=env3).strip() + # Expected output is val1 + val2 = 3 + self.assertEqual(out, '3') + + def install_subdir_invalid_symlinks(self, testdir, subdir_path): + ''' + Test that installation of broken symlinks works fine. + https://github.com/mesonbuild/meson/issues/3914 + ''' + testdir = os.path.join(self.common_test_dir, testdir) + subdir = os.path.join(testdir, subdir_path) + with chdir(subdir): + # Can't distribute broken symlinks in the source tree because it breaks + # the creation of zipapps. Create it dynamically and run the test by + # hand. 
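+ # For illustration: os.symlink('../../nonexistent.txt', 'invalid-symlink.txt')
+ # produces a dangling link, so os.path.islink() is True while
+ # os.path.isfile() is False, which is exactly what the assertions below verify.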
+ src = '../../nonexistent.txt' + os.symlink(src, 'invalid-symlink.txt') + try: + self.init(testdir) + self.build() + self.install() + install_path = subdir_path.split(os.path.sep)[-1] + link = os.path.join(self.installdir, 'usr', 'share', install_path, 'invalid-symlink.txt') + self.assertTrue(os.path.islink(link), msg=link) + self.assertEqual(src, os.readlink(link)) + self.assertFalse(os.path.isfile(link), msg=link) + finally: + os.remove(os.path.join(subdir, 'invalid-symlink.txt')) + + def test_install_subdir_symlinks(self): + self.install_subdir_invalid_symlinks('59 install subdir', os.path.join('sub', 'sub1')) + + def test_install_subdir_symlinks_with_default_umask(self): + self.install_subdir_invalid_symlinks('190 install_mode', 'sub2') + + def test_install_subdir_symlinks_with_default_umask_and_mode(self): + self.install_subdir_invalid_symlinks('190 install_mode', 'sub1') + + @skipIfNoPkgconfigDep('gmodule-2.0') + def test_ldflag_dedup(self): + testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup') + if is_cygwin() or is_osx(): + raise unittest.SkipTest('Not applicable on Cygwin or OSX.') + env = get_fake_env() + cc = detect_c_compiler(env, MachineChoice.HOST) + linker = cc.linker + if not linker.export_dynamic_args(env): + raise unittest.SkipTest('Not applicable for linkers without --export-dynamic') + self.init(testdir) + build_ninja = os.path.join(self.builddir, 'build.ninja') + max_count = 0 + search_term = '-Wl,--export-dynamic' + with open(build_ninja, encoding='utf-8') as f: + for line in f: + max_count = max(max_count, line.count(search_term)) + self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.') + + def test_compiler_libs_static_dedup(self): + testdir = os.path.join(self.unit_test_dir, '56 dedup compiler libs') + self.init(testdir) + build_ninja = os.path.join(self.builddir, 'build.ninja') + with open(build_ninja, encoding='utf-8') as f: + lines = f.readlines() + for lib in ('-ldl', '-lm', '-lc', '-lrt'): + for line in lines: + if lib not in line: + continue + # Assert that + self.assertEqual(len(line.split(lib)), 2, msg=(lib, line)) + + @skipIfNoPkgconfig + def test_noncross_options(self): + # C_std defined in project options must be in effect also when native compiling. 
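+ # The '51 noncross options' project is assumed to request c_std=c99 via its
+ # default_options, so both compile commands checked below should carry
+ # -std=c99 even though no -Dc_std is passed on the command line.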
+ testdir = os.path.join(self.unit_test_dir, '51 noncross options') + self.init(testdir, extra_args=['-Dpkg_config_path=' + testdir]) + compdb = self.get_compdb() + self.assertEqual(len(compdb), 2) + self.assertRegex(compdb[0]['command'], '-std=c99') + self.assertRegex(compdb[1]['command'], '-std=c99') + self.build() + + def test_identity_cross(self): + testdir = os.path.join(self.unit_test_dir, '61 identity cross') + + nativefile = tempfile.NamedTemporaryFile(mode='w') + nativefile.write(textwrap.dedent('''\ + [binaries] + c = ['{}'] + '''.format(os.path.join(testdir, 'build_wrapper.py')))) + nativefile.flush() + self.meson_native_file = nativefile.name + + crossfile = tempfile.NamedTemporaryFile(mode='w') + crossfile.write(textwrap.dedent('''\ + [binaries] + c = ['{}'] + '''.format(os.path.join(testdir, 'host_wrapper.py')))) + crossfile.flush() + self.meson_cross_file = crossfile.name + + # TODO should someday be explicit about build platform only here + self.init(testdir) + + def test_identity_cross_env(self): + testdir = os.path.join(self.unit_test_dir, '61 identity cross') + env = { + 'CC_FOR_BUILD': '"' + os.path.join(testdir, 'build_wrapper.py') + '"', + } + crossfile = tempfile.NamedTemporaryFile(mode='w') + crossfile.write(textwrap.dedent('''\ + [binaries] + c = ['{}'] + '''.format(os.path.join(testdir, 'host_wrapper.py')))) + crossfile.flush() + self.meson_cross_file = crossfile.name + # TODO should someday be explicit about build platform only here + self.init(testdir, override_envvars=env) + + @skipIfNoPkgconfig + def test_static_link(self): + if is_cygwin(): + raise unittest.SkipTest("Cygwin doesn't support LD_LIBRARY_PATH.") + + # Build some libraries and install them + testdir = os.path.join(self.unit_test_dir, '67 static link/lib') + libdir = os.path.join(self.installdir, self.libdir) + oldprefix = self.prefix + self.prefix = self.installdir + self.init(testdir) + self.install(use_destdir=False) + + # Test that installed libraries works + self.new_builddir() + self.prefix = oldprefix + meson_args = [f'-Dc_link_args=-L{libdir}', + '--fatal-meson-warnings'] + testdir = os.path.join(self.unit_test_dir, '67 static link') + env = {'PKG_CONFIG_LIBDIR': os.path.join(libdir, 'pkgconfig')} + self.init(testdir, extra_args=meson_args, override_envvars=env) + self.build() + self.run_tests() + + def _check_ld(self, check: str, name: str, lang: str, expected: str) -> None: + if is_sunos(): + raise unittest.SkipTest('Solaris currently cannot override the linker.') + if not shutil.which(check): + raise unittest.SkipTest(f'Could not find {check}.') + envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP[f'{lang}_ld']] + + # Also test a deprecated variable if there is one. 
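+ # Some languages renamed their linker variable at some point; the old
+ # spelling is kept in DEPRECATED_ENV_PROG_MAP, and both the current and
+ # the deprecated name should end up selecting the same linker.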
+ if f'{lang}_ld' in mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP: + envvars.append( + mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP[f'{lang}_ld']) + + for envvar in envvars: + with mock.patch.dict(os.environ, {envvar: name}): + env = get_fake_env() + comp = compiler_from_language(env, lang, MachineChoice.HOST) + if isinstance(comp, (mesonbuild.compilers.AppleClangCCompiler, + mesonbuild.compilers.AppleClangCPPCompiler, + mesonbuild.compilers.AppleClangObjCCompiler, + mesonbuild.compilers.AppleClangObjCPPCompiler)): + raise unittest.SkipTest('AppleClang is currently only supported with ld64') + if lang != 'rust' and comp.use_linker_args('bfd') == []: + raise unittest.SkipTest( + f'Compiler {comp.id} does not support using alternative linkers') + self.assertEqual(comp.linker.id, expected) + + def test_ld_environment_variable_bfd(self): + self._check_ld('ld.bfd', 'bfd', 'c', 'ld.bfd') + + def test_ld_environment_variable_gold(self): + self._check_ld('ld.gold', 'gold', 'c', 'ld.gold') + + def test_ld_environment_variable_lld(self): + self._check_ld('ld.lld', 'lld', 'c', 'ld.lld') + + @skip_if_not_language('rust') + @skipIfNoExecutable('ld.gold') # need an additional check here because _check_ld checks for gcc + def test_ld_environment_variable_rust(self): + self._check_ld('gcc', 'gcc -fuse-ld=gold', 'rust', 'ld.gold') + + def test_ld_environment_variable_cpp(self): + self._check_ld('ld.gold', 'gold', 'cpp', 'ld.gold') + + @skip_if_not_language('objc') + def test_ld_environment_variable_objc(self): + self._check_ld('ld.gold', 'gold', 'objc', 'ld.gold') + + @skip_if_not_language('objcpp') + def test_ld_environment_variable_objcpp(self): + self._check_ld('ld.gold', 'gold', 'objcpp', 'ld.gold') + + @skip_if_not_language('fortran') + def test_ld_environment_variable_fortran(self): + self._check_ld('ld.gold', 'gold', 'fortran', 'ld.gold') + + @skip_if_not_language('d') + def test_ld_environment_variable_d(self): + # At least for me, ldc defaults to gold, and gdc defaults to bfd, so + # let's pick lld, which isn't the default for either (currently) + if is_osx(): + expected = 'ld64' + else: + expected = 'ld.lld' + self._check_ld('ld.lld', 'lld', 'd', expected) + + def compute_sha256(self, filename): + with open(filename, 'rb') as f: + return hashlib.sha256(f.read()).hexdigest() + + def test_wrap_with_file_url(self): + testdir = os.path.join(self.unit_test_dir, '73 wrap file url') + source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz') + patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz') + wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap') + source_hash = self.compute_sha256(source_filename) + patch_hash = self.compute_sha256(patch_filename) + wrap = textwrap.dedent("""\ + [wrap-file] + directory = foo + + source_url = http://server.invalid/foo + source_fallback_url = file://{} + source_filename = foo.tar.xz + source_hash = {} + + patch_url = http://server.invalid/foo + patch_fallback_url = file://{} + patch_filename = foo-patch.tar.xz + patch_hash = {} + """.format(source_filename, source_hash, patch_filename, patch_hash)) + with open(wrap_filename, 'w', encoding='utf-8') as f: + f.write(wrap) + self.init(testdir) + self.build() + self.run_tests() + + windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'packagecache')) + windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo')) + os.unlink(wrap_filename) + + def test_no_rpath_for_static(self): + testdir = os.path.join(self.common_test_dir, '5 linkstatic') + self.init(testdir) + 
self.build()
+ build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
+ self.assertIsNone(build_rpath)
+
+ def test_lookup_system_after_broken_fallback(self):
+ # Just to generate libfoo.pc so we can test system dependency lookup.
+ testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen')
+ self.init(testdir)
+ privatedir = self.privatedir
+
+ # Write a test project where the first dependency() returns not-found
+ # because the 'broken' subproject does not exist, but that should not
+ # prevent the second dependency() from being looked up on the system.
+ self.new_builddir()
+ with tempfile.TemporaryDirectory() as d:
+ with open(os.path.join(d, 'meson.build'), 'w', encoding='utf-8') as f:
+ f.write(textwrap.dedent('''\
+ project('test')
+ dependency('notfound', fallback: 'broken', required: false)
+ dependency('libfoo', fallback: 'broken', required: true)
+ '''))
+ self.init(d, override_envvars={'PKG_CONFIG_LIBDIR': privatedir})
+
+ def test_as_link_whole(self):
+ testdir = os.path.join(self.unit_test_dir, '77 as link whole')
+ self.init(testdir)
+ with open(os.path.join(self.privatedir, 'bar1.pc'), encoding='utf-8') as f:
+ content = f.read()
+ self.assertIn('-lfoo', content)
+ with open(os.path.join(self.privatedir, 'bar2.pc'), encoding='utf-8') as f:
+ content = f.read()
+ self.assertNotIn('-lfoo', content)
+
+ def test_prelinking(self):
+ # Prelinking currently only works on sufficiently new GNU toolchains.
+ # Skip everything else. When support for other toolchains is added,
+ # remove limitations as necessary.
+ if is_osx():
+ raise unittest.SkipTest('Prelinking not supported on Darwin.')
+ if 'clang' in os.environ.get('CC', 'dummy'):
+ raise unittest.SkipTest('Prelinking not supported with Clang.')
+ gccver = subprocess.check_output(['cc', '--version'])
+ if b'7.5.0' in gccver:
+ raise unittest.SkipTest('GCC on Bionic is too old to be supported.')
+ testdir = os.path.join(self.unit_test_dir, '87 prelinking')
+ self.init(testdir)
+ self.build()
+ outlib = os.path.join(self.builddir, 'libprelinked.a')
+ ar = shutil.which('ar')
+ self.assertTrue(os.path.exists(outlib))
+ self.assertTrue(ar is not None)
+ p = subprocess.run([ar, 't', outlib],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ universal_newlines=True, timeout=1)
+ obj_files = p.stdout.strip().split('\n')
+ self.assertEqual(len(obj_files), 1)
+ self.assertTrue(obj_files[0].endswith('-prelink.o'))
+
+ def do_one_test_with_nativefile(self, testdir, args):
+ testdir = os.path.join(self.common_test_dir, testdir)
+ with tempfile.TemporaryDirectory() as d:
+ p = Path(d) / 'nativefile'
+ with p.open('wt', encoding='utf-8') as f:
+ f.write(f'''[binaries]
+ c = {args}
+ ''')
+ self.init(testdir, extra_args=['--native-file=' + str(p)])
+ self.build()
+
+ def test_cmake_multilib(self):
+ '''
+ Test that the cmake module handles multilib paths correctly.
+ '''
+ # Verify that "gcc -m32" works
+ try:
+ self.do_one_test_with_nativefile('1 trivial', "['gcc', '-m32']")
+ except subprocess.CalledProcessError as e:
+ raise unittest.SkipTest('Not GCC, or GCC does not have the -m32 option')
+ self.wipe()
+
+ # Verify that cmake works
+ try:
+ self.do_one_test_with_nativefile('../cmake/1 basic', "['gcc']")
+ except subprocess.CalledProcessError as e:
+ raise unittest.SkipTest('Could not build basic cmake project')
+ self.wipe()
+
+ # If both worked, we can test that cmake works with "gcc -m32"
+ self.do_one_test_with_nativefile('../cmake/1 basic', "['gcc', '-m32']")
+
+class BaseLinuxCrossTests(BasePlatformTests):
+ # Don't pass --libdir when cross-compiling.
We have tests that + # check whether meson auto-detects it correctly. + libdir = None + + +def should_run_cross_arm_tests(): + return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm') + +@unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM") +class LinuxCrossArmTests(BaseLinuxCrossTests): + ''' + Tests that cross-compilation to Linux/ARM works + ''' + + def setUp(self): + super().setUp() + src_root = os.path.dirname(__file__) + self.meson_cross_file = os.path.join(src_root, 'cross', 'ubuntu-armhf.txt') + + def test_cflags_cross_environment_pollution(self): + ''' + Test that the CFLAGS environment variable does not pollute the cross + environment. This can't be an ordinary test case because we need to + inspect the compiler database. + ''' + testdir = os.path.join(self.common_test_dir, '3 static') + self.init(testdir, override_envvars={'CFLAGS': '-DBUILD_ENVIRONMENT_ONLY'}) + compdb = self.get_compdb() + self.assertNotIn('-DBUILD_ENVIRONMENT_ONLY', compdb[0]['command']) + + def test_cross_file_overrides_always_args(self): + ''' + Test that $lang_args in cross files always override get_always_args(). + Needed for overriding the default -D_FILE_OFFSET_BITS=64 on some + architectures such as some Android versions and Raspbian. + https://github.com/mesonbuild/meson/issues/3049 + https://github.com/mesonbuild/meson/issues/3089 + ''' + testdir = os.path.join(self.unit_test_dir, '33 cross file overrides always args') + self.meson_cross_file = os.path.join(testdir, 'ubuntu-armhf-overrides.txt') + self.init(testdir) + compdb = self.get_compdb() + self.assertRegex(compdb[0]['command'], '-D_FILE_OFFSET_BITS=64.*-U_FILE_OFFSET_BITS') + self.build() + + def test_cross_libdir(self): + # When cross compiling "libdir" should default to "lib" + # rather than "lib/x86_64-linux-gnu" or something like that. + testdir = os.path.join(self.common_test_dir, '1 trivial') + self.init(testdir) + for i in self.introspect('--buildoptions'): + if i['name'] == 'libdir': + self.assertEqual(i['value'], 'lib') + return + self.assertTrue(False, 'Option libdir not in introspect data.') + + def test_cross_libdir_subproject(self): + # Guard against a regression where calling "subproject" + # would reset the value of libdir to its default value. + testdir = os.path.join(self.unit_test_dir, '76 subdir libdir') + self.init(testdir, extra_args=['--libdir=fuf']) + for i in self.introspect('--buildoptions'): + if i['name'] == 'libdir': + self.assertEqual(i['value'], 'fuf') + return + self.assertTrue(False, 'Libdir specified on command line gets reset.') + + def test_std_remains(self): + # C_std defined in project options must be in effect also when cross compiling. 
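+ # Same '51 noncross options' project as in the native variant above; the
+ # cross compile command recorded in the compilation database should still
+ # contain -std=c99.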
+ testdir = os.path.join(self.unit_test_dir, '51 noncross options') + self.init(testdir) + compdb = self.get_compdb() + self.assertRegex(compdb[0]['command'], '-std=c99') + self.build() + + @skipIfNoPkgconfig + def test_pkg_config_option(self): + if not shutil.which('arm-linux-gnueabihf-pkg-config'): + raise unittest.SkipTest('Cross-pkgconfig not found.') + testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option') + self.init(testdir, extra_args=[ + '-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'), + '-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'), + ]) + + def test_run_native_test(self): + ''' + https://github.com/mesonbuild/meson/issues/7997 + check run native test in crossbuild without exe wrapper + ''' + testdir = os.path.join(self.unit_test_dir, '88 run native test') + stamp_file = os.path.join(self.builddir, 'native_test_has_run.stamp') + self.init(testdir) + self.build() + self.assertPathDoesNotExist(stamp_file) + self.run_tests() + self.assertPathExists(stamp_file) + + +def should_run_cross_mingw_tests(): + return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin()) + +@unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW") +class LinuxCrossMingwTests(BaseLinuxCrossTests): + ''' + Tests that cross-compilation to Windows/MinGW works + ''' + + def setUp(self): + super().setUp() + src_root = os.path.dirname(__file__) + self.meson_cross_file = os.path.join(src_root, 'cross', 'linux-mingw-w64-64bit.txt') + + def test_exe_wrapper_behaviour(self): + ''' + Test that an exe wrapper that isn't found doesn't cause compiler sanity + checks and compiler checks to fail, but causes configure to fail if it + requires running a cross-built executable (custom_target or run_target) + and causes the tests to be skipped if they are run. + ''' + testdir = os.path.join(self.unit_test_dir, '36 exe_wrapper behaviour') + # Configures, builds, and tests fine by default + self.init(testdir) + self.build() + self.run_tests() + self.wipe() + os.mkdir(self.builddir) + # Change cross file to use a non-existing exe_wrapper and it should fail + self.meson_cross_file = os.path.join(testdir, 'broken-cross.txt') + # Force tracebacks so we can detect them properly + env = {'MESON_FORCE_BACKTRACE': '1'} + error_message = "An exe_wrapper is needed but was not found. Please define one in cross file and check the command and/or add it to PATH." + + with self.assertRaises(MesonException) as cm: + # Must run in-process or we'll get a generic CalledProcessError + self.init(testdir, extra_args='-Drun-target=false', + inprocess=True, + override_envvars=env) + self.assertEqual(str(cm.exception), error_message) + + with self.assertRaises(MesonException) as cm: + # Must run in-process or we'll get a generic CalledProcessError + self.init(testdir, extra_args='-Dcustom-target=false', + inprocess=True, + override_envvars=env) + self.assertEqual(str(cm.exception), error_message) + + self.init(testdir, extra_args=['-Dcustom-target=false', '-Drun-target=false'], + override_envvars=env) + self.build() + + with self.assertRaises(MesonException) as cm: + # Must run in-process or we'll get a generic CalledProcessError + self.run_tests(inprocess=True, override_envvars=env) + self.assertEqual(str(cm.exception), + "The exe_wrapper defined in the cross file 'broken' was not found. 
Please check the command and/or add it to PATH.") + + @skipIfNoPkgconfig + def test_cross_pkg_config_option(self): + testdir = os.path.join(self.unit_test_dir, '58 pkg_config_path option') + self.init(testdir, extra_args=[ + '-Dbuild.pkg_config_path=' + os.path.join(testdir, 'build_extra_path'), + '-Dpkg_config_path=' + os.path.join(testdir, 'host_extra_path'), + ]) + + +class PythonTests(BasePlatformTests): + ''' + Tests that verify compilation of python extension modules + ''' + + def test_versions(self): + if self.backend is not Backend.ninja: + raise unittest.SkipTest(f'Skipping python tests with {self.backend.name} backend') + + testdir = os.path.join(self.src_root, 'test cases', 'unit', '39 python extmodule') + + # No python version specified, this will use meson's python + self.init(testdir) + self.build() + self.run_tests() + self.wipe() + + # When specifying a known name, (python2 / python3) the module + # will also try 'python' as a fallback and use it if the major + # version matches + try: + self.init(testdir, extra_args=['-Dpython=python2']) + self.build() + self.run_tests() + except unittest.SkipTest: + # python2 is not necessarily installed on the test machine, + # if it is not, or the python headers can't be found, the test + # will raise MESON_SKIP_TEST, we could check beforehand what version + # of python is available, but it's a bit of a chicken and egg situation, + # as that is the job of the module, so we just ask for forgiveness rather + # than permission. + pass + + self.wipe() + + for py in ('pypy', 'pypy3'): + try: + self.init(testdir, extra_args=['-Dpython=%s' % py]) + except unittest.SkipTest: + # Same as above, pypy2 and pypy3 are not expected to be present + # on the test system, the test project only raises in these cases + continue + + # We have a pypy, this is expected to work + self.build() + self.run_tests() + self.wipe() + + # The test is configured to error out with MESON_SKIP_TEST + # in case it could not find python + with self.assertRaises(unittest.SkipTest): + self.init(testdir, extra_args=['-Dpython=not-python']) + self.wipe() + + # While dir is an external command on both Windows and Linux, + # it certainly isn't python + with self.assertRaises(unittest.SkipTest): + self.init(testdir, extra_args=['-Dpython=dir']) + self.wipe() + + +class RewriterTests(BasePlatformTests): + def setUp(self): + super().setUp() + self.maxDiff = None + + def prime(self, dirname): + copy_tree(os.path.join(self.rewrite_test_dir, dirname), self.builddir) + + def rewrite_raw(self, directory, args): + if isinstance(args, str): + args = [args] + command = self.rewrite_command + ['--verbose', '--skip', '--sourcedir', directory] + args + p = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + universal_newlines=True, timeout=60) + print('STDOUT:') + print(p.stdout) + print('STDERR:') + print(p.stderr) + if p.returncode != 0: + if 'MESON_SKIP_TEST' in p.stdout: + raise unittest.SkipTest('Project requested skipping.') + raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout) + if not p.stderr: + return {} + return json.loads(p.stderr) + + def rewrite(self, directory, args): + if isinstance(args, str): + args = [args] + return self.rewrite_raw(directory, ['command'] + args) + + def test_target_source_list(self): + self.prime('1 basic') + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'target': { + 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 
'fileC.cpp']}, + 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']}, + 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']}, + 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']}, + 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']}, + 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']}, + 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']}, + 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']}, + 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']}, + 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']}, + } + } + self.assertDictEqual(out, expected) + + def test_target_add_sources(self): + self.prime('1 basic') + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json')) + expected = { + 'target': { + 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']}, + 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}, + 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp']}, + 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp']}, + 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp']}, + 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']}, + 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp']}, + 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}, + 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}, + 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}, + } + } + self.assertDictEqual(out, expected) + + # Check the written file + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + self.assertDictEqual(out, expected) + + def test_target_add_sources_abs(self): + self.prime('1 basic') + abs_src = [os.path.join(self.builddir, x) for x in ['a1.cpp', 'a2.cpp', 'a6.cpp']] + add = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "src_add", "sources": abs_src}]) + inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}]) + self.rewrite(self.builddir, add) + out = self.rewrite(self.builddir, inf) + expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}}} + self.assertDictEqual(out, expected) + + def test_target_remove_sources(self): + self.prime('1 basic') + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json')) + expected = { + 'target': { + 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp']}, + 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp']}, + 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp']}, + 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp']}, + 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp']}, + 
'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ self.assertDictEqual(out, expected)
+
+ def test_target_subdir(self):
+ self.prime('2 subdirs')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
+ expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c']}
+ self.assertDictEqual(list(out['target'].values())[0], expected)
+
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ self.assertDictEqual(list(out['target'].values())[0], expected)
+
+ def test_target_remove(self):
+ self.prime('1 basic')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+
+ expected = {
+ 'target': {
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_add(self):
+ self.prime('1 basic')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+
+ expected = {
+ 'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog10@sha': {'name': 'trivialprog10', 'sources': ['new1.cpp', 'new2.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_remove_subdir(self):
+ self.prime('2 subdirs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ self.assertDictEqual(out, {})
+
+ def test_target_add_subdir(self):
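+ # Same flow as the add-target test above, but against the '2 subdirs'
+ # project; targets defined in subdirectories are keyed by a subdir-hashed
+ # ID such as the '94b671c@@something@exe' entry checked below.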
+ self.prime('2 subdirs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = {'name': 'something', 'sources': ['first.c', 'second.c']} + self.assertDictEqual(out['target']['94b671c@@something@exe'], expected) + + def test_target_source_sorting(self): + self.prime('5 sorting') + add_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'src_add', 'sources': ['a666.c']}]) + inf_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'info'}]) + out = self.rewrite(self.builddir, add_json) + out = self.rewrite(self.builddir, inf_json) + expected = { + 'target': { + 'exe1@exe': { + 'name': 'exe1', + 'sources': [ + 'aaa/a/a1.c', + 'aaa/b/b1.c', + 'aaa/b/b2.c', + 'aaa/f1.c', + 'aaa/f2.c', + 'aaa/f3.c', + 'bbb/a/b1.c', + 'bbb/b/b2.c', + 'bbb/c1/b5.c', + 'bbb/c2/b7.c', + 'bbb/c10/b6.c', + 'bbb/a4.c', + 'bbb/b3.c', + 'bbb/b4.c', + 'bbb/b5.c', + 'a1.c', + 'a2.c', + 'a3.c', + 'a10.c', + 'a20.c', + 'a30.c', + 'a100.c', + 'a101.c', + 'a110.c', + 'a210.c', + 'a666.c', + 'b1.c', + 'c2.c' + ] + } + } + } + self.assertDictEqual(out, expected) + + def test_target_same_name_skip(self): + self.prime('4 same name targets') + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = {'name': 'myExe', 'sources': ['main.cpp']} + self.assertEqual(len(out['target']), 2) + for val in out['target'].values(): + self.assertDictEqual(expected, val) + + def test_kwargs_info(self): + self.prime('3 kwargs') + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'kwargs': { + 'project#/': {'version': '0.0.1'}, + 'target#tgt1': {'build_by_default': True}, + 'dependency#dep1': {'required': False} + } + } + self.assertDictEqual(out, expected) + + def test_kwargs_set(self): + self.prime('3 kwargs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'set.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'kwargs': { + 'project#/': {'version': '0.0.2', 'meson_version': '0.50.0', 'license': ['GPL', 'MIT']}, + 'target#tgt1': {'build_by_default': False, 'build_rpath': '/usr/local', 'dependencies': 'dep1'}, + 'dependency#dep1': {'required': True, 'method': 'cmake'} + } + } + self.assertDictEqual(out, expected) + + def test_kwargs_add(self): + self.prime('3 kwargs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'add.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'kwargs': { + 'project#/': {'version': '0.0.1', 'license': ['GPL', 'MIT', 'BSD', 'Boost']}, + 'target#tgt1': {'build_by_default': True}, + 'dependency#dep1': {'required': False} + } + } + self.assertDictEqual(out, expected) + + def test_kwargs_remove(self): + self.prime('3 kwargs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'remove.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'kwargs': { + 'project#/': {'version': '0.0.1', 'license': 'GPL'}, + 'target#tgt1': {'build_by_default': True}, + 'dependency#dep1': {'required': False} + } + } + self.assertDictEqual(out, expected) + + def test_kwargs_remove_regex(self): + self.prime('3 kwargs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'remove_regex.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + 
expected = { + 'kwargs': { + 'project#/': {'version': '0.0.1', 'default_options': 'debug=true'}, + 'target#tgt1': {'build_by_default': True}, + 'dependency#dep1': {'required': False} + } + } + self.assertDictEqual(out, expected) + + def test_kwargs_delete(self): + self.prime('3 kwargs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'delete.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'kwargs': { + 'project#/': {}, + 'target#tgt1': {}, + 'dependency#dep1': {'required': False} + } + } + self.assertDictEqual(out, expected) + + def test_default_options_set(self): + self.prime('3 kwargs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_set.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'kwargs': { + 'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=True', 'cpp_std=c++11']}, + 'target#tgt1': {'build_by_default': True}, + 'dependency#dep1': {'required': False} + } + } + self.assertDictEqual(out, expected) + + def test_default_options_delete(self): + self.prime('3 kwargs') + self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_delete.json')) + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + expected = { + 'kwargs': { + 'project#/': {'version': '0.0.1', 'default_options': ['cpp_std=c++14', 'debug=true']}, + 'target#tgt1': {'build_by_default': True}, + 'dependency#dep1': {'required': False} + } + } + self.assertDictEqual(out, expected) + +class NativeFileTests(BasePlatformTests): + + def setUp(self): + super().setUp() + self.testcase = os.path.join(self.unit_test_dir, '47 native file binary') + self.current_config = 0 + self.current_wrapper = 0 + + def helper_create_native_file(self, values): + """Create a config file as a temporary file. 
+ + values should be a nested dictionary structure of {section: {key: + value}} + """ + filename = os.path.join(self.builddir, f'generated{self.current_config}.config') + self.current_config += 1 + with open(filename, 'wt', encoding='utf-8') as f: + for section, entries in values.items(): + f.write(f'[{section}]\n') + for k, v in entries.items(): + if isinstance(v, (bool, int, float)): + f.write(f"{k}={v}\n") + elif isinstance(v, list): + f.write("{}=[{}]\n".format(k, ', '.join([f"'{w}'" for w in v]))) + else: + f.write(f"{k}='{v}'\n") + return filename + + def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs): + """Creates a wrapper around a binary that overrides specific values.""" + filename = os.path.join(dir_ or self.builddir, f'binary_wrapper{self.current_wrapper}.py') + extra_args = extra_args or {} + self.current_wrapper += 1 + if is_haiku(): + chbang = '#!/bin/env python3' + else: + chbang = '#!/usr/bin/env python3' + + with open(filename, 'wt', encoding='utf-8') as f: + f.write(textwrap.dedent('''\ + {} + import argparse + import subprocess + import sys + + def main(): + parser = argparse.ArgumentParser() + '''.format(chbang))) + for name in chain(extra_args, kwargs): + f.write(' parser.add_argument("-{0}", "--{0}", action="store_true")\n'.format(name)) + f.write(' args, extra_args = parser.parse_known_args()\n') + for name, value in chain(extra_args.items(), kwargs.items()): + f.write(f' if args.{name}:\n') + f.write(' print("{}", file=sys.{})\n'.format(value, kwargs.get('outfile', 'stdout'))) + f.write(' sys.exit(0)\n') + f.write(textwrap.dedent(''' + ret = subprocess.run( + ["{}"] + extra_args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + print(ret.stdout.decode('utf-8')) + print(ret.stderr.decode('utf-8'), file=sys.stderr) + sys.exit(ret.returncode) + + if __name__ == '__main__': + main() + '''.format(binary))) + + if not is_windows(): + os.chmod(filename, 0o755) + return filename + + # On windows we need yet another level of indirection, as cmd cannot + # invoke python files itself, so instead we generate a .bat file, which + # invokes our python wrapper + batfile = os.path.join(self.builddir, f'binary_wrapper{self.current_wrapper}.bat') + with open(batfile, 'wt', encoding='utf-8') as f: + f.write(fr'@{sys.executable} {filename} %*') + return batfile + + def helper_for_compiler(self, lang, cb, for_machine = MachineChoice.HOST): + """Helper for generating tests for overriding compilers for langaugages + with more than one implementation, such as C, C++, ObjC, ObjC++, and D. + """ + env = get_fake_env() + getter = lambda: compiler_from_language(env, lang, for_machine) + cc = getter() + binary, newid = cb(cc) + env.binaries[for_machine].binaries[lang] = binary + compiler = getter() + self.assertEqual(compiler.id, newid) + + def test_multiple_native_files_override(self): + wrapper = self.helper_create_binary_wrapper('bash', version='foo') + config = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + wrapper = self.helper_create_binary_wrapper('bash', version='12345') + config2 = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + self.init(self.testcase, extra_args=[ + '--native-file', config, '--native-file', config2, + '-Dcase=find_program']) + + # This test hangs on cygwin. 
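+ # The check below feeds the native file through a FIFO created with
+ # os.mkfifo(): a helper thread writes the [binaries] section into it while
+ # meson reads from the other end, hence the POSIX-only restriction.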
+ @unittest.skipIf(os.name != 'posix' or is_cygwin(), 'Uses fifos, which are not available on non Unix OSes.') + def test_native_file_is_pipe(self): + fifo = os.path.join(self.builddir, 'native.file') + os.mkfifo(fifo) + with tempfile.TemporaryDirectory() as d: + wrapper = self.helper_create_binary_wrapper('bash', d, version='12345') + + def filler(): + with open(fifo, 'w', encoding='utf-8') as f: + f.write('[binaries]\n') + f.write(f"bash = '{wrapper}'\n") + + thread = threading.Thread(target=filler) + thread.start() + + self.init(self.testcase, extra_args=['--native-file', fifo, '-Dcase=find_program']) + + thread.join() + os.unlink(fifo) + + self.init(self.testcase, extra_args=['--wipe']) + + def test_multiple_native_files(self): + wrapper = self.helper_create_binary_wrapper('bash', version='12345') + config = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + wrapper = self.helper_create_binary_wrapper('python') + config2 = self.helper_create_native_file({'binaries': {'python': wrapper}}) + self.init(self.testcase, extra_args=[ + '--native-file', config, '--native-file', config2, + '-Dcase=find_program']) + + def _simple_test(self, case, binary, entry=None): + wrapper = self.helper_create_binary_wrapper(binary, version='12345') + config = self.helper_create_native_file({'binaries': {entry or binary: wrapper}}) + self.init(self.testcase, extra_args=['--native-file', config, f'-Dcase={case}']) + + def test_find_program(self): + self._simple_test('find_program', 'bash') + + def test_config_tool_dep(self): + # Do the skip at this level to avoid screwing up the cache + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with LLVM on MSYS2') + if not shutil.which('llvm-config'): + raise unittest.SkipTest('No llvm-installed, cannot test') + self._simple_test('config_dep', 'llvm-config') + + def test_python3_module(self): + self._simple_test('python3', 'python3') + + def test_python_module(self): + if is_windows(): + # Bat adds extra crap to stdout, so the version check logic in the + # python module breaks. This is fine on other OSes because they + # don't need the extra indirection. 
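+ # (On Windows the wrapper from helper_create_binary_wrapper is invoked
+ # through a generated .bat shim, and that extra layer is what garbles the
+ # version output.)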
+ raise unittest.SkipTest('bat indirection breaks internal sanity checks.') + elif is_osx(): + binary = 'python' + else: + binary = 'python2' + + # We not have python2, check for it + for v in ['2', '2.7', '-2.7']: + rc = subprocess.call(['pkg-config', '--cflags', f'python{v}'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + if rc == 0: + break + else: + raise unittest.SkipTest('Not running Python 2 tests because dev packages not installed.') + self._simple_test('python', binary, entry='python') + + @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard') + @skip_if_env_set('CC') + def test_c_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang', 'clang' + if not is_real_gnu_compiler(shutil.which('gcc')): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'gcc', 'gcc' + self.helper_for_compiler('c', cb) + + @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard') + @skip_if_env_set('CXX') + def test_cpp_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang++', 'clang' + if not is_real_gnu_compiler(shutil.which('g++')): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'g++', 'gcc' + self.helper_for_compiler('cpp', cb) + + @skip_if_not_language('objc') + @skip_if_env_set('OBJC') + def test_objc_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang', 'clang' + if not is_real_gnu_compiler(shutil.which('gcc')): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'gcc', 'gcc' + self.helper_for_compiler('objc', cb) + + @skip_if_not_language('objcpp') + @skip_if_env_set('OBJCXX') + def test_objcpp_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang++', 'clang' + if not is_real_gnu_compiler(shutil.which('g++')): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'g++', 'gcc' + self.helper_for_compiler('objcpp', cb) + + @skip_if_not_language('d') + @skip_if_env_set('DC') + def test_d_compiler(self): + def cb(comp): + if comp.id == 'dmd': + if shutil.which('ldc'): + return 'ldc', 'ldc' + elif shutil.which('gdc'): + return 'gdc', 'gdc' + else: + raise unittest.SkipTest('No alternative dlang compiler found.') + if shutil.which('dmd'): + return 'dmd', 'dmd' + raise unittest.SkipTest('No alternative dlang compiler found.') + self.helper_for_compiler('d', cb) + + @skip_if_not_language('cs') + @skip_if_env_set('CSC') + def test_cs_compiler(self): + def cb(comp): + if comp.id == 'csc': + if not shutil.which('mcs'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'mcs', 'mcs' + if not shutil.which('csc'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'csc', 'csc' + self.helper_for_compiler('cs', cb) + + @skip_if_not_language('fortran') + @skip_if_env_set('FC') + def test_fortran_compiler(self): + def cb(comp): + if comp.id == 'lcc': + if shutil.which('lfortran'): + return 'lfortran', 'lcc' + raise unittest.SkipTest('No alternate Fortran implementation.') + elif comp.id == 'gcc': + if shutil.which('ifort'): + # There is an ICC for windows 
(windows build, linux host), + # but we don't support that ATM so lets not worry about it. + if is_windows(): + return 'ifort', 'intel-cl' + return 'ifort', 'intel' + elif shutil.which('flang'): + return 'flang', 'flang' + elif shutil.which('pgfortran'): + return 'pgfortran', 'pgi' + # XXX: there are several other fortran compilers meson + # supports, but I don't have any of them to test with + raise unittest.SkipTest('No alternate Fortran implementation.') + if not shutil.which('gfortran'): + raise unittest.SkipTest('No alternate Fortran implementation.') + return 'gfortran', 'gcc' + self.helper_for_compiler('fortran', cb) + + def _single_implementation_compiler(self, lang: str, binary: str, version_str: str, version: str) -> None: + """Helper for languages with a single (supported) implementation. + + Builds a wrapper around the compiler to override the version. + """ + wrapper = self.helper_create_binary_wrapper(binary, version=version_str) + env = get_fake_env() + env.binaries.host.binaries[lang] = [wrapper] + compiler = compiler_from_language(env, lang, MachineChoice.HOST) + self.assertEqual(compiler.version, version) + + @skip_if_not_language('vala') + @skip_if_env_set('VALAC') + def test_vala_compiler(self): + self._single_implementation_compiler( + 'vala', 'valac', 'Vala 1.2345', '1.2345') + + @skip_if_not_language('rust') + @skip_if_env_set('RUSTC') + def test_rust_compiler(self): + self._single_implementation_compiler( + 'rust', 'rustc', 'rustc 1.2345', '1.2345') + + @skip_if_not_language('java') + def test_java_compiler(self): + self._single_implementation_compiler( + 'java', 'javac', 'javac 9.99.77', '9.99.77') + + @skip_if_not_language('swift') + def test_swift_compiler(self): + wrapper = self.helper_create_binary_wrapper( + 'swiftc', version='Swift 1.2345', outfile='stderr', + extra_args={'Xlinker': 'macosx_version. PROJECT:ld - 1.2.3'}) + env = get_fake_env() + env.binaries.host.binaries['swift'] = [wrapper] + compiler = detect_swift_compiler(env, MachineChoice.HOST) + self.assertEqual(compiler.version, '1.2345') + + def test_native_file_dirs(self): + testcase = os.path.join(self.unit_test_dir, '60 native file override') + self.init(testcase, default_args=False, + extra_args=['--native-file', os.path.join(testcase, 'nativefile')]) + + def test_native_file_dirs_overridden(self): + testcase = os.path.join(self.unit_test_dir, '60 native file override') + self.init(testcase, default_args=False, + extra_args=['--native-file', os.path.join(testcase, 'nativefile'), + '-Ddef_libdir=liblib', '-Dlibdir=liblib']) + + def test_compile_sys_path(self): + """Compiling with a native file stored in a system path works. + + There was a bug which caused the paths to be stored incorrectly and + would result in ninja invoking meson in an infinite loop. This tests + for that by actually invoking ninja. 
+ """ + testcase = os.path.join(self.common_test_dir, '1 trivial') + + # It really doesn't matter what's in the native file, just that it exists + config = self.helper_create_native_file({'binaries': {'bash': 'false'}}) + + self.init(testcase, extra_args=['--native-file', config]) + self.build() + + def test_user_options(self): + testcase = os.path.join(self.common_test_dir, '40 options') + for opt, value in [('testoption', 'some other val'), ('other_one', True), + ('combo_opt', 'one'), ('array_opt', ['two']), + ('integer_opt', 0), + ('CaseSenSiTivE', 'SOME other Value'), + ('CASESENSITIVE', 'some other Value')]: + config = self.helper_create_native_file({'project options': {opt: value}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_user_options_command_line_overrides(self): + testcase = os.path.join(self.common_test_dir, '40 options') + config = self.helper_create_native_file({'project options': {'other_one': True}}) + self.init(testcase, extra_args=['--native-file', config, '-Dother_one=false']) + + def test_user_options_subproject(self): + testcase = os.path.join(self.unit_test_dir, '79 user options for subproject') + + s = os.path.join(testcase, 'subprojects') + if not os.path.exists(s): + os.mkdir(s) + s = os.path.join(s, 'sub') + if not os.path.exists(s): + sub = os.path.join(self.common_test_dir, '40 options') + shutil.copytree(sub, s) + + for opt, value in [('testoption', 'some other val'), ('other_one', True), + ('combo_opt', 'one'), ('array_opt', ['two']), + ('integer_opt', 0)]: + config = self.helper_create_native_file({'sub:project options': {opt: value}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_option_bool(self): + # Bools are allowed to be unquoted + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({'built-in options': {'werror': True}}) + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + # Test that no-per subproject options are inherited from the parent + if 'werror' in each['name']: + self.assertEqual(each['value'], True) + break + else: + self.fail('Did not find werror in build options?') + + def test_option_integer(self): + # Bools are allowed to be unquoted + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({'built-in options': {'unity_size': 100}}) + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + # Test that no-per subproject options are inherited from the parent + if 'unity_size' in each['name']: + self.assertEqual(each['value'], 100) + break + else: + self.fail('Did not find unity_size in build options?') + + def test_builtin_options(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_native_file({'built-in options': {'cpp_std': 'c++14'}}) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++14') + break + else: + self.fail('Did not find werror in 
build options?')
+
+    def test_builtin_options_conf_overrides_env(self):
+        testcase = os.path.join(self.common_test_dir, '2 cpp')
+        config = self.helper_create_native_file({'built-in options': {'pkg_config_path': '/foo'}})
+
+        self.init(testcase, extra_args=['--native-file', config], override_envvars={'PKG_CONFIG_PATH': '/bar'})
+        configuration = self.introspect('--buildoptions')
+        for each in configuration:
+            if each['name'] == 'pkg_config_path':
+                self.assertEqual(each['value'], ['/foo'])
+                break
+        else:
+            self.fail('Did not find pkg_config_path in build options?')
+
+    def test_builtin_options_subprojects(self):
+        testcase = os.path.join(self.common_test_dir, '98 subproject subdir')
+        config = self.helper_create_native_file({'built-in options': {'default_library': 'both', 'c_args': ['-Dfoo']}, 'sub:built-in options': {'default_library': 'static'}})
+
+        self.init(testcase, extra_args=['--native-file', config])
+        configuration = self.introspect('--buildoptions')
+        found = 0
+        for each in configuration:
+            # Test that non-per-subproject options are inherited from the parent
+            if 'c_args' in each['name']:
+                # This path will be hit twice, once for build and once for host.
+                self.assertEqual(each['value'], ['-Dfoo'])
+                found += 1
+            elif each['name'] == 'default_library':
+                self.assertEqual(each['value'], 'both')
+                found += 1
+            elif each['name'] == 'sub:default_library':
+                self.assertEqual(each['value'], 'static')
+                found += 1
+        self.assertEqual(found, 4, 'Did not find all expected options')
+
+    def test_builtin_options_subprojects_overrides_buildfiles(self):
+        # If the buildfile says subproject(... default_library: shared), ensure that's overwritten
+        testcase = os.path.join(self.common_test_dir, '223 persubproject options')
+        config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}})
+
+        with self.assertRaises((RuntimeError, subprocess.CalledProcessError)) as cm:
+            self.init(testcase, extra_args=['--native-file', config])
+        if isinstance(cm.exception, RuntimeError):
+            check = str(cm.exception)
+        else:
+            check = cm.exception.stdout
+        self.assertIn('Parent should override default_library', check)
+
+    def test_builtin_options_subprojects_dont_inherits_parent_override(self):
+        # If the buildfile says subproject(...
default_library: shared), ensure that's overwritten + testcase = os.path.join(self.common_test_dir, '223 persubproject options') + config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}}) + self.init(testcase, extra_args=['--native-file', config]) + + def test_builtin_options_compiler_properties(self): + # the properties section can have lang_args, and those need to be + # overwritten by the built-in options + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'c_args': ['-DFOO']}, + 'properties': {'c_args': ['-DBAR']}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'c_args': + self.assertEqual(each['value'], ['-DFOO']) + break + else: + self.fail('Did not find c_args in build options?') + + def test_builtin_options_compiler_properties_legacy(self): + # The legacy placement in properties is still valid if a 'built-in + # options' setting is present, but doesn't have the lang_args + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'default_library': 'static'}, + 'properties': {'c_args': ['-DBAR']}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'c_args': + self.assertEqual(each['value'], ['-DBAR']) + break + else: + self.fail('Did not find c_args in build options?') + + def test_builtin_options_paths(self): + # the properties section can have lang_args, and those need to be + # overwritten by the built-in options + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'bindir': 'foo'}, + 'paths': {'bindir': 'bar'}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'bindir': + self.assertEqual(each['value'], 'foo') + break + else: + self.fail('Did not find bindir in build options?') + + def test_builtin_options_paths_legacy(self): + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'default_library': 'static'}, + 'paths': {'bindir': 'bar'}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'bindir': + self.assertEqual(each['value'], 'bar') + break + else: + self.fail('Did not find bindir in build options?') + + +class CrossFileTests(BasePlatformTests): + + """Tests for cross file functionality not directly related to + cross compiling. + + This is mainly aimed to testing overrides from cross files. 
+ """ + + def setUp(self): + super().setUp() + self.current_config = 0 + self.current_wrapper = 0 + + def _cross_file_generator(self, *, needs_exe_wrapper: bool = False, + exe_wrapper: T.Optional[T.List[str]] = None) -> str: + if is_windows(): + raise unittest.SkipTest('Cannot run this test on non-mingw/non-cygwin windows') + + return textwrap.dedent(f"""\ + [binaries] + c = '{shutil.which('gcc' if is_sunos() else 'cc')}' + ar = '{shutil.which('ar')}' + strip = '{shutil.which('strip')}' + exe_wrapper = {str(exe_wrapper) if exe_wrapper is not None else '[]'} + + [properties] + needs_exe_wrapper = {needs_exe_wrapper} + + [host_machine] + system = 'linux' + cpu_family = 'x86' + cpu = 'i686' + endian = 'little' + """) + + def _stub_exe_wrapper(self) -> str: + return textwrap.dedent('''\ + #!/usr/bin/env python3 + import subprocess + import sys + + sys.exit(subprocess.run(sys.argv[1:]).returncode) + ''') + + def test_needs_exe_wrapper_true(self): + testdir = os.path.join(self.unit_test_dir, '71 cross test passed') + with tempfile.TemporaryDirectory() as d: + p = Path(d) / 'crossfile' + with p.open('wt', encoding='utf-8') as f: + f.write(self._cross_file_generator(needs_exe_wrapper=True)) + self.init(testdir, extra_args=['--cross-file=' + str(p)]) + out = self.run_target('test') + self.assertRegex(out, r'Skipped:\s*1\s*\n') + + def test_needs_exe_wrapper_false(self): + testdir = os.path.join(self.unit_test_dir, '71 cross test passed') + with tempfile.TemporaryDirectory() as d: + p = Path(d) / 'crossfile' + with p.open('wt', encoding='utf-8') as f: + f.write(self._cross_file_generator(needs_exe_wrapper=False)) + self.init(testdir, extra_args=['--cross-file=' + str(p)]) + out = self.run_target('test') + self.assertNotRegex(out, r'Skipped:\s*1\n') + + def test_needs_exe_wrapper_true_wrapper(self): + testdir = os.path.join(self.unit_test_dir, '71 cross test passed') + with tempfile.TemporaryDirectory() as d: + s = Path(d) / 'wrapper.py' + with s.open('wt', encoding='utf-8') as f: + f.write(self._stub_exe_wrapper()) + s.chmod(0o774) + p = Path(d) / 'crossfile' + with p.open('wt', encoding='utf-8') as f: + f.write(self._cross_file_generator( + needs_exe_wrapper=True, + exe_wrapper=[str(s)])) + + self.init(testdir, extra_args=['--cross-file=' + str(p), '-Dexpect=true']) + out = self.run_target('test') + self.assertRegex(out, r'Ok:\s*3\s*\n') + + def test_cross_exe_passed_no_wrapper(self): + testdir = os.path.join(self.unit_test_dir, '71 cross test passed') + with tempfile.TemporaryDirectory() as d: + p = Path(d) / 'crossfile' + with p.open('wt', encoding='utf-8') as f: + f.write(self._cross_file_generator(needs_exe_wrapper=True)) + + self.init(testdir, extra_args=['--cross-file=' + str(p)]) + self.build() + out = self.run_target('test') + self.assertRegex(out, r'Skipped:\s*1\s*\n') + + # The test uses mocking and thus requires that the current process is the + # one to run the Meson steps. If we are using an external test executable + # (most commonly in Debian autopkgtests) then the mocking won't work. 
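The in-process mocking mentioned above relies on unittest.mock.patch.dict to keep environment changes from leaking between tests. A minimal illustration of the technique (the XDG path is an arbitrary example):

import os
from unittest import mock

with mock.patch.dict(os.environ, {'XDG_DATA_HOME': '/tmp/fake-xdg'}):
    # Inside the block the variable is visible and may even be popped again;
    # patch.dict snapshots os.environ and restores it when the block exits.
    assert os.environ['XDG_DATA_HOME'] == '/tmp/fake-xdg'
    os.environ.pop('XDG_DATA_HOME', None)
# After the block the original environment is back, untouched.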
+ @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.') + def test_cross_file_system_paths(self): + if is_windows(): + raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)') + + testdir = os.path.join(self.common_test_dir, '1 trivial') + cross_content = self._cross_file_generator() + with tempfile.TemporaryDirectory() as d: + dir_ = os.path.join(d, 'meson', 'cross') + os.makedirs(dir_) + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + f.write(cross_content) + name = os.path.basename(f.name) + + with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}): + self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) + self.wipe() + + with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}): + os.environ.pop('XDG_DATA_HOME', None) + self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) + self.wipe() + + with tempfile.TemporaryDirectory() as d: + dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross') + os.makedirs(dir_) + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + f.write(cross_content) + name = os.path.basename(f.name) + + # If XDG_DATA_HOME is set in the environment running the + # tests this test will fail, os mock the environment, pop + # it, then test + with mock.patch.dict(os.environ): + os.environ.pop('XDG_DATA_HOME', None) + with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)): + self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) + self.wipe() + + def helper_create_cross_file(self, values): + """Create a config file as a temporary file. + + values should be a nested dictionary structure of {section: {key: + value}} + """ + filename = os.path.join(self.builddir, f'generated{self.current_config}.config') + self.current_config += 1 + with open(filename, 'wt', encoding='utf-8') as f: + for section, entries in values.items(): + f.write(f'[{section}]\n') + for k, v in entries.items(): + f.write(f"{k}={v!r}\n") + return filename + + def test_cross_file_dirs(self): + testcase = os.path.join(self.unit_test_dir, '60 native file override') + self.init(testcase, default_args=False, + extra_args=['--native-file', os.path.join(testcase, 'nativefile'), + '--cross-file', os.path.join(testcase, 'crossfile'), + '-Ddef_bindir=binbar', + '-Ddef_datadir=databar', + '-Ddef_includedir=includebar', + '-Ddef_infodir=infobar', + '-Ddef_libdir=libbar', + '-Ddef_libexecdir=libexecbar', + '-Ddef_localedir=localebar', + '-Ddef_localstatedir=localstatebar', + '-Ddef_mandir=manbar', + '-Ddef_sbindir=sbinbar', + '-Ddef_sharedstatedir=sharedstatebar', + '-Ddef_sysconfdir=sysconfbar']) + + def test_cross_file_dirs_overridden(self): + testcase = os.path.join(self.unit_test_dir, '60 native file override') + self.init(testcase, default_args=False, + extra_args=['--native-file', os.path.join(testcase, 'nativefile'), + '--cross-file', os.path.join(testcase, 'crossfile'), + '-Ddef_libdir=liblib', '-Dlibdir=liblib', + '-Ddef_bindir=binbar', + '-Ddef_datadir=databar', + '-Ddef_includedir=includebar', + '-Ddef_infodir=infobar', + '-Ddef_libexecdir=libexecbar', + '-Ddef_localedir=localebar', + '-Ddef_localstatedir=localstatebar', + '-Ddef_mandir=manbar', + '-Ddef_sbindir=sbinbar', + '-Ddef_sharedstatedir=sharedstatebar', + '-Ddef_sysconfdir=sysconfbar']) + + def test_cross_file_dirs_chain(self): + # crossfile2 overrides crossfile overrides nativefile + testcase = os.path.join(self.unit_test_dir, '60 native file override') + 
self.init(testcase, default_args=False, + extra_args=['--native-file', os.path.join(testcase, 'nativefile'), + '--cross-file', os.path.join(testcase, 'crossfile'), + '--cross-file', os.path.join(testcase, 'crossfile2'), + '-Ddef_bindir=binbar2', + '-Ddef_datadir=databar', + '-Ddef_includedir=includebar', + '-Ddef_infodir=infobar', + '-Ddef_libdir=libbar', + '-Ddef_libexecdir=libexecbar', + '-Ddef_localedir=localebar', + '-Ddef_localstatedir=localstatebar', + '-Ddef_mandir=manbar', + '-Ddef_sbindir=sbinbar', + '-Ddef_sharedstatedir=sharedstatebar', + '-Ddef_sysconfdir=sysconfbar']) + + def test_user_options(self): + # This is just a touch test for cross file, since the implementation + # shares code after loading from the files + testcase = os.path.join(self.common_test_dir, '40 options') + config = self.helper_create_cross_file({'project options': {'testoption': 'some other value'}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--cross-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_builtin_options(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_cross_file({'built-in options': {'cpp_std': 'c++14'}}) + + self.init(testcase, extra_args=['--cross-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++14') + break + else: + self.fail('No c++ standard set?') + + def test_builtin_options_per_machine(self): + """Test options that are allowed to be set on a per-machine basis. + + Such options could be passed twice, once for the build machine, and + once for the host machine. I've picked pkg-config path, but any would + do that can be set for both. 
+ """ + testcase = os.path.join(self.common_test_dir, '2 cpp') + cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross/path', 'cpp_std': 'c++17'}}) + native = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native/path', 'cpp_std': 'c++14'}}) + + # Ensure that PKG_CONFIG_PATH is not set in the environment + with mock.patch.dict('os.environ'): + for k in ['PKG_CONFIG_PATH', 'PKG_CONFIG_PATH_FOR_BUILD']: + try: + del os.environ[k] + except KeyError: + pass + self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native]) + + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/cross/path']) + found += 1 + elif each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++17') + found += 1 + elif each['name'] == 'build.pkg_config_path': + self.assertEqual(each['value'], ['/native/path']) + found += 1 + elif each['name'] == 'build.cpp_std': + self.assertEqual(each['value'], 'c++14') + found += 1 + + if found == 4: + break + self.assertEqual(found, 4, 'Did not find all sections.') + + def test_builtin_options_conf_overrides_env(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native', 'cpp_args': '-DFILE'}}) + cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross', 'cpp_args': '-DFILE'}}) + + self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross], + override_envvars={'PKG_CONFIG_PATH': '/bar', 'PKG_CONFIG_PATH_FOR_BUILD': '/dir', + 'CXXFLAGS': '-DENV', 'CXXFLAGS_FOR_BUILD': '-DENV'}) + configuration = self.introspect('--buildoptions') + found = 0 + expected = 4 + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/cross']) + found += 1 + elif each['name'] == 'build.pkg_config_path': + self.assertEqual(each['value'], ['/native']) + found += 1 + elif each['name'].endswith('cpp_args'): + self.assertEqual(each['value'], ['-DFILE']) + found += 1 + if found == expected: + break + self.assertEqual(found, expected, 'Did not find all sections.') + + def test_for_build_env_vars(self) -> None: + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_cross_file({'built-in options': {}}) + cross = self.helper_create_cross_file({'built-in options': {}}) + + self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross], + override_envvars={'PKG_CONFIG_PATH': '/bar', 'PKG_CONFIG_PATH_FOR_BUILD': '/dir'}) + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/bar']) + found += 1 + elif each['name'] == 'build.pkg_config_path': + self.assertEqual(each['value'], ['/dir']) + found += 1 + if found == 2: + break + self.assertEqual(found, 2, 'Did not find all sections.') + + def test_project_options_native_only(self) -> None: + # Do not load project options from a native file when doing a cross + # build + testcase = os.path.join(self.unit_test_dir, '19 array option') + config = self.helper_create_cross_file({'project options': {'list': ['bar', 'foo']}}) + cross = self.helper_create_cross_file({'binaries': {}}) + + self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross]) + configuration = self.introspect('--buildoptions') + for each in 
configuration: + if each['name'] == 'list': + self.assertEqual(each['value'], ['foo', 'bar']) + break + else: + self.fail('Did not find expected option.') + + +class TAPParserTests(unittest.TestCase): + def assert_test(self, events, **kwargs): + if 'explanation' not in kwargs: + kwargs['explanation'] = None + self.assertEqual(next(events), TAPParser.Test(**kwargs)) + + def assert_plan(self, events, **kwargs): + if 'skipped' not in kwargs: + kwargs['skipped'] = False + if 'explanation' not in kwargs: + kwargs['explanation'] = None + self.assertEqual(next(events), TAPParser.Plan(**kwargs)) + + def assert_version(self, events, **kwargs): + self.assertEqual(next(events), TAPParser.Version(**kwargs)) + + def assert_error(self, events): + self.assertEqual(type(next(events)), TAPParser.Error) + + def assert_bailout(self, events, **kwargs): + self.assertEqual(next(events), TAPParser.Bailout(**kwargs)) + + def assert_last(self, events): + with self.assertRaises(StopIteration): + next(events) + + def parse_tap(self, s): + parser = TAPParser() + return iter(parser.parse(io.StringIO(s))) + + def parse_tap_v13(self, s): + events = self.parse_tap('TAP version 13\n' + s) + self.assert_version(events, version=13) + return events + + def test_empty(self): + events = self.parse_tap('') + self.assert_last(events) + + def test_empty_plan(self): + events = self.parse_tap('1..0') + self.assert_plan(events, num_tests=0, late=False, skipped=True) + self.assert_last(events) + + def test_plan_directive(self): + events = self.parse_tap('1..0 # skipped for some reason') + self.assert_plan(events, num_tests=0, late=False, skipped=True, + explanation='for some reason') + self.assert_last(events) + + events = self.parse_tap('1..1 # skipped for some reason\nok 1') + self.assert_error(events) + self.assert_plan(events, num_tests=1, late=False, skipped=True, + explanation='for some reason') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + events = self.parse_tap('1..1 # todo not supported here\nok 1') + self.assert_error(events) + self.assert_plan(events, num_tests=1, late=False, skipped=False, + explanation='not supported here') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + def test_one_test_ok(self): + events = self.parse_tap('ok') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + def test_one_test_with_number(self): + events = self.parse_tap('ok 1') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + def test_one_test_with_name(self): + events = self.parse_tap('ok 1 abc') + self.assert_test(events, number=1, name='abc', result=TestResult.OK) + self.assert_last(events) + + def test_one_test_not_ok(self): + events = self.parse_tap('not ok') + self.assert_test(events, number=1, name='', result=TestResult.FAIL) + self.assert_last(events) + + def test_one_test_todo(self): + events = self.parse_tap('not ok 1 abc # TODO') + self.assert_test(events, number=1, name='abc', result=TestResult.EXPECTEDFAIL) + self.assert_last(events) + + events = self.parse_tap('ok 1 abc # TODO') + self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS) + self.assert_last(events) + + def test_one_test_skip(self): + events = self.parse_tap('ok 1 abc # SKIP') + self.assert_test(events, number=1, name='abc', result=TestResult.SKIP) + self.assert_last(events) + + def test_one_test_skip_failure(self): + events = self.parse_tap('not ok 
1 abc # SKIP') + self.assert_test(events, number=1, name='abc', result=TestResult.FAIL) + self.assert_last(events) + + def test_many_early_plan(self): + events = self.parse_tap('1..4\nok 1\nnot ok 2\nok 3\nnot ok 4') + self.assert_plan(events, num_tests=4, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + self.assert_test(events, number=3, name='', result=TestResult.OK) + self.assert_test(events, number=4, name='', result=TestResult.FAIL) + self.assert_last(events) + + def test_many_late_plan(self): + events = self.parse_tap('ok 1\nnot ok 2\nok 3\nnot ok 4\n1..4') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + self.assert_test(events, number=3, name='', result=TestResult.OK) + self.assert_test(events, number=4, name='', result=TestResult.FAIL) + self.assert_plan(events, num_tests=4, late=True) + self.assert_last(events) + + def test_directive_case(self): + events = self.parse_tap('ok 1 abc # skip') + self.assert_test(events, number=1, name='abc', result=TestResult.SKIP) + self.assert_last(events) + + events = self.parse_tap('ok 1 abc # ToDo') + self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS) + self.assert_last(events) + + def test_directive_explanation(self): + events = self.parse_tap('ok 1 abc # skip why') + self.assert_test(events, number=1, name='abc', result=TestResult.SKIP, + explanation='why') + self.assert_last(events) + + events = self.parse_tap('ok 1 abc # ToDo Because') + self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS, + explanation='Because') + self.assert_last(events) + + def test_one_test_early_plan(self): + events = self.parse_tap('1..1\nok') + self.assert_plan(events, num_tests=1, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + def test_one_test_late_plan(self): + events = self.parse_tap('ok\n1..1') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_plan(events, num_tests=1, late=True) + self.assert_last(events) + + def test_out_of_order(self): + events = self.parse_tap('ok 2') + self.assert_error(events) + self.assert_test(events, number=2, name='', result=TestResult.OK) + self.assert_last(events) + + def test_middle_plan(self): + events = self.parse_tap('ok 1\n1..2\nok 2') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_plan(events, num_tests=2, late=True) + self.assert_error(events) + self.assert_test(events, number=2, name='', result=TestResult.OK) + self.assert_last(events) + + def test_too_many_plans(self): + events = self.parse_tap('1..1\n1..2\nok 1') + self.assert_plan(events, num_tests=1, late=False) + self.assert_error(events) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + def test_too_many(self): + events = self.parse_tap('ok 1\nnot ok 2\n1..1') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + self.assert_plan(events, num_tests=1, late=True) + self.assert_error(events) + self.assert_last(events) + + events = self.parse_tap('1..1\nok 1\nnot ok 2') + self.assert_plan(events, num_tests=1, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + 
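For readers unfamiliar with TAP, the parser under test consumes a plain-text stream consisting of a plan line plus one result line per test, with optional directives such as SKIP and TODO. A small example of driving it directly, mirroring the parse_tap() helper above; the TAP text itself is invented, and TAPParser/TestResult are the same objects already imported by this file:

import io

parser = TAPParser()
for event in parser.parse(io.StringIO('1..2\nok 1 first\nnot ok 2 second # TODO flaky')):
    # Yields a Plan(num_tests=2, ...) followed by two Test events whose
    # results are TestResult.OK and TestResult.EXPECTEDFAIL respectively.
    print(event)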
self.assert_error(events) + self.assert_last(events) + + def test_too_few(self): + events = self.parse_tap('ok 1\nnot ok 2\n1..3') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + self.assert_plan(events, num_tests=3, late=True) + self.assert_error(events) + self.assert_last(events) + + events = self.parse_tap('1..3\nok 1\nnot ok 2') + self.assert_plan(events, num_tests=3, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + self.assert_error(events) + self.assert_last(events) + + def test_too_few_bailout(self): + events = self.parse_tap('1..3\nok 1\nnot ok 2\nBail out! no third test') + self.assert_plan(events, num_tests=3, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + self.assert_bailout(events, message='no third test') + self.assert_last(events) + + def test_diagnostics(self): + events = self.parse_tap('1..1\n# ignored\nok 1') + self.assert_plan(events, num_tests=1, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + events = self.parse_tap('# ignored\n1..1\nok 1\n# ignored too') + self.assert_plan(events, num_tests=1, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + events = self.parse_tap('# ignored\nok 1\n1..1\n# ignored too') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_plan(events, num_tests=1, late=True) + self.assert_last(events) + + def test_empty_line(self): + events = self.parse_tap('1..1\n\nok 1') + self.assert_plan(events, num_tests=1, late=False) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + def test_unexpected(self): + events = self.parse_tap('1..1\ninvalid\nok 1') + self.assert_plan(events, num_tests=1, late=False) + self.assert_error(events) + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_last(events) + + def test_version(self): + events = self.parse_tap('TAP version 13\n') + self.assert_version(events, version=13) + self.assert_last(events) + + events = self.parse_tap('TAP version 12\n') + self.assert_error(events) + self.assert_last(events) + + events = self.parse_tap('1..0\nTAP version 13\n') + self.assert_plan(events, num_tests=0, late=False, skipped=True) + self.assert_error(events) + self.assert_last(events) + + def test_yaml(self): + events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def\n ...\nok 2') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_test(events, number=2, name='', result=TestResult.OK) + self.assert_last(events) + + events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_error(events) + self.assert_last(events) + + events = self.parse_tap_v13('ok 1\n ---\n foo: abc\n bar: def\nnot ok 2') + self.assert_test(events, number=1, name='', result=TestResult.OK) + self.assert_error(events) + self.assert_test(events, number=2, name='', result=TestResult.FAIL) + self.assert_last(events) + +class SubprojectsCommandTests(BasePlatformTests): + def setUp(self): + super().setUp() + self.root_dir = Path(self.builddir) + + self.project_dir = self.root_dir / 'src' + self._create_project(self.project_dir) + + 
self.subprojects_dir = self.project_dir / 'subprojects' + os.makedirs(str(self.subprojects_dir)) + self.packagecache_dir = self.subprojects_dir / 'packagecache' + os.makedirs(str(self.packagecache_dir)) + + def _create_project(self, path, project_name='dummy'): + os.makedirs(str(path), exist_ok=True) + with open(str(path / 'meson.build'), 'w', encoding='utf-8') as f: + f.write(f"project('{project_name}')") + + def _git(self, cmd, workdir): + return git(cmd, str(workdir), check=True)[1].strip() + + def _git_config(self, workdir): + self._git(['config', 'user.name', 'Meson Test'], workdir) + self._git(['config', 'user.email', 'meson.test@example.com'], workdir) + + def _git_remote(self, cmd, name): + return self._git(cmd, self.root_dir / name) + + def _git_local(self, cmd, name): + return self._git(cmd, self.subprojects_dir / name) + + def _git_local_branch(self, name): + # Same as `git branch --show-current` but compatible with older git version + branch = self._git_local(['rev-parse', '--abbrev-ref', 'HEAD'], name) + return branch if branch != 'HEAD' else '' + + def _git_local_commit(self, name, ref='HEAD'): + return self._git_local(['rev-parse', ref], name) + + def _git_remote_commit(self, name, ref='HEAD'): + return self._git_remote(['rev-parse', ref], name) + + def _git_create_repo(self, path): + # If a user has git configuration init.defaultBranch set we want to override that + with tempfile.TemporaryDirectory() as d: + out = git(['--version'], str(d))[1] + if version_compare(search_version(out), '>= 2.28'): + extra_cmd = ['--initial-branch', 'master'] + else: + extra_cmd = [] + + self._create_project(path) + self._git(['init'] + extra_cmd, path) + self._git_config(path) + self._git(['add', '.'], path) + self._git(['commit', '-m', 'Initial commit'], path) + + def _git_create_remote_repo(self, name): + self._git_create_repo(self.root_dir / name) + + def _git_create_local_repo(self, name): + self._git_create_repo(self.subprojects_dir / name) + + def _git_create_remote_commit(self, name, branch): + self._git_remote(['checkout', branch], name) + self._git_remote(['commit', '--allow-empty', '-m', f'initial {branch} commit'], name) + + def _git_create_remote_branch(self, name, branch): + self._git_remote(['checkout', '-b', branch], name) + self._git_remote(['commit', '--allow-empty', '-m', f'initial {branch} commit'], name) + + def _git_create_remote_tag(self, name, tag): + self._git_remote(['commit', '--allow-empty', '-m', f'tag {tag} commit'], name) + self._git_remote(['tag', tag], name) + + def _wrap_create_git(self, name, revision='master'): + path = self.root_dir / name + with open(str((self.subprojects_dir / name).with_suffix('.wrap')), 'w', encoding='utf-8') as f: + f.write(textwrap.dedent( + ''' + [wrap-git] + url={} + revision={} + '''.format(os.path.abspath(str(path)), revision))) + + def _wrap_create_file(self, name, tarball='dummy.tar.gz'): + path = self.root_dir / tarball + with open(str((self.subprojects_dir / name).with_suffix('.wrap')), 'w', encoding='utf-8') as f: + f.write(textwrap.dedent( + f''' + [wrap-file] + source_url={os.path.abspath(str(path))} + source_filename={tarball} + ''')) + Path(self.packagecache_dir / tarball).touch() + + def _subprojects_cmd(self, args): + return self._run(self.meson_command + ['subprojects'] + args, workdir=str(self.project_dir)) + + def test_git_update(self): + subp_name = 'sub1' + + # Create a fake remote git repository and a wrap file. Checks that + # "meson subprojects download" works. 
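A note on the version probe in _git_create_repo() above: search_version() pulls a version string out of arbitrary tool output and version_compare() evaluates it against a constraint, which is how the helper decides whether git understands --initial-branch. A small illustration with a made-up git banner (both helpers are already imported by this file):

out = 'git version 2.30.1'                        # example output, not a captured run
assert search_version(out) == '2.30.1'            # extracts the version number
assert version_compare(search_version(out), '>= 2.28')
assert not version_compare('2.27.0', '>= 2.28')   # older git falls back to the default branch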
+ self._git_create_remote_repo(subp_name) + self._wrap_create_git(subp_name) + self._subprojects_cmd(['download']) + self.assertPathExists(str(self.subprojects_dir / subp_name)) + self._git_config(self.subprojects_dir / subp_name) + + # Create a new remote branch and update the wrap file. Checks that + # "meson subprojects update --reset" checkout the new branch. + self._git_create_remote_branch(subp_name, 'newbranch') + self._wrap_create_git(subp_name, 'newbranch') + self._subprojects_cmd(['update', '--reset']) + self.assertEqual(self._git_local_branch(subp_name), 'newbranch') + self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch')) + + # Update remote newbranch. Checks the new commit is pulled into existing + # local newbranch. Make sure it does not print spurious 'git stash' message. + self._git_create_remote_commit(subp_name, 'newbranch') + out = self._subprojects_cmd(['update', '--reset']) + self.assertNotIn('No local changes to save', out) + self.assertEqual(self._git_local_branch(subp_name), 'newbranch') + self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch')) + + # Update remote newbranch and switch to another branch. Checks that it + # switch current branch to newbranch and pull latest commit. + self._git_local(['checkout', 'master'], subp_name) + self._git_create_remote_commit(subp_name, 'newbranch') + self._subprojects_cmd(['update', '--reset']) + self.assertEqual(self._git_local_branch(subp_name), 'newbranch') + self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch')) + + # Stage some local changes then update. Checks that local changes got + # stashed. + self._create_project(self.subprojects_dir / subp_name, 'new_project_name') + self._git_local(['add', '.'], subp_name) + self._git_create_remote_commit(subp_name, 'newbranch') + self._subprojects_cmd(['update', '--reset']) + self.assertEqual(self._git_local_branch(subp_name), 'newbranch') + self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newbranch')) + self.assertTrue(self._git_local(['stash', 'list'], subp_name)) + + # Create a new remote tag and update the wrap file. Checks that + # "meson subprojects update --reset" checkout the new tag in detached mode. + self._git_create_remote_tag(subp_name, 'newtag') + self._wrap_create_git(subp_name, 'newtag') + self._subprojects_cmd(['update', '--reset']) + self.assertEqual(self._git_local_branch(subp_name), '') + self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name, 'newtag')) + + # Create a new remote commit and update the wrap file with the commit id. + # Checks that "meson subprojects update --reset" checkout the new commit + # in detached mode. + self._git_local(['checkout', 'master'], subp_name) + self._git_create_remote_commit(subp_name, 'newbranch') + new_commit = self._git_remote(['rev-parse', 'HEAD'], subp_name) + self._wrap_create_git(subp_name, new_commit) + self._subprojects_cmd(['update', '--reset']) + self.assertEqual(self._git_local_branch(subp_name), '') + self.assertEqual(self._git_local_commit(subp_name), new_commit) + + # Create a local project not in a git repository, then update it with + # a git wrap. Without --reset it should print error message and return + # failure. With --reset it should delete existing project and clone the + # new project. 
+ subp_name = 'sub2' + self._create_project(self.subprojects_dir / subp_name) + self._git_create_remote_repo(subp_name) + self._wrap_create_git(subp_name) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self._subprojects_cmd(['update']) + self.assertIn('Not a git repository', cm.exception.output) + self._subprojects_cmd(['update', '--reset']) + self.assertEqual(self._git_local_commit(subp_name), self._git_remote_commit(subp_name)) + + @skipIfNoExecutable('true') + def test_foreach(self): + self._create_project(self.subprojects_dir / 'sub_file') + self._wrap_create_file('sub_file') + self._git_create_local_repo('sub_git') + self._wrap_create_git('sub_git') + self._git_create_local_repo('sub_git_no_wrap') + + def ran_in(s): + ret = [] + prefix = 'Executing command in ' + for l in s.splitlines(): + if l.startswith(prefix): + ret.append(l[len(prefix):]) + return sorted(ret) + + dummy_cmd = ['true'] + out = self._subprojects_cmd(['foreach'] + dummy_cmd) + self.assertEqual(ran_in(out), sorted(['subprojects/sub_file', 'subprojects/sub_git', 'subprojects/sub_git_no_wrap'])) + out = self._subprojects_cmd(['foreach', '--types', 'git,file'] + dummy_cmd) + self.assertEqual(ran_in(out), sorted(['subprojects/sub_file', 'subprojects/sub_git'])) + out = self._subprojects_cmd(['foreach', '--types', 'file'] + dummy_cmd) + self.assertEqual(ran_in(out), ['subprojects/sub_file']) + out = self._subprojects_cmd(['foreach', '--types', 'git'] + dummy_cmd) + self.assertEqual(ran_in(out), ['subprojects/sub_git']) + + def test_purge(self): + self._create_project(self.subprojects_dir / 'sub_file') + self._wrap_create_file('sub_file') + self._git_create_local_repo('sub_git') + self._wrap_create_git('sub_git') + + sub_file_subprojects_dir = self.subprojects_dir / 'sub_file' / 'subprojects' + sub_file_subprojects_dir.mkdir(exist_ok=True, parents=True) + real_dir = Path('sub_file') / 'subprojects' / 'real' + + self._wrap_create_file(real_dir, tarball='dummy2.tar.gz') + + with open(str((self.subprojects_dir / 'redirect').with_suffix('.wrap')), 'w', encoding='utf-8') as f: + f.write(textwrap.dedent( + f''' + [wrap-redirect] + filename = {real_dir}.wrap + ''')) + + def deleting(s: str) -> T.List[str]: + ret = [] + prefix = 'Deleting ' + for l in s.splitlines(): + if l.startswith(prefix): + ret.append(l[len(prefix):]) + return sorted(ret) + + out = self._subprojects_cmd(['purge']) + self.assertEqual(deleting(out), sorted([ + str(self.subprojects_dir / 'redirect.wrap'), + str(self.subprojects_dir / 'sub_file'), + str(self.subprojects_dir / 'sub_git'), + ])) + out = self._subprojects_cmd(['purge', '--include-cache']) + self.assertEqual(deleting(out), sorted([ + str(self.subprojects_dir / 'sub_git'), + str(self.subprojects_dir / 'redirect.wrap'), + str(self.subprojects_dir / 'packagecache' / 'dummy.tar.gz'), + str(self.subprojects_dir / 'packagecache' / 'dummy2.tar.gz'), + str(self.subprojects_dir / 'sub_file'), + ])) + out = self._subprojects_cmd(['purge', '--include-cache', '--confirm']) + self.assertEqual(deleting(out), sorted([ + str(self.subprojects_dir / 'sub_git'), + str(self.subprojects_dir / 'redirect.wrap'), + str(self.subprojects_dir / 'packagecache' / 'dummy.tar.gz'), + str(self.subprojects_dir / 'packagecache' / 'dummy2.tar.gz'), + str(self.subprojects_dir / 'sub_file'), + ])) + self.assertFalse(Path(self.subprojects_dir / 'packagecache' / 'dummy.tar.gz').exists()) + self.assertFalse(Path(self.subprojects_dir / 'sub_file').exists()) + self.assertFalse(Path(self.subprojects_dir / 'sub_git').exists()) 
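For context on what test_purge() further down is cleaning up, the wrap files written by _wrap_create_file() and the redirect wrap are small INI documents. The sketch below spells the two flavours out with placeholder values; the URL and paths are illustrative only:

import textwrap

# [wrap-file] fetches a tarball that is cached under subprojects/packagecache/.
wrap_file = textwrap.dedent('''\
    [wrap-file]
    source_url=https://example.com/dummy.tar.gz
    source_filename=dummy.tar.gz
    ''')

# [wrap-redirect] merely points at another .wrap file, which is why purge
# deletes the redirect itself and follows it to find what else to remove.
wrap_redirect = textwrap.dedent('''\
    [wrap-redirect]
    filename = sub_file/subprojects/real.wrap
    ''')

print(wrap_file)
print(wrap_redirect)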
+ self.assertFalse(Path(self.subprojects_dir / 'redirect.wrap').exists()) + +def _clang_at_least(compiler: 'Compiler', minver: str, apple_minver: T.Optional[str]) -> bool: + """ + check that Clang compiler is at least a specified version, whether AppleClang or regular Clang + + Parameters + ---------- + compiler: + Meson compiler object + minver: str + Clang minimum version + apple_minver: str + AppleCLang minimum version + + Returns + ------- + at_least: bool + Clang is at least the specified version + """ + if isinstance(compiler, (mesonbuild.compilers.AppleClangCCompiler, + mesonbuild.compilers.AppleClangCPPCompiler)): + if apple_minver is None: + return False + return version_compare(compiler.version, apple_minver) + return version_compare(compiler.version, minver) + + +def unset_envs(): + # For unit tests we must fully control all command lines + # so that there are no unexpected changes coming from the + # environment, for example when doing a package build. + varnames = ['CPPFLAGS', 'LDFLAGS'] + list(mesonbuild.compilers.compilers.CFLAGS_MAPPING.values()) + for v in varnames: + if v in os.environ: + del os.environ[v] + +def convert_args(argv): + # If we got passed a list of tests, pass it on + pytest_args = ['-v'] if '-v' in argv else [] + test_list = [] + for arg in argv: + if arg.startswith('-'): + if arg in ('-f', '--failfast'): + arg = '--exitfirst' + pytest_args.append(arg) + continue + # ClassName.test_name => 'ClassName and test_name' + if '.' in arg: + arg = ' and '.join(arg.split('.')) + test_list.append(arg) + if test_list: + pytest_args += ['-k', ' or '.join(test_list)] + return pytest_args + +def running_single_tests(argv, cases): + ''' + Check whether we only got arguments for running individual tests, not + entire testcases, and not all testcases (no test args). + ''' + got_test_arg = False + for arg in argv: + if arg.startswith('-'): + continue + for case in cases: + if not arg.startswith(case): + continue + if '.' not in arg: + # Got a testcase, done + return False + got_test_arg = True + return got_test_arg + +def setup_backend(): + filtered = [] + be = 'ninja' + for a in sys.argv: + if a.startswith('--backend'): + be = a.split('=')[1] + else: + filtered.append(a) + # Since we invoke the tests via unittest or xtest test runner + # we need to pass the backend to use to the spawned process via + # this side channel. Yes it sucks, but at least is is fully + # internal to this file. + os.environ['MESON_UNIT_TEST_BACKEND'] = be + sys.argv = filtered + +def main(): + unset_envs() + setup_backend() + cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests', + 'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests', + 'TAPParserTests', 'SubprojectsCommandTests', + + 'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests', + 'WindowsTests', 'DarwinTests'] + + try: + import pytest # noqa: F401 + # Need pytest-xdist for `-n` arg + import xdist # noqa: F401 + pytest_args = [] + # Don't use pytest-xdist when running single unit tests since it wastes + # time spawning a lot of processes to distribute tests to in that case. + if not running_single_tests(sys.argv, cases): + pytest_args += ['-n', 'auto'] + # Let there be colors! 
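To make the argument translation in convert_args() above concrete, here is what it produces for a typical selection; the test names are invented for the example:

# 'ClassName.test_name' becomes a pytest -k expression; bare class names pass through.
selection = ['AllPlatformTests.test_install', 'InternalTests']
assert convert_args(selection) == ['-k', 'AllPlatformTests and test_install or InternalTests']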
+ if 'CI' in os.environ: + pytest_args += ['--color=yes'] + pytest_args += ['./run_unittests.py'] + pytest_args += convert_args(sys.argv[1:]) + # Always disable pytest-cov because we use a custom setup + try: + import pytest_cov # noqa: F401 + print('Disabling pytest-cov') + pytest_args += ['-p' 'no:cov'] + except ImportError: + pass + return subprocess.run(python_command + ['-m', 'pytest'] + pytest_args).returncode + except ImportError: + print('pytest-xdist not found, using unittest instead') + # Fallback to plain unittest. + return unittest.main(defaultTest=cases, buffer=True) + +if __name__ == '__main__': + setup_vsenv() + print('Meson build system', mesonbuild.coredata.version, 'Unit Tests') + start = time.monotonic() + try: + raise SystemExit(main()) + finally: + print('Total time: {:.3f} seconds'.format(time.monotonic() - start)) diff --git a/meson/setup.cfg b/meson/setup.cfg new file mode 100644 index 000000000..7c49b71c6 --- /dev/null +++ b/meson/setup.cfg @@ -0,0 +1,57 @@ +[metadata] +name = meson +version = attr: mesonbuild.coredata.version +description = A high performance build system +author = Jussi Pakkanen +author_email = jpakkane@gmail.com +url = https://mesonbuild.com +keywords = + meson + mesonbuild + build system + cmake +license = Apache License, Version 2.0 +license_file = COPYING +classifiers = + Development Status :: 5 - Production/Stable + Environment :: Console + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + Natural Language :: English + Operating System :: MacOS :: MacOS X + Operating System :: Microsoft :: Windows + Operating System :: POSIX :: BSD + Operating System :: POSIX :: Linux + Programming Language :: Python :: 3 :: Only + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Topic :: Software Development :: Build Tools +long_description = Meson is a cross-platform build system designed to be both as fast and as user friendly as possible. It supports many languages and compilers, including GCC, Clang, PGI, Intel, and Visual Studio. Its build definitions are written in a simple non-Turing complete DSL. + +[options] +packages = find: +python_requires = >= 3.6 +setup_requires = + setuptools + +[options.entry_points] +console_scripts = + meson = mesonbuild.mesonmain:main + +[options.extras_require] +progress = + tqdm +typing = + mypy + typing_extensions; python_version <"3.8" + +[options.packages.find] +include = mesonbuild, mesonbuild.* +exclude = *.data + +[tool:pytest] +python_classes = +python_files = + run_unittests.py diff --git a/meson/setup.py b/meson/setup.py new file mode 100644 index 000000000..976afb234 --- /dev/null +++ b/meson/setup.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 + +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys + +if sys.version_info < (3, 6): + raise SystemExit('ERROR: Tried to install Meson with an unsupported Python version: \n{}' + '\nMeson requires Python 3.6.0 or greater'.format(sys.version)) + +from setuptools import setup + +data_files = [] +if sys.platform != 'win32': + # Only useful on UNIX-like systems + data_files = [('share/man/man1', ['man/meson.1']), + ('share/polkit-1/actions', ['data/com.mesonbuild.install.policy'])] + +setup(data_files=data_files,) diff --git a/meson/sider.yml b/meson/sider.yml new file mode 100644 index 000000000..5c5619692 --- /dev/null +++ b/meson/sider.yml @@ -0,0 +1,7 @@ +linter: + flake8: + version: 3 + plugins: + - flake8-blind-except + - flake8-builtins + - flake8-bugbear diff --git a/meson/skip_ci.py b/meson/skip_ci.py new file mode 100755 index 000000000..7411d5763 --- /dev/null +++ b/meson/skip_ci.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import argparse +import os +import subprocess +import sys +import traceback + + +def check_pr(is_pr_env): + if is_pr_env not in os.environ: + print(f'This is not pull request: {is_pr_env} is not set') + sys.exit() + elif os.environ[is_pr_env] == 'false': + print(f'This is not pull request: {is_pr_env} is false') + sys.exit() + + +def get_base_branch(base_env): + if base_env not in os.environ: + print(f'Unable to determine base branch: {base_env} is not set') + sys.exit() + return os.environ[base_env] + + +def get_git_files(base): + diff = subprocess.check_output(['git', 'diff', '--name-only', base + '...HEAD']) + return diff.strip().split(b'\n') + + +def is_documentation(filename): + return filename.startswith(b'docs/') + + +def main(): + try: + parser = argparse.ArgumentParser(description='CI Skipper') + parser.add_argument('--base-branch-env', required=True, + help='Branch push is targeted to') + parser.add_argument('--is-pull-env', required=True, + help='Variable set if it is a PR') + parser.add_argument('--base-branch-origin', action='store_true', + help='Base branch reference is only in origin remote') + args = parser.parse_args() + check_pr(args.is_pull_env) + base = get_base_branch(args.base_branch_env) + if args.base_branch_origin: + base = 'origin/' + base + if all(is_documentation(f) for f in get_git_files(base)): + print("Don't run CI for documentation-only changes, add '[skip ci]' to commit title.") + print('See http://mesonbuild.com/Contributing.html#skipping-integration-tests') + sys.exit(1) + except Exception: + # If this script fails we want build to proceed. + # Failure likely means some corner case we did not consider or bug. + # Either case this should not prevent CI from running if it is needed, + # and we tolerate it if it is run where it is not required. 
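To illustrate the docs-only detection in skip_ci.py above (the file names are examples, not real diff output):

changed = [b'docs/markdown/Usage.md', b'docs/README.md']
assert all(is_documentation(f) for f in changed)        # documentation only -> CI is skipped
changed.append(b'mesonbuild/coredata.py')
assert not all(is_documentation(f) for f in changed)    # code changed -> CI must run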
+ traceback.print_exc() + print('There is a BUG in skip_ci.py, exiting.') + sys.exit() + +if __name__ == '__main__': + main() diff --git a/meson/test cases/cmake/1 basic/main.cpp b/meson/test cases/cmake/1 basic/main.cpp new file mode 100644 index 000000000..95079615a --- /dev/null +++ b/meson/test cases/cmake/1 basic/main.cpp @@ -0,0 +1,10 @@ +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/1 basic/meson.build b/meson/test cases/cmake/1 basic/meson.build new file mode 100644 index 000000000..246473904 --- /dev/null +++ b/meson/test cases/cmake/1 basic/meson.build @@ -0,0 +1,14 @@ +project('cmakeSubTest', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib++', include_type: 'system') + +assert(sub_pro.found(), 'found() method reports not found, but should be found') +assert(sub_pro.target_list() == ['cmModLib++'], 'There should be exactly one target') +assert(sub_pro.target_type('cmModLib++') == 'shared_library', 'Target type should be shared_library') +assert(sub_dep.include_type() == 'system', 'the include_type kwarg of dependency() works') + +exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..2197667a3 --- /dev/null +++ b/meson/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,20 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set (CMAKE_CXX_STANDARD 14) + +include_directories(${CMAKE_CURRENT_BINARY_DIR}) + +add_definitions("-DDO_NOTHING_JUST_A_FLAG=1") + +add_library(cmModLib++ SHARED cmMod.cpp) +target_compile_definitions(cmModLib++ PRIVATE MESON_MAGIC_FLAG=21) +target_compile_definitions(cmModLib++ INTERFACE MESON_MAGIC_FLAG=42) + +# Test PCH support +if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.16.0") + target_precompile_headers(cmModLib++ PRIVATE "cpp_pch.hpp") +endif() + +include(GenerateExportHeader) +generate_export_header(cmModLib++) diff --git a/meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp b/meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp new file mode 100644 index 000000000..f4cbea0ce --- /dev/null +++ b/meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp @@ -0,0 +1,15 @@ +#include "cmMod.hpp" + +using namespace std; + +#if MESON_MAGIC_FLAG != 21 +#error "Invalid MESON_MAGIC_FLAG (private)" +#endif + +cmModClass::cmModClass(string foo) { + str = foo + " World"; +} + +string cmModClass::getStr() const { + return str; +} diff --git a/meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp b/meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp new file mode 100644 index 000000000..4445e1f53 --- /dev/null +++ b/meson/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp @@ -0,0 +1,18 @@ +#pragma once + +#include "cmmodlib++_export.h" +#include + +#if MESON_MAGIC_FLAG != 42 && MESON_MAGIC_FLAG != 21 +#error "Invalid MESON_MAGIC_FLAG" +#endif + +class CMMODLIB___EXPORT cmModClass { +private: + std::string str; + +public: + cmModClass(std::string foo); + + std::string getStr() const; +}; diff --git a/meson/test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp b/meson/test cases/cmake/1 basic/subprojects/cmMod/cpp_pch.hpp new file mode 100644 index 000000000..aa7ceb361 --- /dev/null +++ b/meson/test cases/cmake/1 
basic/subprojects/cmMod/cpp_pch.hpp @@ -0,0 +1,2 @@ +#include +#include diff --git a/meson/test cases/cmake/10 header only/main.cpp b/meson/test cases/cmake/10 header only/main.cpp new file mode 100644 index 000000000..1417881ff --- /dev/null +++ b/meson/test cases/cmake/10 header only/main.cpp @@ -0,0 +1,16 @@ +#include +#include + +using namespace std; + +#define EXPECTED "Hello World compDef 42" + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + if (obj.getStr() != EXPECTED) { + cerr << "Expected: '" << EXPECTED << "'" << endl; + return 1; + } + return 0; +} diff --git a/meson/test cases/cmake/10 header only/meson.build b/meson/test cases/cmake/10 header only/meson.build new file mode 100644 index 000000000..ca08a3f30 --- /dev/null +++ b/meson/test cases/cmake/10 header only/meson.build @@ -0,0 +1,12 @@ +project('cmakeSubTest', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib') + +assert(sub_pro.target_list() == ['cmModLib'], 'There should be exactly one target') +assert(sub_pro.target_type('cmModLib') == 'header_only', 'Target type should be header_only') + +exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..e01b6e260 --- /dev/null +++ b/meson/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,12 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set (CMAKE_CXX_STANDARD 14) + +add_definitions("-DDO_NOTHING_JUST_A_FLAG=1") + +add_library(cmModLib INTERFACE) +set_target_properties(cmModLib PROPERTIES INTERFACE_COMPILE_OPTIONS "-DCMAKE_FLAG_MUST_BE_PRESENT") +target_include_directories(cmModLib INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}/include") +target_compile_definitions(cmModLib INTERFACE -DCMAKE_COMPILER_DEFINE_STR="compDef") +target_compile_definitions(cmModLib INTERFACE MESON_MAGIC_FLAG=42) diff --git a/meson/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp b/meson/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp new file mode 100644 index 000000000..fe0104013 --- /dev/null +++ b/meson/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp @@ -0,0 +1,24 @@ +#pragma once + +#include + +#ifndef CMAKE_FLAG_MUST_BE_PRESENT +#error "The flag CMAKE_FLAG_MUST_BE_PRESENT was not set" +#endif + +#define xstr(s) str(s) +#define str(s) #s + +class cmModClass { + private: + std::string str; + public: + cmModClass(std::string foo) { + str = foo + " World "; + str += CMAKE_COMPILER_DEFINE_STR; + str += ' '; + str += xstr(MESON_MAGIC_FLAG); + } + + inline std::string getStr() const { return str; } +}; diff --git a/meson/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake b/meson/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake new file mode 100644 index 000000000..0c663f4fc --- /dev/null +++ b/meson/test cases/cmake/11 cmake_module_path/cmake/FindSomethingLikePython.cmake @@ -0,0 +1,9 @@ +cmake_policy(VERSION 3.7) + +find_package(Python COMPONENTS Interpreter) +if(Python_FOUND OR PYTHONINTERP_FOUND) + set(SomethingLikePython_FOUND ON) + set(SomethingLikePython_EXECUTABLE ${Python_EXECUTABLE}) +else() + set(SomethingLikePython_FOUND OFF) +endif() diff --git a/meson/test cases/cmake/11 cmake_module_path/meson.build 
b/meson/test cases/cmake/11 cmake_module_path/meson.build new file mode 100644 index 000000000..e20193696 --- /dev/null +++ b/meson/test cases/cmake/11 cmake_module_path/meson.build @@ -0,0 +1,25 @@ +# We use Python3 as it's the only thing guaranteed to be available on any platform Meson can run on (unlike Zlib in linuxlike/13 cmake dependency). + +project('user CMake find_package module using cmake_module_path', ['c', 'cpp'], + meson_version: '>= 0.55.0') + +if not find_program('cmake', required: false).found() + error('MESON_SKIP_TEST cmake binary not available.') +endif + +# NOTE: can't request Python3 via dependency('Python3', method: 'cmake') +# Meson intercepts and wants "method: auto" + +# Try to find a dependency with a custom CMake module + +dependency('SomethingLikePython', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'Python::Interpreter') + +dependency('SomethingLikePython', method : 'cmake', cmake_module_path : ['doesNotExist', 'cmake'], modules: 'Python::Interpreter') + +# Test a custom target with Python::Interpreter in COMMAND +cm = import('cmake') +op = cm.subproject_options() +op.add_cmake_defines({'CMAKE_MODULE_PATH': meson.source_root() / 'cmake'}) +sp = cm.subproject('cmMod', options: op) +main = sp.target('main') +test('main', main) diff --git a/meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..88ba9bc57 --- /dev/null +++ b/meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,15 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) + +message(STATUS "CMAKE_MODULE_PATH: '${CMAKE_MODULE_PATH}'") + +find_package(SomethingLikePython REQUIRED) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/main.c" + COMMAND Python::Interpreter "${CMAKE_CURRENT_SOURCE_DIR}/gen.py" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" +) + +add_executable(main "${CMAKE_CURRENT_BINARY_DIR}/main.c") diff --git a/meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py b/meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py new file mode 100644 index 000000000..5c7164604 --- /dev/null +++ b/meson/test cases/cmake/11 cmake_module_path/subprojects/cmMod/gen.py @@ -0,0 +1,9 @@ +with open('main.c', 'w') as fp: + print(''' +#include + +int main(void) { + printf(\"Hello World\"); + return 0; +} +''', file=fp) diff --git a/meson/test cases/cmake/11 cmake_module_path/test.json b/meson/test cases/cmake/11 cmake_module_path/test.json new file mode 100644 index 000000000..79a2b60d3 --- /dev/null +++ b/meson/test cases/cmake/11 cmake_module_path/test.json @@ -0,0 +1,5 @@ +{ + "tools": { + "cmake": ">=3.12" + } +} diff --git a/meson/test cases/cmake/12 generator expressions/main.cpp b/meson/test cases/cmake/12 generator expressions/main.cpp new file mode 100644 index 000000000..95079615a --- /dev/null +++ b/meson/test cases/cmake/12 generator expressions/main.cpp @@ -0,0 +1,10 @@ +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/12 generator expressions/meson.build b/meson/test cases/cmake/12 generator expressions/meson.build new file mode 100644 index 000000000..ca08a3f30 --- /dev/null +++ b/meson/test cases/cmake/12 generator expressions/meson.build @@ -0,0 +1,12 @@ +project('cmakeSubTest', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = 
cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib') + +assert(sub_pro.target_list() == ['cmModLib'], 'There should be exactly one target') +assert(sub_pro.target_type('cmModLib') == 'header_only', 'Target type should be header_only') + +exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..dc4f9e43d --- /dev/null +++ b/meson/test cases/cmake/12 generator expressions/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,22 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set (CMAKE_CXX_STANDARD 14) + +include(GNUInstallDirs) + +add_library(cmModLib INTERFACE) + +target_compile_options(cmModLib + INTERFACE $<$,$>:-DCMAKE_FLAG_ERROR_A> # Check discard = false + INTERFACE "-DCMAKE_FLAG_REQUIRED_A" + INTERFACE $<$>,$>>:-DCMAKE_FLAG_REQUIRED_B> + INTERFACE $<$:-DCMAKE_FLAG_REQUIRED_C> +) + +target_include_directories(cmModLib INTERFACE + $ + $ +) + +target_compile_definitions(cmModLib INTERFACE -DCMAKE_COMPILER_DEFINE_STR="compDef") diff --git a/meson/test cases/cmake/12 generator expressions/subprojects/cmMod/include/cmMod.hpp b/meson/test cases/cmake/12 generator expressions/subprojects/cmMod/include/cmMod.hpp new file mode 100644 index 000000000..1f00107d5 --- /dev/null +++ b/meson/test cases/cmake/12 generator expressions/subprojects/cmMod/include/cmMod.hpp @@ -0,0 +1,31 @@ +#pragma once + +#include + +#ifndef CMAKE_FLAG_REQUIRED_A +#error "The flag CMAKE_FLAG_REQUIRED_A was not set" +#endif + +#ifndef CMAKE_FLAG_REQUIRED_B +#error "The flag CMAKE_FLAG_REQUIRED_B was not set" +#endif + +#ifndef CMAKE_FLAG_REQUIRED_C +#error "The flag CMAKE_FLAG_REQUIRED_C was not set" +#endif + +#ifdef CMAKE_FLAG_ERROR_A +#error "The flag CMAKE_FLAG_ERROR_A was set" +#endif + +class cmModClass { + private: + std::string str; + public: + cmModClass(std::string foo) { + str = foo + " World "; + str += CMAKE_COMPILER_DEFINE_STR; + } + + inline std::string getStr() const { return str; } +}; diff --git a/meson/test cases/cmake/13 system includes/main.cpp b/meson/test cases/cmake/13 system includes/main.cpp new file mode 100644 index 000000000..95079615a --- /dev/null +++ b/meson/test cases/cmake/13 system includes/main.cpp @@ -0,0 +1,10 @@ +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/13 system includes/meson.build b/meson/test cases/cmake/13 system includes/meson.build new file mode 100644 index 000000000..d1007da91 --- /dev/null +++ b/meson/test cases/cmake/13 system includes/meson.build @@ -0,0 +1,18 @@ +project( + 'meson_cmake_system_include_bug', ['c', 'cpp'], + default_options: [ + 'warning_level=3', + 'werror=true', + ], +) + +if meson.get_compiler('cpp').get_argument_syntax() == 'msvc' + error('MESON_SKIP_TEST: Skipp with msvc due to missing -system support') +endif + +cm = import('cmake') +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib') + +exe1 = executable('main1', ['main.cpp'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/13 system includes/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/13 system includes/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..a6b0ba40c --- /dev/null +++ b/meson/test cases/cmake/13 system 
includes/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,15 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set (CMAKE_CXX_STANDARD 14) + +include_directories(${CMAKE_CURRENT_BINARY_DIR}) + +add_definitions("-DDO_NOTHING_JUST_A_FLAG=1") + +add_library(cmModLib SHARED cmMod.cpp) +include(GenerateExportHeader) +generate_export_header(cmModLib) + +target_compile_options(cmModLib PRIVATE "-Wall" "-Werror") +target_include_directories(cmModLib SYSTEM PRIVATE "sysInc") diff --git a/meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.cpp b/meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.cpp new file mode 100644 index 000000000..1eaf0cf31 --- /dev/null +++ b/meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.cpp @@ -0,0 +1,12 @@ +#include "cmMod.hpp" +#include "triggerWarn.hpp" + +using namespace std; + +cmModClass::cmModClass(string foo) { + str = foo + " World " + to_string(bar(World)); +} + +string cmModClass::getStr() const { + return str; +} diff --git a/meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.hpp b/meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.hpp new file mode 100644 index 000000000..52f576bf3 --- /dev/null +++ b/meson/test cases/cmake/13 system includes/subprojects/cmMod/cmMod.hpp @@ -0,0 +1,13 @@ +#pragma once + +#include +#include "cmmodlib_export.h" + +class CMMODLIB_EXPORT cmModClass { + private: + std::string str; + public: + cmModClass(std::string foo); + + std::string getStr() const; +}; diff --git a/meson/test cases/cmake/13 system includes/subprojects/cmMod/sysInc/triggerWarn.hpp b/meson/test cases/cmake/13 system includes/subprojects/cmMod/sysInc/triggerWarn.hpp new file mode 100644 index 000000000..3b00f2db4 --- /dev/null +++ b/meson/test cases/cmake/13 system includes/subprojects/cmMod/sysInc/triggerWarn.hpp @@ -0,0 +1,14 @@ +#pragma once + +enum Foo { + Hello, + World +}; + +inline int bar( Foo foo ) { + switch(foo) { + case Hello: return 0; + // Warn because of missung case for World + } + return 1; +} diff --git a/meson/test cases/cmake/14 fortran threads/meson.build b/meson/test cases/cmake/14 fortran threads/meson.build new file mode 100644 index 000000000..2d2f89208 --- /dev/null +++ b/meson/test cases/cmake/14 fortran threads/meson.build @@ -0,0 +1,12 @@ +project('FortranThreads') + +if not add_languages('fortran', required: false) + error('MESON_SKIP_TEST: Fortran language not available.') +endif + +# want to be sure that CMake can find dependencies where even if the +# project isn't C, the C language is required to find the library. 
+threads = dependency('threads', method: 'cmake', required: false) +if not threads.found() + error('MESON_SKIP_TEST: CMake backend not working for Fortran / threads') +endif diff --git a/meson/test cases/cmake/15 object library advanced/main.cpp b/meson/test cases/cmake/15 object library advanced/main.cpp new file mode 100644 index 000000000..4cc01a83a --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/main.cpp @@ -0,0 +1,11 @@ +#include +#include "libA.hpp" +#include "libB.hpp" + +using namespace std; + +int main(void) { + cout << getLibStr() << endl; + cout << getZlibVers() << endl; + return EXIT_SUCCESS; +} diff --git a/meson/test cases/cmake/15 object library advanced/meson.build b/meson/test cases/cmake/15 object library advanced/meson.build new file mode 100644 index 000000000..4009a0d14 --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/meson.build @@ -0,0 +1,17 @@ +project('cmake_object_lib_test', 'cpp', default_options: ['cpp_std=c++11']) + +if meson.is_cross_build() + error('MESON_SKIP_TEST this test does not cross compile correctly.') +endif + +cm = import('cmake') + +sub_pro = cm.subproject('cmObjLib') +sub_sha = sub_pro.dependency('lib_sha') +sub_sta = sub_pro.dependency('lib_sta') + +exe_sha = executable('shared', ['main.cpp'], dependencies: [sub_sha]) +exe_sta = executable('static', ['main.cpp'], dependencies: [sub_sta]) + +test('test1', exe_sha) +test('test1', exe_sta) diff --git a/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/CMakeLists.txt b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/CMakeLists.txt new file mode 100644 index 000000000..47f1ad31d --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/CMakeLists.txt @@ -0,0 +1,18 @@ +cmake_minimum_required(VERSION 3.7) +project(cmObject CXX) + +add_executable(genC genC.cpp) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/libC.cpp" "${CMAKE_CURRENT_BINARY_DIR}/libC.hpp" + COMMAND genC + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" +) + +include_directories("${CMAKE_CURRENT_BINARY_DIR}") + +add_library(lib_obj OBJECT libA.cpp libB.cpp "${CMAKE_CURRENT_BINARY_DIR}/libC.cpp" "${CMAKE_CURRENT_BINARY_DIR}/libC.hpp") +add_library(lib_sha SHARED $) +add_library(lib_sta STATIC $) + +target_compile_definitions(lib_obj PRIVATE "-DBUILD_AS_OBJ=1") diff --git a/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/genC.cpp b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/genC.cpp new file mode 100644 index 000000000..a9e4b5eff --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/genC.cpp @@ -0,0 +1,31 @@ +#include +#include + +using namespace std; + +int main() { + ofstream hpp("libC.hpp"); + ofstream cpp("libC.cpp"); + if (!hpp.is_open() || !cpp.is_open()) { + cerr << "Failed to open 'libC.hpp' or 'libC.cpp' for writing" << endl; + return 1; + } + + hpp << R"cpp( +#pragma once + +#include + +std::string getGenStr(); +)cpp"; + + cpp << R"cpp( +#include "libC.hpp" + +std::string getGenStr(void) { + return "GEN STR"; +} +)cpp"; + + return 0; +} \ No newline at end of file diff --git a/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.cpp b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.cpp new file mode 100644 index 000000000..fd5aa48f1 --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.cpp @@ -0,0 +1,9 @@ +#include 
"libA.hpp" + +#if not BUILD_AS_OBJ +#error "BUILD_AS_OBJ was not defined" +#endif + +std::string getLibStr(void) { + return "Hello World"; +} diff --git a/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.hpp b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.hpp new file mode 100644 index 000000000..84b7bc7d3 --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libA.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +std::string DLL_PUBLIC getLibStr(); diff --git a/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.cpp b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.cpp new file mode 100644 index 000000000..4b832ec9f --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.cpp @@ -0,0 +1,6 @@ +#include "libB.hpp" +#include "libC.hpp" + +std::string getZlibVers(void) { + return getGenStr(); +} diff --git a/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.hpp b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.hpp new file mode 100644 index 000000000..52ccc1601 --- /dev/null +++ b/meson/test cases/cmake/15 object library advanced/subprojects/cmObjLib/libB.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +std::string DLL_PUBLIC getZlibVers(); diff --git a/meson/test cases/cmake/16 threads/main.cpp b/meson/test cases/cmake/16 threads/main.cpp new file mode 100644 index 000000000..67ee110a3 --- /dev/null +++ b/meson/test cases/cmake/16 threads/main.cpp @@ -0,0 +1,9 @@ +#include "cmMod.hpp" + +#include + +int main() { + CmMod cc; + cc.asyncIncrement(); + return cc.getNum() == 1 ? 
EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/meson/test cases/cmake/16 threads/meson.build b/meson/test cases/cmake/16 threads/meson.build new file mode 100644 index 000000000..5efd73ed5 --- /dev/null +++ b/meson/test cases/cmake/16 threads/meson.build @@ -0,0 +1,12 @@ +project('cmMod', ['c', 'cpp']) + +cm = import('cmake') +cmOpts = ['-DUSE_PTHREAD=@0@'.format(get_option('use_pthread'))] +cmMod = cm.subproject('cmMod', cmake_options: cmOpts) +cmModDep1 = cmMod.dependency('cmModLib') +cmModDep2 = cmMod.dependency('cmModLib_shared') + +exe1 = executable('exe1', ['main.cpp'], dependencies: [cmModDep1]) +exe2 = executable('exe2', ['main.cpp'], dependencies: [cmModDep2]) +test('exe1_OK', exe1) +test('exe2_OK', exe2) diff --git a/meson/test cases/cmake/16 threads/meson_options.txt b/meson/test cases/cmake/16 threads/meson_options.txt new file mode 100644 index 000000000..1fd9068e0 --- /dev/null +++ b/meson/test cases/cmake/16 threads/meson_options.txt @@ -0,0 +1 @@ +option('use_pthread', type: 'combo', choices: ['ON', 'OFF', 'NOT_SET'], value: 'ON') diff --git a/meson/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..4d61b0c37 --- /dev/null +++ b/meson/test cases/cmake/16 threads/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,15 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod C CXX) +set (CMAKE_CXX_STANDARD 14) + +if(NOT USE_PTHREAD STREQUAL NOT_SET) + set(THREADS_PREFER_PTHREAD_FLAG ${USE_PTHREAD}) +endif() +find_package(Threads) + +add_library(cmModLib STATIC cmMod.cpp) +target_link_libraries(cmModLib PRIVATE Threads::Threads) + +add_library(cmModLib_shared SHARED cmMod.cpp) +target_link_libraries(cmModLib_shared PUBLIC Threads::Threads) diff --git a/meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.cpp b/meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.cpp new file mode 100644 index 000000000..f971eeba0 --- /dev/null +++ b/meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.cpp @@ -0,0 +1,15 @@ +#include "cmMod.hpp" + +#include +#include + +using namespace std::chrono_literals; + +void CmMod::asyncIncrement() { + std::thread t1([this]() { + std::this_thread::sleep_for(100ms); + num += 1; + }); + + t1.join(); +} diff --git a/meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.hpp b/meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.hpp new file mode 100644 index 000000000..81c5ec867 --- /dev/null +++ b/meson/test cases/cmake/16 threads/subprojects/cmMod/cmMod.hpp @@ -0,0 +1,21 @@ +#pragma once + +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else +#if defined __GNUC__ +#define DLL_PUBLIC __attribute__((visibility("default"))) +#else +#pragma message("Compiler does not support symbol visibility.") +#define DLL_PUBLIC +#endif +#endif + +class DLL_PUBLIC CmMod { +private: + int num = 0; + +public: + inline int getNum() const { return num; } + void asyncIncrement(); +}; diff --git a/meson/test cases/cmake/16 threads/subprojects/cmMod/main.cpp b/meson/test cases/cmake/16 threads/subprojects/cmMod/main.cpp new file mode 100644 index 000000000..67ee110a3 --- /dev/null +++ b/meson/test cases/cmake/16 threads/subprojects/cmMod/main.cpp @@ -0,0 +1,9 @@ +#include "cmMod.hpp" + +#include + +int main() { + CmMod cc; + cc.asyncIncrement(); + return cc.getNum() == 1 ? 
EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/meson/test cases/cmake/16 threads/test.json b/meson/test cases/cmake/16 threads/test.json new file mode 100644 index 000000000..db788b113 --- /dev/null +++ b/meson/test cases/cmake/16 threads/test.json @@ -0,0 +1,11 @@ +{ + "matrix": { + "options": { + "use_pthread": [ + { "val": "ON" }, + { "val": "OFF" }, + { "val": "NOT_SET" } + ] + } + } +} diff --git a/meson/test cases/cmake/17 include path order/main.cpp b/meson/test cases/cmake/17 include path order/main.cpp new file mode 100644 index 000000000..95079615a --- /dev/null +++ b/meson/test cases/cmake/17 include path order/main.cpp @@ -0,0 +1,10 @@ +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/17 include path order/meson.build b/meson/test cases/cmake/17 include path order/meson.build new file mode 100644 index 000000000..cf3ec9690 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/meson.build @@ -0,0 +1,9 @@ +project('include_path_order', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib++') + +exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..9a252df18 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,34 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set (CMAKE_CXX_STANDARD 14) + +include_directories( + ${CMAKE_CURRENT_BINARY_DIR} + + # The one and only correct include dir + ${CMAKE_CURRENT_SOURCE_DIR}/incG + + # All of these are traps + ${CMAKE_CURRENT_SOURCE_DIR}/incL + ${CMAKE_CURRENT_SOURCE_DIR}/incM + ${CMAKE_CURRENT_SOURCE_DIR}/incO + ${CMAKE_CURRENT_SOURCE_DIR}/incF + ${CMAKE_CURRENT_SOURCE_DIR}/incI + ${CMAKE_CURRENT_SOURCE_DIR}/incE + ${CMAKE_CURRENT_SOURCE_DIR}/incD + ${CMAKE_CURRENT_SOURCE_DIR}/incH + ${CMAKE_CURRENT_SOURCE_DIR}/incN + ${CMAKE_CURRENT_SOURCE_DIR}/incA + ${CMAKE_CURRENT_SOURCE_DIR}/incB + ${CMAKE_CURRENT_SOURCE_DIR}/incJ + ${CMAKE_CURRENT_SOURCE_DIR}/incP + ${CMAKE_CURRENT_SOURCE_DIR}/incC + ${CMAKE_CURRENT_SOURCE_DIR}/incK +) + +add_definitions("-DDO_NOTHING_JUST_A_FLAG=1") + +add_library(cmModLib++ SHARED cmMod.cpp) +include(GenerateExportHeader) +generate_export_header(cmModLib++) diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp new file mode 100644 index 000000000..d3141d512 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/cmMod.cpp @@ -0,0 +1,11 @@ +#include "cmMod.hpp" + +using namespace std; + +cmModClass::cmModClass(string foo) { + str = foo + " World"; +} + +string cmModClass::getStr() const { + return str; +} diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp new file mode 100644 index 000000000..6228a31fe --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incA/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (A) +#pragma once + +#error "cmMod.hpp in incA must not be included" diff --git a/meson/test cases/cmake/17 include path 
order/subprojects/cmMod/incB/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp new file mode 100644 index 000000000..60bf14cbd --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incB/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (B) +#pragma once + +#error "cmMod.hpp in incB must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp new file mode 100644 index 000000000..3229e0775 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incC/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (C) +#pragma once + +#error "cmMod.hpp in incC must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp new file mode 100644 index 000000000..b958093ee --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incD/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (D) +#pragma once + +#error "cmMod.hpp in incD must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp new file mode 100644 index 000000000..aea5b6d72 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incE/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (E) +#pragma once + +#error "cmMod.hpp in incE must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp new file mode 100644 index 000000000..1e1e2cb4c --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incF/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (F) +#pragma once + +#error "cmMod.hpp in incF must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp new file mode 100644 index 000000000..0e6dc0484 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incG/cmMod.hpp @@ -0,0 +1,14 @@ +#pragma once + +#include "cmmodlib++_export.h" +#include + +class CMMODLIB___EXPORT cmModClass { +private: + std::string str; + +public: + cmModClass(std::string foo); + + std::string getStr() const; +}; diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp new file mode 100644 index 000000000..263e701f6 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incH/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (H) +#pragma once + +#error "cmMod.hpp in incH must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp new file mode 100644 index 000000000..a44a89a57 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incI/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (I) +#pragma once + +#error "cmMod.hpp in incI must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp b/meson/test cases/cmake/17 include path 
order/subprojects/cmMod/incJ/cmMod.hpp new file mode 100644 index 000000000..118a80973 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incJ/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (J) +#pragma once + +#error "cmMod.hpp in incJ must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp new file mode 100644 index 000000000..8294104b9 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incL/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (L) +#pragma once + +#error "cmMod.hpp in incL must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp new file mode 100644 index 000000000..031c5e9bb --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incM/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (M) +#pragma once + +#error "cmMod.hpp in incM must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp new file mode 100644 index 000000000..9dba6da99 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incN/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (N) +#pragma once + +#error "cmMod.hpp in incN must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp new file mode 100644 index 000000000..233add9f6 --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incO/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (O) +#pragma once + +#error "cmMod.hpp in incO must not be included" diff --git a/meson/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp new file mode 100644 index 000000000..95787453b --- /dev/null +++ b/meson/test cases/cmake/17 include path order/subprojects/cmMod/incP/cmMod.hpp @@ -0,0 +1,4 @@ +// cmMod.hpp (P) +#pragma once + +#error "cmMod.hpp in incP must not be included" diff --git a/meson/test cases/cmake/18 skip include files/main.cpp b/meson/test cases/cmake/18 skip include files/main.cpp new file mode 100644 index 000000000..95079615a --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/main.cpp @@ -0,0 +1,10 @@ +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/18 skip include files/meson.build b/meson/test cases/cmake/18 skip include files/meson.build new file mode 100644 index 000000000..b9a300c93 --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/meson.build @@ -0,0 +1,9 @@ +project('cmakeSubTest', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib++') + +exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..4db01b32d --- /dev/null +++ b/meson/test cases/cmake/18 skip include 
files/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,15 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set (CMAKE_CXX_STANDARD 14) + +include_directories(${CMAKE_CURRENT_BINARY_DIR}) + +add_definitions("-DDO_NOTHING_JUST_A_FLAG=1") + +set(SRCS + ${CMAKE_CURRENT_LIST_DIR}/cmMod.hpp + ${CMAKE_CURRENT_LIST_DIR}/cmMod.cpp +) + +add_subdirectory(fakeInc) diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp new file mode 100644 index 000000000..7551b756b --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.cpp @@ -0,0 +1,10 @@ +#include "cmMod.hpp" + +using namespace std; + +#define MESON_INCLUDE_IMPL +#include "fakeInc/cmModInc1.cpp" +#include "fakeInc/cmModInc2.cpp" +#include "fakeInc/cmModInc3.cpp" +#include "fakeInc/cmModInc4.cpp" +#undef MESON_INCLUDE_IMPL diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp new file mode 100644 index 000000000..f7b780f11 --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/cmMod.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include "cmmodlib++_export.h" +#include + +class CMMODLIB___EXPORT cmModClass { +private: + std::string str; + + std::string getStr1() const; + std::string getStr2() const; +public: + cmModClass(std::string foo); + + std::string getStr() const; +}; diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt new file mode 100644 index 000000000..39cd08003 --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/CMakeLists.txt @@ -0,0 +1,30 @@ +list(APPEND SRCS + cmModInc1.cpp + cmModInc2.cpp + cmModInc3.cpp + cmModInc4.cpp +) + +set(SRC_A + cmModInc1.cpp + ${CMAKE_CURRENT_LIST_DIR}/cmModInc2.cpp +) + +set_property( + SOURCE ${SRC_A} + PROPERTY + HEADER_FILE_ONLY ON +) + +set_source_files_properties( + cmModInc3.cpp + ${CMAKE_CURRENT_LIST_DIR}/cmModInc4.cpp + PROPERTIES + LABELS "CMake;Lists;are;fun" + HEADER_FILE_ONLY ON +) + +include_directories(${CMAKE_CURRENT_BINARY_DIR}) +add_library(cmModLib++ SHARED ${SRCS}) +include(GenerateExportHeader) +generate_export_header(cmModLib++) diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp new file mode 100644 index 000000000..b637755c3 --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc1.cpp @@ -0,0 +1,7 @@ +#ifndef MESON_INCLUDE_IMPL +#error "MESON_INCLUDE_IMPL is not defined" +#endif // !MESON_INCLUDE_IMPL + +cmModClass::cmModClass(string foo) { + str = foo + " World"; +} diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp new file mode 100644 index 000000000..8a53567da --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc2.cpp @@ -0,0 +1,7 @@ +#ifndef MESON_INCLUDE_IMPL +#error "MESON_INCLUDE_IMPL is not defined" +#endif // !MESON_INCLUDE_IMPL + +string cmModClass::getStr() const { + return getStr2(); +} diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp 
b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp new file mode 100644 index 000000000..2c8ad125b --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc3.cpp @@ -0,0 +1,7 @@ +#ifndef MESON_INCLUDE_IMPL +#error "MESON_INCLUDE_IMPL is not defined" +#endif // !MESON_INCLUDE_IMPL + +string cmModClass::getStr1() const { + return getStr2(); +} diff --git a/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp new file mode 100644 index 000000000..78a067342 --- /dev/null +++ b/meson/test cases/cmake/18 skip include files/subprojects/cmMod/fakeInc/cmModInc4.cpp @@ -0,0 +1,7 @@ +#ifndef MESON_INCLUDE_IMPL +#error "MESON_INCLUDE_IMPL is not defined" +#endif // !MESON_INCLUDE_IMPL + +string cmModClass::getStr2() const { + return str; +} diff --git a/meson/test cases/cmake/19 advanced options/main.cpp b/meson/test cases/cmake/19 advanced options/main.cpp new file mode 100644 index 000000000..6a071cc21 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/main.cpp @@ -0,0 +1,18 @@ +#include +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + + int v1 = obj.getInt(); + int v2 = getTestInt(); + if (v1 != ((1 + v2) * 2)) { + cerr << "Number test failed" << endl; + return 1; + } + return 0; +} diff --git a/meson/test cases/cmake/19 advanced options/meson.build b/meson/test cases/cmake/19 advanced options/meson.build new file mode 100644 index 000000000..6332ca4b7 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/meson.build @@ -0,0 +1,29 @@ +project('cmake_set_opt', ['c', 'cpp']) + +comp = meson.get_compiler('cpp') +if comp.get_argument_syntax() == 'msvc' + error('MESON_SKIP_TEST: MSVC is not supported because it does not support C++11') +endif + +cm = import('cmake') +opts = cm.subproject_options() + +opts.add_cmake_defines({'SOME_CMAKE_VAR': 'something', 'SOME_OTHER_VAR': true}) + +opts.set_override_option('cpp_std', 'c++11') # Global is C++11 +opts.set_override_option('cpp_std', 'c++14', target: 'cmModLib++') # Override it with C++14 for cmModLib++ + +opts.append_compile_args('cpp', '-DMESON_GLOBAL_FLAG=1') +opts.append_compile_args('cpp', ['-DMESON_SPECIAL_FLAG1=1', ['-DMESON_SPECIAL_FLAG2=1']], target: 'cmModLib++') +opts.append_compile_args('cpp', '-DMESON_MAGIC_INT=42', target: 'cmModLib++') +opts.append_compile_args('cpp', [[[['-DMESON_MAGIC_INT=20']]]], target: 'cmTestLib') + +opts.set_install(false) +opts.set_install(true, target: 'testEXE') + +sp = cm.subproject('cmOpts', options: opts) +dep1 = sp.dependency('cmModLib++') +dep2 = sp.dependency('cmTestLib') + +exe1 = executable('main', ['main.cpp'], dependencies: [dep1, dep2]) +test('test1', exe1) diff --git a/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt new file mode 100644 index 000000000..584841e87 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt @@ -0,0 +1,18 @@ +cmake_minimum_required(VERSION 3.7) + +project(CmOpts) + +set(CMAKE_CXX_STANDARD 98) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something") + message(FATAL_ERROR "Setting the CMake var failed") +endif() + +add_library(cmModLib++ STATIC cmMod.cpp) +add_library(cmTestLib STATIC cmTest.cpp) 
+add_executable(testEXE main.cpp) + +target_link_libraries(testEXE cmModLib++) + +install(TARGETS cmTestLib ARCHIVE DESTINATION lib RUNTIME DESTINATION bin) diff --git a/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp new file mode 100644 index 000000000..7651b60a1 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp @@ -0,0 +1,31 @@ +#include "cmMod.hpp" + +using namespace std; + +#if __cplusplus < 201402L +#error "At least C++14 is required" +#endif + +#ifndef MESON_GLOBAL_FLAG +#error "MESON_GLOBAL_FLAG was not set" +#endif + +#ifndef MESON_SPECIAL_FLAG1 +#error "MESON_SPECIAL_FLAG1 was not set" +#endif + +#ifndef MESON_SPECIAL_FLAG2 +#error "MESON_SPECIAL_FLAG2 was not set" +#endif + +cmModClass::cmModClass(string foo) { + str = foo + " World"; +} + +string cmModClass::getStr() const { + return str; +} + +int cmModClass::getInt() const { + return MESON_MAGIC_INT; +} diff --git a/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp new file mode 100644 index 000000000..074893657 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp @@ -0,0 +1,14 @@ +#pragma once + +#include + +class cmModClass { +private: + std::string str; + +public: + cmModClass(std::string foo); + + std::string getStr() const; + int getInt() const; +}; diff --git a/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp new file mode 100644 index 000000000..a00cdcd95 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp @@ -0,0 +1,25 @@ +#include "cmTest.hpp" + +#if __cplusplus < 201103L +#error "At least C++11 is required" +#endif + +#if __cplusplus >= 201402L +#error "At most C++11 is required" +#endif + +#ifndef MESON_GLOBAL_FLAG +#error "MESON_GLOBAL_FLAG was not set" +#endif + +#ifdef MESON_SPECIAL_FLAG1 +#error "MESON_SPECIAL_FLAG1 *was* set" +#endif + +#ifdef MESON_SPECIAL_FLAG2 +#error "MESON_SPECIAL_FLAG2 *was* set" +#endif + +int getTestInt() { + return MESON_MAGIC_INT; +} diff --git a/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp new file mode 100644 index 000000000..5a3bf7b49 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp @@ -0,0 +1,3 @@ +#pragma once + +int getTestInt(); diff --git a/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp new file mode 100644 index 000000000..497d1cee0 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp @@ -0,0 +1,10 @@ +#include +#include "cmMod.hpp" + +using namespace std; + +int main(void) { + cmModClass obj("Hello (LIB TEST)"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/19 advanced options/test.json b/meson/test cases/cmake/19 advanced options/test.json new file mode 100644 index 000000000..71ea812c7 --- /dev/null +++ b/meson/test cases/cmake/19 advanced options/test.json @@ -0,0 +1,8 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/testEXE"} + ], + "tools": { + "cmake": ">=3.11" + } +} diff --git a/meson/test cases/cmake/2 advanced/main.cpp b/meson/test 
cases/cmake/2 advanced/main.cpp new file mode 100644 index 000000000..d823e2964 --- /dev/null +++ b/meson/test cases/cmake/2 advanced/main.cpp @@ -0,0 +1,15 @@ +#include +#include +#include "config.h" + +#if CONFIG_OPT != 42 +#error "Invalid value of CONFIG_OPT" +#endif + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/2 advanced/meson.build b/meson/test cases/cmake/2 advanced/meson.build new file mode 100644 index 000000000..b301bfe42 --- /dev/null +++ b/meson/test cases/cmake/2 advanced/meson.build @@ -0,0 +1,27 @@ +project('cmakeSubTest_advanced', ['c', 'cpp']) + +dep_test = dependency('ZLIB', method: 'cmake', required: false) +if not dep_test.found() + error('MESON_SKIP_TEST: zlib is not installed') +endif + +cm = import('cmake') + +# Test the "normal" subproject call +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib') +sub_sta = sub_pro.dependency('cmModLibStatic') + +# Build some files +exe1 = executable('main1', ['main.cpp'], dependencies: [sub_dep]) +exe2 = executable('main2', ['main.cpp'], dependencies: [sub_sta]) +test('test1', exe1) +test('test2', exe2) + +# Test if we can also extract executables +assert(sub_pro.target_type('testEXE') == 'executable', 'The type must be executable for obvious reasons') +test('test3', sub_pro.target('testEXE')) + +# Test that we can add a new target with the same name as the CMake subproject +exe4 = executable('testEXE', ['main.cpp'], dependencies: [sub_sta]) +test('test4', exe4) diff --git a/meson/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..6258ca0b4 --- /dev/null +++ b/meson/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,38 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set(CMAKE_CXX_STANDARD 14) + +find_package(ZLIB REQUIRED) + +include_directories(${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/lib) + +set(CONFIG_OPT 42) +configure_file("config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/config.h" @ONLY) + +add_library(cmModLib SHARED lib/cmMod.cpp) +add_library(cmModLibStatic STATIC lib/cmMod.cpp) +include(GenerateExportHeader) +generate_export_header(cmModLib) + +set_target_properties(cmModLib PROPERTIES VERSION 1.0.1) + +add_executable(testEXE main.cpp) + +target_link_libraries(cmModLib ZLIB::ZLIB) +target_link_libraries(cmModLibStatic ;ZLIB::ZLIB;) +target_link_libraries(testEXE cmModLib) + +if(APPLE) + find_library(COREFOUNDATION_FRAMEWORK "CoreFoundation") + if(NOT COREFOUNDATION_FRAMEWORK) + message(FATAL_ERROR "CoreFoundation framework not found") + endif() + + target_link_libraries(cmModLibStatic "${COREFOUNDATION_FRAMEWORK}") + target_compile_definitions(cmModLibStatic PUBLIC USE_FRAMEWORK) +endif() + +target_compile_definitions(cmModLibStatic PUBLIC CMMODLIB_STATIC_DEFINE) + +install(TARGETS testEXE LIBRARY DESTINATION lib RUNTIME DESTINATION bin) diff --git a/meson/test cases/cmake/2 advanced/subprojects/cmMod/config.h.in b/meson/test cases/cmake/2 advanced/subprojects/cmMod/config.h.in new file mode 100644 index 000000000..f538ac985 --- /dev/null +++ b/meson/test cases/cmake/2 advanced/subprojects/cmMod/config.h.in @@ -0,0 +1,3 @@ +#pragma once + +#define CONFIG_OPT @CONFIG_OPT@ diff --git a/meson/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp b/meson/test cases/cmake/2 
advanced/subprojects/cmMod/lib/cmMod.cpp new file mode 100644 index 000000000..eb414382d --- /dev/null +++ b/meson/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.cpp @@ -0,0 +1,26 @@ +#include "cmMod.hpp" +#include +#include "config.h" + +#if CONFIG_OPT != 42 +#error "Invalid value of CONFIG_OPT" +#endif + +#ifdef USE_FRAMEWORK +#include +#endif + +using namespace std; + +cmModClass::cmModClass(string foo) { + str = foo + " World " + zlibVersion(); + +#ifdef USE_FRAMEWORK + CFStringRef ref = CFStringCreateWithCString(NULL, str.c_str(), kCFStringEncodingUTF8); + CFRelease(ref); +#endif +} + +string cmModClass::getStr() const { + return str; +} diff --git a/meson/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.hpp b/meson/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.hpp new file mode 100644 index 000000000..52f576bf3 --- /dev/null +++ b/meson/test cases/cmake/2 advanced/subprojects/cmMod/lib/cmMod.hpp @@ -0,0 +1,13 @@ +#pragma once + +#include +#include "cmmodlib_export.h" + +class CMMODLIB_EXPORT cmModClass { + private: + std::string str; + public: + cmModClass(std::string foo); + + std::string getStr() const; +}; diff --git a/meson/test cases/cmake/2 advanced/subprojects/cmMod/main.cpp b/meson/test cases/cmake/2 advanced/subprojects/cmMod/main.cpp new file mode 100644 index 000000000..77fab68ab --- /dev/null +++ b/meson/test cases/cmake/2 advanced/subprojects/cmMod/main.cpp @@ -0,0 +1,11 @@ +#include +#include +#include "lib/cmMod.hpp" + +using namespace std; + +int main(void) { + cmModClass obj("Hello (LIB TEST)"); + cout << obj.getStr() << " ZLIB: " << zlibVersion() << endl; + return 0; +} diff --git a/meson/test cases/cmake/2 advanced/test.json b/meson/test cases/cmake/2 advanced/test.json new file mode 100644 index 000000000..71ea812c7 --- /dev/null +++ b/meson/test cases/cmake/2 advanced/test.json @@ -0,0 +1,8 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/testEXE"} + ], + "tools": { + "cmake": ">=3.11" + } +} diff --git a/meson/test cases/cmake/20 cmake file/foolib.cmake.in b/meson/test cases/cmake/20 cmake file/foolib.cmake.in new file mode 100644 index 000000000..16e992bc9 --- /dev/null +++ b/meson/test cases/cmake/20 cmake file/foolib.cmake.in @@ -0,0 +1 @@ +@foo@ diff --git a/meson/test cases/cmake/20 cmake file/meson.build b/meson/test cases/cmake/20 cmake file/meson.build new file mode 100644 index 000000000..758bbee2d --- /dev/null +++ b/meson/test cases/cmake/20 cmake file/meson.build @@ -0,0 +1,14 @@ +project( + 'cmake config file', +) + +cmake = import('cmake') + +cmake_conf = configuration_data() +cmake_conf.set_quoted('foo', 'bar') +cmake.configure_package_config_file( + name : 'foolib', + input : 'foolib.cmake.in', + install_dir : get_option('libdir') / 'cmake', + configuration : cmake_conf, +) diff --git a/meson/test cases/cmake/20 cmake file/test.json b/meson/test cases/cmake/20 cmake file/test.json new file mode 100644 index 000000000..a8c4ba305 --- /dev/null +++ b/meson/test cases/cmake/20 cmake file/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"file": "usr/lib/cmake/foolibConfig.cmake", "type": "file"} + ] +} diff --git a/meson/test cases/cmake/21 shared module/meson.build b/meson/test cases/cmake/21 shared module/meson.build new file mode 100644 index 000000000..c6ff95780 --- /dev/null +++ b/meson/test cases/cmake/21 shared module/meson.build @@ -0,0 +1,13 @@ +project('cmakeSharedModule', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('myMod') + +dl = 
meson.get_compiler('c').find_library('dl', required: false) + +l = shared_library('runtime', 'runtime.c') +e = executable('prog', ['prog.c'], link_with: l, dependencies: [sub_dep, dl]) +m = sub_pro.target('myMod') +test('test1', e, args : m) diff --git a/meson/test cases/cmake/21 shared module/prog.c b/meson/test cases/cmake/21 shared module/prog.c new file mode 100644 index 000000000..228a9766d --- /dev/null +++ b/meson/test cases/cmake/21 shared module/prog.c @@ -0,0 +1,108 @@ + +#include +#include "module.h" + +#if SPECIAL_MAGIC_DEFINE != 42 +#error "SPECIAL_MAGIC_DEFINE is not defined" +#endif + +int func_from_language_runtime(void); +typedef int (*fptr) (void); + +#ifdef _WIN32 + +#include + +static wchar_t* +win32_get_last_error (void) +{ + wchar_t *msg = NULL; + + FormatMessageW (FORMAT_MESSAGE_ALLOCATE_BUFFER + | FORMAT_MESSAGE_IGNORE_INSERTS + | FORMAT_MESSAGE_FROM_SYSTEM, + NULL, GetLastError (), 0, + (LPWSTR) &msg, 0, NULL); + return msg; +} + +int main(int argc, char **argv) +{ + HINSTANCE handle; + fptr importedfunc; + int expected, actual; + int ret = 1; + if(argc==0) {}; + + handle = LoadLibraryA (argv[1]); + if (!handle) { + wchar_t *msg = win32_get_last_error (); + printf ("Could not open %s: %S\n", argv[1], msg); + goto nohandle; + } + + importedfunc = (fptr) GetProcAddress (handle, "func"); + if (importedfunc == NULL) { + wchar_t *msg = win32_get_last_error (); + printf ("Could not find 'func': %S\n", msg); + goto out; + } + + actual = importedfunc (); + expected = func_from_language_runtime (); + if (actual != expected) { + printf ("Got %i instead of %i\n", actual, expected); + goto out; + } + + ret = 0; +out: + FreeLibrary (handle); +nohandle: + return ret; +} + +#else + +#include +#include + +int main(int argc, char **argv) { + void *dl; + fptr importedfunc; + int expected, actual; + char *error; + int ret = 1; + if(argc==0) {}; + + dlerror(); + dl = dlopen(argv[1], RTLD_LAZY); + error = dlerror(); + if(error) { + printf("Could not open %s: %s\n", argv[1], error); + goto nodl; + } + + importedfunc = (fptr) dlsym(dl, "func"); + if (importedfunc == NULL) { + printf ("Could not find 'func'\n"); + goto out; + } + + assert(importedfunc != func_from_language_runtime); + + actual = (*importedfunc)(); + expected = func_from_language_runtime (); + if (actual != expected) { + printf ("Got %i instead of %i\n", actual, expected); + goto out; + } + + ret = 0; +out: + dlclose(dl); +nodl: + return ret; +} + +#endif diff --git a/meson/test cases/cmake/21 shared module/runtime.c b/meson/test cases/cmake/21 shared module/runtime.c new file mode 100644 index 000000000..03bde8614 --- /dev/null +++ b/meson/test cases/cmake/21 shared module/runtime.c @@ -0,0 +1,19 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +/* + * This file pretends to be a language runtime that supports extension + * modules. 
+ */ + +int DLL_PUBLIC func_from_language_runtime(void) { + return 86; +} diff --git a/meson/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..d2fcfe3ff --- /dev/null +++ b/meson/test cases/cmake/21 shared module/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,7 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmModule) + +include_directories("${CMAKE_CURRENT_SOURCE_DIR}/module") + +add_library(myMod MODULE "${CMAKE_CURRENT_SOURCE_DIR}/module/module.c") diff --git a/meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c b/meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c new file mode 100644 index 000000000..5dd26d7ef --- /dev/null +++ b/meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.c @@ -0,0 +1,96 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +#if defined(_WIN32) || defined(__CYGWIN__) + +#include <stdio.h> + +typedef int (*fptr) (void); + +#ifdef __CYGWIN__ + +#include <dlfcn.h> + +fptr find_any_f (const char *name) { + return (fptr) dlsym(RTLD_DEFAULT, name); +} +#else /* _WIN32 */ + +#include <windows.h> +#include <tlhelp32.h> + +static wchar_t* +win32_get_last_error (void) +{ + wchar_t *msg = NULL; + + FormatMessageW (FORMAT_MESSAGE_ALLOCATE_BUFFER + | FORMAT_MESSAGE_IGNORE_INSERTS + | FORMAT_MESSAGE_FROM_SYSTEM, + NULL, GetLastError (), 0, + (LPWSTR) &msg, 0, NULL); + return msg; +} + +/* Unlike Linux and OS X, when a library is loaded, all the symbols aren't + * loaded into a single namespace. You must fetch the symbol by iterating over + * all loaded modules. Code for finding the function from any of the loaded + * modules is taken from gmodule.c in glib */ +fptr find_any_f (const char *name) { + fptr f; + HANDLE snapshot; + MODULEENTRY32 me32; + + snapshot = CreateToolhelp32Snapshot (TH32CS_SNAPMODULE, 0); + if (snapshot == (HANDLE) -1) { + wchar_t *msg = win32_get_last_error(); + printf("Could not get snapshot: %S\n", msg); + return 0; + } + + me32.dwSize = sizeof (me32); + + f = NULL; + if (Module32First (snapshot, &me32)) { + do { + if ((f = (fptr) GetProcAddress (me32.hModule, name)) != NULL) + break; + } while (Module32Next (snapshot, &me32)); + } + + CloseHandle (snapshot); + return f; +} +#endif + +int DLL_PUBLIC func(void) { + fptr f; + + f = find_any_f ("func_from_language_runtime"); + if (f != NULL) + return f(); + printf ("Could not find function\n"); + return 1; +} + +#else +/* + * Shared modules often have references to symbols that are not defined + * at link time, but which will be provided from deps of the executable that + * dlopens it. We need to make sure that this works, i.e. that we do + * not pass -Wl,--no-undefined when linking modules. 
+ */ +int func_from_language_runtime(void); + +int DLL_PUBLIC func(void) { + return func_from_language_runtime(); +} +#endif diff --git a/meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h b/meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h new file mode 100644 index 000000000..e1d9c1397 --- /dev/null +++ b/meson/test cases/cmake/21 shared module/subprojects/cmMod/module/module.h @@ -0,0 +1,3 @@ +#pragma once + +#define SPECIAL_MAGIC_DEFINE 42 diff --git a/meson/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt b/meson/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt new file mode 100644 index 000000000..cd915846a --- /dev/null +++ b/meson/test cases/cmake/22 cmake module/cmake_project/CMakeLists.txt @@ -0,0 +1,4 @@ +cmake_minimum_required(VERSION 2.8) +project(cmakeMeson C) + +find_package(cmakeModule REQUIRED) \ No newline at end of file diff --git a/meson/test cases/cmake/22 cmake module/meson.build b/meson/test cases/cmake/22 cmake module/meson.build new file mode 100644 index 000000000..68f9993a6 --- /dev/null +++ b/meson/test cases/cmake/22 cmake module/meson.build @@ -0,0 +1,31 @@ +project('cmakeModule', 'c', version: '1.0.0') + +if build_machine.system() == 'cygwin' + error('MESON_SKIP_TEST CMake is broken on Cygwin.') +endif + +cmake_bin = find_program('cmake', required: false) +if not cmake_bin.found() + error('MESON_SKIP_TEST CMake not installed.') +endif + +cc = meson.get_compiler('c') +if cc.get_id() == 'clang-cl' and meson.backend() == 'ninja' and build_machine.system() == 'windows' + error('MESON_SKIP_TEST CMake installation nor operational for vs2017 clangclx64ninja') +endif + +cmake = import('cmake') + +cmake.write_basic_package_version_file(version: '0.0.1', + name: 'cmakeModule', +) + +conf = configuration_data() +conf.set('MYVAR', 'my variable value') +conf.set_quoted('MYQUOTEDVAR', 'my quoted variable value') + +cmake.configure_package_config_file( + input: 'projectConfig.cmake.in', + name: 'cmakeModule', + configuration: conf, +) diff --git a/meson/test cases/cmake/22 cmake module/projectConfig.cmake.in b/meson/test cases/cmake/22 cmake module/projectConfig.cmake.in new file mode 100644 index 000000000..fa3dfca0f --- /dev/null +++ b/meson/test cases/cmake/22 cmake module/projectConfig.cmake.in @@ -0,0 +1,4 @@ +@PACKAGE_INIT@ + +set(MYVAR "@MYVAR@") +set(MYQUOTEDVAR @MYQUOTEDVAR@) diff --git a/meson/test cases/cmake/22 cmake module/test.json b/meson/test cases/cmake/22 cmake module/test.json new file mode 100644 index 000000000..2a5625a64 --- /dev/null +++ b/meson/test cases/cmake/22 cmake module/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "file", "file": "usr/lib/cmake/cmakeModule/cmakeModuleConfig.cmake"}, + {"type": "file", "file": "usr/lib/cmake/cmakeModule/cmakeModuleConfigVersion.cmake"} + ] +} diff --git a/meson/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake b/meson/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake new file mode 100644 index 000000000..ab5fbace1 --- /dev/null +++ b/meson/test cases/cmake/23 cmake toolchain/CMakeToolchain.cmake @@ -0,0 +1 @@ +set(MESON_TEST_VAR2 VAR2) diff --git a/meson/test cases/cmake/23 cmake toolchain/meson.build b/meson/test cases/cmake/23 cmake toolchain/meson.build new file mode 100644 index 000000000..8399597a5 --- /dev/null +++ b/meson/test cases/cmake/23 cmake toolchain/meson.build @@ -0,0 +1,13 @@ +project('cmake toolchain test', ['c']) + +if meson.is_cross_build() + error('MESON_SKIP_TEST: skip this on cross builds') 
+endif + +cm = import('cmake') + +sub_pro = cm.subproject('cmMod') + +add_languages('cpp') + +sub_pro = cm.subproject('cmModFortran') diff --git a/meson/test cases/cmake/23 cmake toolchain/nativefile.ini.in b/meson/test cases/cmake/23 cmake toolchain/nativefile.ini.in new file mode 100644 index 000000000..1f4037de3 --- /dev/null +++ b/meson/test cases/cmake/23 cmake toolchain/nativefile.ini.in @@ -0,0 +1,9 @@ +[properties] + +cmake_toolchain_file = '@MESON_TEST_ROOT@/CMakeToolchain.cmake' +cmake_skip_compiler_test = 'always' + +[cmake] + +MESON_TEST_VAR1 = 'VAR1 space' +MESON_TEST_VAR2 = 'VAR2 error' diff --git a/meson/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..a00affaeb --- /dev/null +++ b/meson/test cases/cmake/23 cmake toolchain/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,15 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod NONE) + +if(NOT "${MESON_TEST_VAR1}" STREQUAL "VAR1 space") + message(FATAL_ERROR "MESON_TEST_VAR1 -- '${MESON_TEST_VAR1}' != 'VAR1 space'") +endif() + +if(NOT "${MESON_TEST_VAR2}" STREQUAL "VAR2") + message(FATAL_ERROR "MESON_TEST_VAR2 -- '${MESON_TEST_VAR2}' != 'VAR2'") +endif() + +if(NOT DEFINED CMAKE_C_COMPILER_VERSION) + message(FATAL_ERROR "CMAKE_C_COMPILER_VERSION was not defined") +endif() diff --git a/meson/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt b/meson/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt new file mode 100644 index 000000000..ecf1737fc --- /dev/null +++ b/meson/test cases/cmake/23 cmake toolchain/subprojects/cmModFortran/CMakeLists.txt @@ -0,0 +1,19 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod NONE) + +if(NOT "${MESON_TEST_VAR1}" STREQUAL "VAR1 space") + message(FATAL_ERROR "MESON_TEST_VAR1 -- '${MESON_TEST_VAR1}' != 'VAR1 space'") +endif() + +if(NOT "${MESON_TEST_VAR2}" STREQUAL "VAR2") + message(FATAL_ERROR "MESON_TEST_VAR2 -- '${MESON_TEST_VAR2}' != 'VAR2'") +endif() + +if(NOT DEFINED CMAKE_C_COMPILER_VERSION) + message(FATAL_ERROR "CMAKE_C_COMPILER_VERSION was not defined") +endif() + +if(NOT DEFINED CMAKE_CXX_COMPILER_VERSION) + message(FATAL_ERROR "CMAKE_CXX_COMPILER_VERSION was not defined") +endif() diff --git a/meson/test cases/cmake/24 mixing languages/main.c b/meson/test cases/cmake/24 mixing languages/main.c new file mode 100644 index 000000000..028a78ea6 --- /dev/null +++ b/meson/test cases/cmake/24 mixing languages/main.c @@ -0,0 +1,5 @@ +#include + +int main(void) { + return doStuff(); +} diff --git a/meson/test cases/cmake/24 mixing languages/meson.build b/meson/test cases/cmake/24 mixing languages/meson.build new file mode 100644 index 000000000..4ab1d8542 --- /dev/null +++ b/meson/test cases/cmake/24 mixing languages/meson.build @@ -0,0 +1,13 @@ +project('CMake mix', ['c', 'cpp']) + +if not add_languages('objc', required : false) + error('MESON_SKIP_TEST: No ObjC compiler') +endif + +cm = import('cmake') + +sub_pro = cm.subproject('cmTest') +sub_dep = sub_pro.dependency('cmTest', include_type: 'system') + +exe1 = executable('exe1', ['main.c'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt new file mode 100644 index 000000000..80a256f0d --- /dev/null +++ b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/CMakeLists.txt @@ -0,0 
+1,8 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmTest) + +include_directories(${CMAKE_CURRENT_BINARY_DIR}) + +add_library(cmTest STATIC cmTest.c cmTest.m) +target_compile_definitions(cmTest PUBLIC SOME_MAGIC_DEFINE=42) diff --git a/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c new file mode 100644 index 000000000..066d67671 --- /dev/null +++ b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.c @@ -0,0 +1,13 @@ +#include "cmTest.h" +#include + +#if SOME_MAGIC_DEFINE != 42 +#error "SOME_MAGIC_DEFINE != 42" +#endif + +int foo(int x); + +int doStuff(void) { + printf("Hello World\n"); + return foo(42); +} diff --git a/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h new file mode 100644 index 000000000..a6a5c242e --- /dev/null +++ b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.h @@ -0,0 +1,3 @@ +#pragma once + +int doStuff(void); diff --git a/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m new file mode 100644 index 000000000..16ec8056d --- /dev/null +++ b/meson/test cases/cmake/24 mixing languages/subprojects/cmTest/cmTest.m @@ -0,0 +1,7 @@ +#if SOME_MAGIC_DEFINE != 42 +#error "SOME_MAGIC_DEFINE != 42" +#endif + +int foo(int x) { + return 42 - x; +} diff --git a/meson/test cases/cmake/25 assembler/main.c b/meson/test cases/cmake/25 assembler/main.c new file mode 100644 index 000000000..5aef9670e --- /dev/null +++ b/meson/test cases/cmake/25 assembler/main.c @@ -0,0 +1,18 @@ +#include +#include + +int32_t cmTestFunc(void); + +int main(void) +{ + if (cmTestFunc() > 4200) + { + printf("Test success.\n"); + return 0; + } + else + { + printf("Test failure.\n"); + return 1; + } +} diff --git a/meson/test cases/cmake/25 assembler/meson.build b/meson/test cases/cmake/25 assembler/meson.build new file mode 100644 index 000000000..7180356ad --- /dev/null +++ b/meson/test cases/cmake/25 assembler/meson.build @@ -0,0 +1,9 @@ +project('assembler test', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = cm.subproject('cmTest') +sub_dep = sub_pro.dependency('cmTest') + +exe1 = executable('exe1', ['main.c'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt b/meson/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt new file mode 100644 index 000000000..5fb7cd64f --- /dev/null +++ b/meson/test cases/cmake/25 assembler/subprojects/cmTest/CMakeLists.txt @@ -0,0 +1,45 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmTest) + +#Detect processor +if ("${CMAKE_SYSTEM_PROCESSOR}" MATCHES "amd64") + SET(TEST_PROCESSOR "x86_64") +elseif ("${CMAKE_SYSTEM_PROCESSOR}" MATCHES "x86_64") + SET(TEST_PROCESSOR "x86_64") +elseif ("${CMAKE_SYSTEM_PROCESSOR}" MATCHES "i386") + SET(TEST_PROCESSOR "x86") +elseif ("${CMAKE_SYSTEM_PROCESSOR}" MATCHES "i686") + SET(TEST_PROCESSOR "x86") +elseif ("${CMAKE_SYSTEM_PROCESSOR}" MATCHES "arm") + SET(TEST_PROCESSOR "arm") +elseif ("${CMAKE_SYSTEM_PROCESSOR}" MATCHES "aarch64") + SET(TEST_PROCESSOR "arm") +else () + message(FATAL_ERROR, 'MESON_SKIP_TEST: Unsupported Assembler Platform') +endif () + +#Detect ABI +if ("${CMAKE_SYSTEM_NAME}" MATCHES "Linux") + SET(TEST_ABI "sysv") +elseif ("${CMAKE_SYSTEM_NAME}" MATCHES "FreeBSD") + SET(TEST_ABI "sysv") +elseif 
("${CMAKE_SYSTEM_NAME}" MATCHES "NetBSD") + SET(TEST_ABI "sysv") +elseif ("${CMAKE_SYSTEM_NAME}" MATCHES "OpenBSD") + SET(TEST_ABI "sysv") +else () + message(FATAL_ERROR, 'MESON_SKIP_TEST: Unsupported Assembler Platform') +endif () + +SET(TEST_PLATFORM "${TEST_PROCESSOR}-${TEST_ABI}") + +if ( ("${TEST_PLATFORM}" MATCHES "x86_64-sysv") + OR ("${TEST_PLATFORM}" MATCHES "x86-sysv") + OR ("${TEST_PLATFORM}" MATCHES "arm-sysv")) + SET(CMAKE_ASM_COMPILER ${CMAKE_C_COMPILER}) + enable_language(ASM) + SET(TEST_SOURCE "cmTestAsm.s") +endif () + +add_library(cmTest STATIC cmTest.c ${TEST_SOURCE}) diff --git a/meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTest.c b/meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTest.c new file mode 100644 index 000000000..e32415c94 --- /dev/null +++ b/meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTest.c @@ -0,0 +1,8 @@ +#include + +extern const int32_t cmTestArea; + +int32_t cmTestFunc(void) +{ + return cmTestArea; +} diff --git a/meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTestAsm.s b/meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTestAsm.s new file mode 100644 index 000000000..8aa83a6cb --- /dev/null +++ b/meson/test cases/cmake/25 assembler/subprojects/cmTest/cmTestAsm.s @@ -0,0 +1,4 @@ +.text +.globl cmTestArea +cmTestArea: + .long 4242 diff --git a/meson/test cases/cmake/3 advanced no dep/main.cpp b/meson/test cases/cmake/3 advanced no dep/main.cpp new file mode 100644 index 000000000..d823e2964 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/main.cpp @@ -0,0 +1,15 @@ +#include +#include +#include "config.h" + +#if CONFIG_OPT != 42 +#error "Invalid value of CONFIG_OPT" +#endif + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/3 advanced no dep/meson.build b/meson/test cases/cmake/3 advanced no dep/meson.build new file mode 100644 index 000000000..f8f183666 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/meson.build @@ -0,0 +1,19 @@ +project('cmakeSubTest_advanced', ['c', 'cpp']) + +cm = import('cmake') + +# Test the "normal" subproject call +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib') +sub_sta = sub_pro.dependency('cmModLibStatic') + +# Build some files +exe1 = executable('main1', ['main.cpp'], dependencies: [sub_dep]) +exe2 = executable('main2', ['main.cpp'], dependencies: [sub_sta]) +test('test1', exe1) +test('test2', exe2) + +# Test if we can also extract executables +assert(sub_pro.target_type('meson-testEXE') == 'executable', 'The type must be executable for obvious reasons') +test('test3', sub_pro.target('meson-testEXE')) +test('test4', sub_pro.target('benchmark')) diff --git a/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..d738d45c9 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,26 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set(CMAKE_CXX_STANDARD 14) + +include_directories(${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/lib) + +set(CONFIG_OPT 42) +configure_file("config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/config.h" @ONLY) + +add_library(cmModLib SHARED lib/cmMod.cpp) +add_library(cmModLibStatic STATIC lib/cmMod.cpp) +include(GenerateExportHeader) +generate_export_header(cmModLib) + 
+set_target_properties(cmModLib PROPERTIES VERSION 1.0.1) + +add_executable(meson-testEXE main.cpp) +add_executable(benchmark main.cpp) + +target_link_libraries(meson-testEXE cmModLib) +target_link_libraries(benchmark cmModLib) + +target_compile_definitions(cmModLibStatic PUBLIC CMMODLIB_STATIC_DEFINE) + +install(TARGETS meson-testEXE benchmark LIBRARY DESTINATION lib RUNTIME DESTINATION bin) diff --git a/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/config.h.in b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/config.h.in new file mode 100644 index 000000000..f538ac985 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/config.h.in @@ -0,0 +1,3 @@ +#pragma once + +#define CONFIG_OPT @CONFIG_OPT@ diff --git a/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.cpp b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.cpp new file mode 100644 index 000000000..741e8dff0 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.cpp @@ -0,0 +1,16 @@ +#include "cmMod.hpp" +#include "config.h" + +#if CONFIG_OPT != 42 +#error "Invalid value of CONFIG_OPT" +#endif + +using namespace std; + +cmModClass::cmModClass(string foo) { + str = foo + " World"; +} + +string cmModClass::getStr() const { + return str; +} diff --git a/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.hpp b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.hpp new file mode 100644 index 000000000..52f576bf3 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/lib/cmMod.hpp @@ -0,0 +1,13 @@ +#pragma once + +#include <string> +#include "cmmodlib_export.h" + +class CMMODLIB_EXPORT cmModClass { + private: + std::string str; + public: + cmModClass(std::string foo); + + std::string getStr() const; +}; diff --git a/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/main.cpp b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/main.cpp new file mode 100644 index 000000000..d3e67ca32 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/subprojects/cmMod/main.cpp @@ -0,0 +1,10 @@ +#include <iostream> +#include "lib/cmMod.hpp" + +using namespace std; + +int main(void) { + cmModClass obj("Hello (LIB TEST)"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/cmake/3 advanced no dep/test.json b/meson/test cases/cmake/3 advanced no dep/test.json new file mode 100644 index 000000000..c27ed82e4 --- /dev/null +++ b/meson/test cases/cmake/3 advanced no dep/test.json @@ -0,0 +1,11 @@ +{ + "installed": [ + {"type": "pdb", "file": "usr/bin/cm_meson_testEXE"}, + {"type": "exe", "file": "usr/bin/cm_meson_testEXE"}, + {"type": "pdb", "file": "usr/bin/cm_benchmark"}, + {"type": "exe", "file": "usr/bin/cm_benchmark"} + ], + "tools": { + "cmake": ">=3.11" + } +} diff --git a/meson/test cases/cmake/4 code gen/main.cpp b/meson/test cases/cmake/4 code gen/main.cpp new file mode 100644 index 000000000..a41204b7d --- /dev/null +++ b/meson/test cases/cmake/4 code gen/main.cpp @@ -0,0 +1,8 @@ +#include <iostream> +#include "test.hpp" + +using namespace std; + +int main(void) { + cout << getStr() << endl; +} diff --git a/meson/test cases/cmake/4 code gen/meson.build b/meson/test cases/cmake/4 code gen/meson.build new file mode 100644 index 000000000..80c801fa0 --- /dev/null +++ b/meson/test cases/cmake/4 code gen/meson.build @@ -0,0 +1,24 @@ +project('cmake_code_gen', ['c', 'cpp']) + +if meson.is_cross_build() + error('MESON_SKIP_TEST this test does not cross compile 
correctly.') +endif + +cm = import('cmake') + +# Subproject with the "code generator" +sub_pro = cm.subproject('cmCodeGen') +sub_exe = sub_pro.target('genA') + +# Generate the source +generated = custom_target( + 'cmake-generated', + input: [], + output: ['test.cpp'], + command: [sub_exe, '@OUTPUT@'] +) + +# Build the exe +exe1 = executable('main1', ['main.cpp', generated]) + +test('test1', exe1) diff --git a/meson/test cases/cmake/4 code gen/subprojects/cmCodeGen/CMakeLists.txt b/meson/test cases/cmake/4 code gen/subprojects/cmCodeGen/CMakeLists.txt new file mode 100644 index 000000000..ff50e54d5 --- /dev/null +++ b/meson/test cases/cmake/4 code gen/subprojects/cmCodeGen/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 3.7) + +project(CMCodeGen) +set(CMAKE_CXX_STANDARD 14) + +add_executable(genA main.cpp) diff --git a/meson/test cases/cmake/4 code gen/subprojects/cmCodeGen/main.cpp b/meson/test cases/cmake/4 code gen/subprojects/cmCodeGen/main.cpp new file mode 100644 index 000000000..5b7fed2b9 --- /dev/null +++ b/meson/test cases/cmake/4 code gen/subprojects/cmCodeGen/main.cpp @@ -0,0 +1,21 @@ +#include <iostream> +#include <fstream> + +using namespace std; + +int main(int argc, const char *argv[]) { + if(argc < 2) { + cerr << argv[0] << " requires an output file!" << endl; + return 1; + } + ofstream out(argv[1]); + out << R"( +#include "test.hpp" + +std::string getStr() { + return "Hello World"; +} +)"; + + return 0; +} diff --git a/meson/test cases/cmake/4 code gen/test.hpp b/meson/test cases/cmake/4 code gen/test.hpp new file mode 100644 index 000000000..8e25a0a5e --- /dev/null +++ b/meson/test cases/cmake/4 code gen/test.hpp @@ -0,0 +1,5 @@ +#pragma once + +#include <string> + +std::string getStr(); diff --git a/meson/test cases/cmake/5 object library/main.cpp b/meson/test cases/cmake/5 object library/main.cpp new file mode 100644 index 000000000..9933ab457 --- /dev/null +++ b/meson/test cases/cmake/5 object library/main.cpp @@ -0,0 +1,11 @@ +#include <stdlib.h> +#include <iostream> +#include "libA.hpp" +#include "libB.hpp" + +using namespace std; + +int main(void) { + cout << getLibStr() << " -- " << getZlibVers() << endl; + return EXIT_SUCCESS; +} diff --git a/meson/test cases/cmake/5 object library/meson.build b/meson/test cases/cmake/5 object library/meson.build new file mode 100644 index 000000000..f38a2ddae --- /dev/null +++ b/meson/test cases/cmake/5 object library/meson.build @@ -0,0 +1,21 @@ +project('cmake_object_lib_test', ['c', 'cpp']) + +dep_test = dependency('ZLIB', method: 'cmake', required: false) +if not dep_test.found() + error('MESON_SKIP_TEST: zlib is not installed') +endif + +cm = import('cmake') + +sub_pro = cm.subproject('cmObjLib') +sub_sha = sub_pro.dependency('lib_sha') +sub_sta = sub_pro.dependency('lib_sta') + +# Required for the static library +zlib_dep = dependency('zlib') + +exe_sha = executable('shared', ['main.cpp'], dependencies: [sub_sha]) +exe_sta = executable('static', ['main.cpp'], dependencies: [sub_sta, zlib_dep]) + +test('test1', exe_sha) +test('test1', exe_sta) diff --git a/meson/test cases/cmake/5 object library/subprojects/cmObjLib/CMakeLists.txt b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/CMakeLists.txt new file mode 100644 index 000000000..062496e9d --- /dev/null +++ b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/CMakeLists.txt @@ -0,0 +1,11 @@ +cmake_minimum_required(VERSION 3.7) +project(cmObject CXX) + +find_package(ZLIB REQUIRED) + +add_library(lib_obj OBJECT libA.cpp libB.cpp) +add_library(lib_sha SHARED $<TARGET_OBJECTS:lib_obj>) +add_library(lib_sta STATIC 
$<TARGET_OBJECTS:lib_obj>) + +target_link_libraries(lib_sha ZLIB::ZLIB) +target_link_libraries(lib_sta ZLIB::ZLIB) diff --git a/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.cpp b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.cpp new file mode 100644 index 000000000..1d579cfdd --- /dev/null +++ b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.cpp @@ -0,0 +1,5 @@ +#include "libA.hpp" + +std::string getLibStr(void) { + return "Hello World"; +} diff --git a/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.hpp b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.hpp new file mode 100644 index 000000000..84b7bc7d3 --- /dev/null +++ b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libA.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include <string> + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +std::string DLL_PUBLIC getLibStr(); diff --git a/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.cpp b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.cpp new file mode 100644 index 000000000..22fe7c251 --- /dev/null +++ b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.cpp @@ -0,0 +1,6 @@ +#include "libB.hpp" +#include <zlib.h> + +std::string getZlibVers(void) { + return zlibVersion(); +} diff --git a/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.hpp b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.hpp new file mode 100644 index 000000000..52ccc1601 --- /dev/null +++ b/meson/test cases/cmake/5 object library/subprojects/cmObjLib/libB.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include <string> + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +std::string DLL_PUBLIC getZlibVers(); diff --git a/meson/test cases/cmake/6 object library no dep/main.cpp b/meson/test cases/cmake/6 object library no dep/main.cpp new file mode 100644 index 000000000..9933ab457 --- /dev/null +++ b/meson/test cases/cmake/6 object library no dep/main.cpp @@ -0,0 +1,11 @@ +#include <stdlib.h> +#include <iostream> +#include "libA.hpp" +#include "libB.hpp" + +using namespace std; + +int main(void) { + cout << getLibStr() << " -- " << getZlibVers() << endl; + return EXIT_SUCCESS; +} diff --git a/meson/test cases/cmake/6 object library no dep/meson.build b/meson/test cases/cmake/6 object library no dep/meson.build new file mode 100644 index 000000000..65b8700b4 --- /dev/null +++ b/meson/test cases/cmake/6 object library no dep/meson.build @@ -0,0 +1,13 @@ +project('cmake_object_lib_test', 'cpp') + +cm = import('cmake') + +sub_pro = cm.subproject('cmObjLib') +sub_sha = sub_pro.dependency('lib_sha') +sub_sta = sub_pro.dependency('lib_sta') + +exe_sha = executable('shared', ['main.cpp'], dependencies: [sub_sha]) +exe_sta = executable('static', ['main.cpp'], dependencies: [sub_sta]) + +test('test1', exe_sha) +test('test1', exe_sta) diff --git a/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/CMakeLists.txt b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/CMakeLists.txt new file mode 100644 index 
000000000..9e136af1b --- /dev/null +++ b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/CMakeLists.txt @@ -0,0 +1,6 @@ +cmake_minimum_required(VERSION 3.7) +project(cmObject CXX) + +add_library(lib_obj OBJECT libA.cpp libB.cpp) +add_library(lib_sha SHARED $<TARGET_OBJECTS:lib_obj>) +add_library(lib_sta STATIC $<TARGET_OBJECTS:lib_obj>) diff --git a/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.cpp b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.cpp new file mode 100644 index 000000000..1d579cfdd --- /dev/null +++ b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.cpp @@ -0,0 +1,5 @@ +#include "libA.hpp" + +std::string getLibStr(void) { + return "Hello World"; +} diff --git a/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.hpp b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.hpp new file mode 100644 index 000000000..84b7bc7d3 --- /dev/null +++ b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libA.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include <string> + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +std::string DLL_PUBLIC getLibStr(); diff --git a/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.cpp b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.cpp new file mode 100644 index 000000000..aa44816d5 --- /dev/null +++ b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.cpp @@ -0,0 +1,5 @@ +#include "libB.hpp" + +std::string getZlibVers(void) { + return "STUB"; +} diff --git a/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.hpp b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.hpp new file mode 100644 index 000000000..52ccc1601 --- /dev/null +++ b/meson/test cases/cmake/6 object library no dep/subprojects/cmObjLib/libB.hpp @@ -0,0 +1,16 @@ +#pragma once + +#include <string> + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +std::string DLL_PUBLIC getZlibVers(); diff --git a/meson/test cases/cmake/7 cmake options/meson.build b/meson/test cases/cmake/7 cmake options/meson.build new file mode 100644 index 000000000..8bb6d1d5c --- /dev/null +++ b/meson/test cases/cmake/7 cmake options/meson.build @@ -0,0 +1,3 @@ +project('cmake_set_opt', ['c', 'cpp']) + +import('cmake').subproject('cmOpts', cmake_options: '-DSOME_CMAKE_VAR=something') diff --git a/meson/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt b/meson/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt new file mode 100644 index 000000000..873b9b3b6 --- /dev/null +++ b/meson/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt @@ -0,0 +1,10 @@ +cmake_minimum_required(VERSION 3.7) +project(testPro) + +if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something") + message(FATAL_ERROR "Setting the CMake var failed") +endif() + +if(NOT "${CMAKE_PREFIX_PATH}" STREQUAL "val1;val2") + message(FATAL_ERROR "Setting the CMAKE_PREFIX_PATH failed '${CMAKE_PREFIX_PATH}'") +endif() diff --git a/meson/test 
cases/cmake/7 cmake options/test.json b/meson/test cases/cmake/7 cmake options/test.json new file mode 100644 index 000000000..f9f0b05a7 --- /dev/null +++ b/meson/test cases/cmake/7 cmake options/test.json @@ -0,0 +1,12 @@ +{ + "matrix": { + "options": { + "cmake_prefix_path": [ + { "val": ["val1", "val2"] } + ], + "build.cmake_prefix_path": [ + { "val": ["val1", "val2"] } + ] + } + } +} diff --git a/meson/test cases/cmake/8 custom command/main.cpp b/meson/test cases/cmake/8 custom command/main.cpp new file mode 100644 index 000000000..7558d60cb --- /dev/null +++ b/meson/test cases/cmake/8 custom command/main.cpp @@ -0,0 +1,11 @@ +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + cout << obj.getOther() << endl; + return 0; +} diff --git a/meson/test cases/cmake/8 custom command/meson.build b/meson/test cases/cmake/8 custom command/meson.build new file mode 100644 index 000000000..a2622523a --- /dev/null +++ b/meson/test cases/cmake/8 custom command/meson.build @@ -0,0 +1,16 @@ +project('cmakeSubTest', ['c', 'cpp']) + +if meson.is_cross_build() + error('MESON_SKIP_TEST this test does not cross compile correctly.') +endif + +cm = import('cmake') + +sub_pro = cm.subproject('cmMod') +sub_dep = sub_pro.dependency('cmModLib') + +assert(sub_pro.target_type('cmModLib') == 'shared_library', 'Target type should be shared_library') +assert(sub_pro.target_type('gen') == 'executable', 'Target type should be executable') + +exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep]) +test('test1', exe1) diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt b/meson/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt new file mode 100644 index 000000000..e27a4690e --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/CMakeLists.txt @@ -0,0 +1,159 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmMod) +set (CMAKE_CXX_STANDARD 14) +set (CMAKE_CXX_STANDARD_REQUIRED ON) + +include_directories(${CMAKE_CURRENT_BINARY_DIR}) +add_definitions("-DDO_NOTHING_JUST_A_FLAG=1") + +add_executable(genMain genMain.cpp) +add_custom_command(OUTPUT main.cpp COMMAND genMain > main.cpp) + +add_executable(gen main.cpp) +add_executable(mycpy cp.cpp) + +# cpyBase +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/genTest.cpp" "${CMAKE_CURRENT_BINARY_DIR}/genTest.hpp" + COMMAND gen ARGS genTest +) + +set(CMD_PART) +list(APPEND CMD_PART COMMAND mycpy cpyBase.cpp.in cpyBase.cpp.in.gen) +list(APPEND CMD_PART COMMAND mycpy cpyBase.cpp.in.gen cpyBase.cpp.out) +list(APPEND CMD_PART COMMAND mycpy cpyBase.cpp.out cpyBase.cpp.something) + +add_custom_command( + OUTPUT cpyBase.cpp + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyBase.cpp.am" cpyBase.cpp.in + ${CMD_PART} + COMMAND mycpy cpyBase.cpp.in cpyBase.cpp.something + COMMAND mycpy cpyBase.cpp.something cpyBase.cpp.IAmRunningOutOfIdeas + COMMAND mycpy cpyBase.cpp.IAmRunningOutOfIdeas cpyBase.cpp + DEPENDS cpyBase.cpp.am;gen +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyBase.hpp.in" + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyBase.hpp.am" cpyBase.hpp.in + DEPENDS cpyBase.hpp.am +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyBase.hpp.something" + COMMAND mycpy cpyBase.hpp.in cpyBase.hpp.something + DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/cpyBase.hpp.in" +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyBase.hpp" + COMMAND mycpy cpyBase.hpp.something cpyBase.hpp + DEPENDS 
"${CMAKE_CURRENT_BINARY_DIR}/cpyBase.hpp.something" +) + +# cpyNext (out of order is on purpose) +# -- first copy round +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/s1_a_hpp/file.txt" + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyNext.hpp.am" file.txt + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyNext.hpp.am" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/s1_a_hpp" +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/s1_b_cpp/file.txt" + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyNext.cpp.am" file.txt + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyNext.cpp.am" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/s1_b_cpp" +) + +# -- final cpy round +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyNext.hpp" + COMMAND mycpy "${CMAKE_CURRENT_BINARY_DIR}/s2_b_hpp/file.txt" cpyNext.hpp + DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/s2_b_hpp/file.txt" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyNext.cpp" + COMMAND mycpy "${CMAKE_CURRENT_BINARY_DIR}/s2_a_cpp/file.txt" cpyNext.cpp + DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/s2_a_cpp/file.txt" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}" +) + +# -- second copy round +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/s2_b_hpp/file.txt" + COMMAND mycpy "${CMAKE_CURRENT_BINARY_DIR}/s1_a_hpp/file.txt" file.txt + DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/s1_a_hpp/file.txt" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/s2_b_hpp" +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/s2_a_cpp/file.txt" + COMMAND mycpy "${CMAKE_CURRENT_BINARY_DIR}/s1_b_cpp/file.txt" file.txt + DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/s1_b_cpp/file.txt" + WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/s2_a_cpp" +) + +# cpyTest (copy file without renaming) +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest.hpp" + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest.hpp" "${CMAKE_CURRENT_BINARY_DIR}/cpyTest.hpp" + DEPENDS "cpyTest/cpyTest.hpp" +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest2.hpp" + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest2.hpp" "${CMAKE_CURRENT_BINARY_DIR}/cpyTest2.hpp" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest2.hpp" +) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest3.hpp" + COMMAND mycpy cpyTest3.hpp "${CMAKE_CURRENT_BINARY_DIR}/cpyTest3.hpp" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest3.hpp" + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest" +) + +add_subdirectory(cpyTest ccppyyTTeesstt) + +add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyTest/some/directory/cpyTest5.hpp" + COMMAND ${CMAKE_COMMAND} -E copy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest5.hpp" "${CMAKE_CURRENT_BINARY_DIR}/cpyTest/some/directory/cpyTest5.hpp" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest/cpyTest5.hpp" +) +include_directories("${CMAKE_CURRENT_BINARY_DIR}/cpyTest/some") + +add_library(cmModLib SHARED cmMod.cpp genTest.cpp cpyBase.cpp cpyBase.hpp cpyNext.cpp cpyNext.hpp cpyTest.cpp cpyTest.hpp cpyTest2.hpp cpyTest3.hpp cpyTest/some/directory/cpyTest5.hpp) +include(GenerateExportHeader) +generate_export_header(cmModLib) + +set(ARGS_TEST arg1) +set(ARGS_TEST ${ARGS_TEST} arg2) + +add_executable(macro_name macro_name.cpp) +add_executable(args_test args_test.cpp) +add_custom_target(args_test_cmd + COMMAND args_test ${ARGS_TEST} +) +add_custom_target(macro_name_cmd COMMAND macro_name) + +if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + message(STATUS 
"Running the -include test case on macro_name") + add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp" + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyInc.hpp.am" "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/cpyInc.hpp.am" + ) + target_compile_options(macro_name PUBLIC -DTEST_CMD_INCLUDE -include "${CMAKE_CURRENT_BINARY_DIR}/cpyInc.hpp") +endif() + +# Only executable targets are replaced in the command +# all other target names are kept as is +add_custom_target(clang-format COMMAND clang-format -i cmMod.cpp) + +add_dependencies(cmModLib args_test_cmd tgtCpyTest4) +add_dependencies(args_test_cmd macro_name_cmd;gen;mycpy) diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/args_test.cpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/args_test.cpp new file mode 100644 index 000000000..abb8a4266 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/args_test.cpp @@ -0,0 +1,18 @@ +#include +#include + +using namespace std; + +int main(int argc, const char *argv[]) { + if(argc != 3 || string(argv[1]) != "arg1" || string(argv[2]) != "arg2") { + cerr << argv[0] << " requires 2 args" << endl; + return 1; + } + + ifstream in1("macro_name.txt"); + ofstream out1("cmModLib.hpp"); + out1 << "#define " << in1.rdbuf() << " = \"plop\""; + + + return 0; +} diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp new file mode 100644 index 000000000..e4d531829 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.cpp @@ -0,0 +1,24 @@ +#include "cmMod.hpp" +#include "genTest.hpp" +#include "cpyBase.hpp" +#include "cpyNext.hpp" +#include "cpyTest.hpp" +#include "cmModLib.hpp" + +#ifndef FOO +#error FOO not declared +#endif + +using namespace std; + +cmModClass::cmModClass(string foo) { + str = foo + " World"; +} + +string cmModClass::getStr() const { + return str; +} + +string cmModClass::getOther() const { + return "Srings:\n - " + getStrCpy() + "\n - " + getStrNext() + "\n - " + getStrCpyTest(); +} diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.hpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.hpp new file mode 100644 index 000000000..cfdbe880f --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cmMod.hpp @@ -0,0 +1,14 @@ +#pragma once + +#include +#include "cmmodlib_export.h" + +class CMMODLIB_EXPORT cmModClass { + private: + std::string str; + public: + cmModClass(std::string foo); + + std::string getStr() const; + std::string getOther() const; +}; diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cp.cpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cp.cpp new file mode 100644 index 000000000..09433f24f --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cp.cpp @@ -0,0 +1,22 @@ +#include +#include + +using namespace std; + +int main(int argc, char *argv[]) { + if(argc < 3) { + cerr << argv[0] << " requires an input and an output file!" 
<< endl; + return 1; + } + + ifstream src(argv[1]); + ofstream dst(argv[2]); + + if(!src.is_open()) { + cerr << "Failed to open " << argv[1] << endl; + return 2; + } + + dst << src.rdbuf(); + return 0; +} diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.cpp.am b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.cpp.am new file mode 100644 index 000000000..98dd09c9c --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.cpp.am @@ -0,0 +1,5 @@ +#include "cpyBase.hpp" + +std::string getStrCpy() { + return "Hello Copied File"; +} diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.hpp.am b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.hpp.am new file mode 100644 index 000000000..c255fb1d3 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyBase.hpp.am @@ -0,0 +1,5 @@ +#pragma once + +#include + +std::string getStrCpy(); diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am new file mode 100644 index 000000000..07c8ff790 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyInc.hpp.am @@ -0,0 +1,3 @@ +#pragma once + +#define CPY_INC_WAS_INCLUDED 1 diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.cpp.am b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.cpp.am new file mode 100644 index 000000000..20a8815ae --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.cpp.am @@ -0,0 +1,5 @@ +#include "cpyNext.hpp" + +std::string getStrNext() { + return "Hello Copied File -- now even more convoluted!"; +} diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.hpp.am b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.hpp.am new file mode 100644 index 000000000..41919d8d6 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyNext.hpp.am @@ -0,0 +1,5 @@ +#pragma once + +#include + +std::string getStrNext(); diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp new file mode 100644 index 000000000..627b8f900 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest.cpp @@ -0,0 +1,9 @@ +#include "cpyTest.hpp" +#include "cpyTest2.hpp" +#include "cpyTest3.hpp" +#include "ccppyyTTeesstt/cpyTest4.hpp" +#include "directory/cpyTest5.hpp" + +std::string getStrCpyTest() { + return CPY_TEST_STR_2 CPY_TEST_STR_3 CPY_TEST_STR_4 CPY_TEST_STR_5; +} diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt new file mode 100644 index 000000000..f577dcf2a --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/CMakeLists.txt @@ -0,0 +1,7 @@ +add_custom_command( + OUTPUT cpyTest4.hpp + COMMAND mycpy "${CMAKE_CURRENT_SOURCE_DIR}/cpyTest4.hpp" cpyTest4.hpp + DEPENDS cpyTest4.hpp +) + +add_custom_target(tgtCpyTest4 DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/cpyTest4.hpp") diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest.hpp new file mode 100644 index 000000000..e8dec13c0 --- /dev/null +++ b/meson/test cases/cmake/8 custom 
command/subprojects/cmMod/cpyTest/cpyTest.hpp @@ -0,0 +1,5 @@ +#pragma once + +#include + +std::string getStrCpyTest(); diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp new file mode 100644 index 000000000..bdbcc56cb --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest2.hpp @@ -0,0 +1,3 @@ +#pragma once + +#define CPY_TEST_STR_2 "Hello " diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp new file mode 100644 index 000000000..2d13376b1 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest3.hpp @@ -0,0 +1,3 @@ +#pragma once + +#define CPY_TEST_STR_3 "CopyFile" diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp new file mode 100644 index 000000000..4124c430b --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest4.hpp @@ -0,0 +1,3 @@ +#pragma once + +#define CPY_TEST_STR_4 " test" diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp new file mode 100644 index 000000000..3669f00b4 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/cpyTest/cpyTest5.hpp @@ -0,0 +1,3 @@ +#pragma once + +#define CPY_TEST_STR_5 " test" diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp new file mode 100644 index 000000000..33f020159 --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/genMain.cpp @@ -0,0 +1,40 @@ +#include + +using namespace std; + +int main() { + cout << R"asd( +#include +#include + +using namespace std; + +int main(int argc, const char *argv[]) { + if(argc < 2) { + cerr << argv[0] << " requires an output file!" 
<< endl; + return 1; + } + ofstream out1(string(argv[1]) + ".hpp"); + ofstream out2(string(argv[1]) + ".cpp"); + out1 << R"( +#pragma once + +#include <string> + +std::string getStr(); +)"; + + out2 << R"( +#include ")" << argv[1] << R"(.hpp" + +std::string getStr() { + return "Hello World"; +} +)"; + + return 0; +} +)asd"; + + return 0; +} diff --git a/meson/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp b/meson/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp new file mode 100644 index 000000000..964062ffe --- /dev/null +++ b/meson/test cases/cmake/8 custom command/subprojects/cmMod/macro_name.cpp @@ -0,0 +1,20 @@ +#include <chrono> +#include <fstream> +#include <iostream> +#include <thread> + +using namespace std; + +#ifdef TEST_CMD_INCLUDE +#if CPY_INC_WAS_INCLUDED != 1 +#error "cpyInc.hpp was not included" +#endif +#endif + +int main() { + this_thread::sleep_for(chrono::seconds(1)); + ofstream out1("macro_name.txt"); + out1 << "FOO"; + + return 0; +} diff --git a/meson/test cases/cmake/9 disabled subproject/meson.build b/meson/test cases/cmake/9 disabled subproject/meson.build new file mode 100644 index 000000000..c153fa3a3 --- /dev/null +++ b/meson/test cases/cmake/9 disabled subproject/meson.build @@ -0,0 +1,6 @@ +project('cmakeSubTest', ['c', 'cpp']) + +cm = import('cmake') + +sub_pro = cm.subproject('nothinig', required: false) +assert(not sub_pro.found(), 'subproject found() reports wrong value') diff --git a/meson/test cases/common/1 trivial/meson.build b/meson/test cases/common/1 trivial/meson.build new file mode 100644 index 000000000..2e424d507 --- /dev/null +++ b/meson/test cases/common/1 trivial/meson.build @@ -0,0 +1,29 @@ +# Comment on the first line +project('trivial test', + # Comment inside a function call + array for language list + ['c'], default_options: ['buildtype=debug'], + meson_version : '>=0.52.0') +#this is a comment +sources = 'trivial.c' + +cc = meson.get_compiler('c') +if cc.get_id() == 'intel' + # Error out if the -std=xxx option is incorrect + add_project_arguments('-diag-error', '10159', language : 'c') +elif cc.get_id() == 'intel-cl' + add_project_arguments('/Qdiag-error:10159', language : 'c') +endif + +exe = executable('trivialprog', sources : sources) +assert(exe.name() == 'trivialprog') +test('runtest', exe) # This is a comment + +has_not_changed = false +if is_disabler(exe) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'Executable has changed.') + +assert(not is_disabler(exe), 'Executable is a disabler.') diff --git a/meson/test cases/common/1 trivial/trivial.c b/meson/test cases/common/1 trivial/trivial.c new file mode 100644 index 000000000..96612d48b --- /dev/null +++ b/meson/test cases/common/1 trivial/trivial.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("Trivial test is working.\n"); + return 0; +} diff --git a/meson/test cases/common/10 man install/bar.2 b/meson/test cases/common/10 man install/bar.2 new file mode 100644 index 000000000..9d82d7e15 --- /dev/null +++ b/meson/test cases/common/10 man install/bar.2 @@ -0,0 +1 @@ +this is a man page of bar.2, its contents are irrelevant \ No newline at end of file diff --git a/meson/test cases/common/10 man install/baz.1.in b/meson/test cases/common/10 man install/baz.1.in new file mode 100644 index 000000000..d0b79b43e --- /dev/null +++ b/meson/test cases/common/10 man install/baz.1.in @@ -0,0 +1,6 @@ +This is a man page of baz.1 it was generated @TODAY@. 
+ +You should not put generation timestamps in real world projects +because they break reproducible builds. This manpage is written +by professionals or under the supervision of professionals. Do +not try this at home. diff --git a/meson/test cases/common/10 man install/foo.1 b/meson/test cases/common/10 man install/foo.1 new file mode 100644 index 000000000..647c0970c --- /dev/null +++ b/meson/test cases/common/10 man install/foo.1 @@ -0,0 +1 @@ +this is a man page of foo.1 its contents are irrelevant diff --git a/meson/test cases/common/10 man install/foo.fr.1 b/meson/test cases/common/10 man install/foo.fr.1 new file mode 100644 index 000000000..647c0970c --- /dev/null +++ b/meson/test cases/common/10 man install/foo.fr.1 @@ -0,0 +1 @@ +this is a man page of foo.1 its contents are irrelevant diff --git a/meson/test cases/common/10 man install/meson.build b/meson/test cases/common/10 man install/meson.build new file mode 100644 index 000000000..05c52782e --- /dev/null +++ b/meson/test cases/common/10 man install/meson.build @@ -0,0 +1,14 @@ +project('man install', 'c') +m1 = install_man('foo.1') +m2 = install_man('bar.2') +m3 = install_man('foo.fr.1', locale: 'fr') +install_man('vanishing/vanishing.2') +subdir('vanishing') + +cdata = configuration_data() +cdata.set('TODAY', '$this_day') +b1 = configure_file(input : 'baz.1.in', + output : 'baz.1', + configuration : cdata) + +install_man(b1) diff --git a/meson/test cases/common/10 man install/test.json b/meson/test cases/common/10 man install/test.json new file mode 100644 index 000000000..5ef673a28 --- /dev/null +++ b/meson/test cases/common/10 man install/test.json @@ -0,0 +1,10 @@ +{ + "installed": [ + { "type": "file", "file": "usr/share/man/man1/foo.1" }, + { "type": "file", "file": "usr/share/man/fr/man1/foo.1" }, + { "type": "file", "file": "usr/share/man/man2/bar.2" }, + { "type": "file", "file": "usr/share/man/man1/vanishing.1" }, + { "type": "file", "file": "usr/share/man/man2/vanishing.2" }, + { "type": "file", "file": "usr/share/man/man1/baz.1" } + ] +} diff --git a/meson/test cases/common/10 man install/vanishing/meson.build b/meson/test cases/common/10 man install/vanishing/meson.build new file mode 100644 index 000000000..101545039 --- /dev/null +++ b/meson/test cases/common/10 man install/vanishing/meson.build @@ -0,0 +1 @@ +install_man('vanishing.1') diff --git a/meson/test cases/common/10 man install/vanishing/vanishing.1 b/meson/test cases/common/10 man install/vanishing/vanishing.1 new file mode 100644 index 000000000..532608e8f --- /dev/null +++ b/meson/test cases/common/10 man install/vanishing/vanishing.1 @@ -0,0 +1 @@ +This is a man page of the vanishing subdirectory. diff --git a/meson/test cases/common/10 man install/vanishing/vanishing.2 b/meson/test cases/common/10 man install/vanishing/vanishing.2 new file mode 100644 index 000000000..d12f76ac8 --- /dev/null +++ b/meson/test cases/common/10 man install/vanishing/vanishing.2 @@ -0,0 +1 @@ +This is a second man page of the vanishing subdirectory. 
diff --git a/meson/test cases/common/100 postconf with args/meson.build b/meson/test cases/common/100 postconf with args/meson.build new file mode 100644 index 000000000..a34502c33 --- /dev/null +++ b/meson/test cases/common/100 postconf with args/meson.build @@ -0,0 +1,10 @@ +project('postconf script', 'c') + +conf = configure_file( + configuration : configuration_data(), + output : 'out' +) + +meson.add_postconf_script(find_program('postconf.py'), '5', '33', conf) + +test('post', executable('prog', 'prog.c')) diff --git a/meson/test cases/common/100 postconf with args/postconf.py b/meson/test cases/common/100 postconf with args/postconf.py new file mode 100644 index 000000000..cef7f790f --- /dev/null +++ b/meson/test cases/common/100 postconf with args/postconf.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +import sys, os + +template = '''#pragma once + +#define THE_NUMBER {} +#define THE_ARG1 {} +#define THE_ARG2 {} +''' + +input_file = os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat') +output_file = os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h') + +with open(input_file) as f: + data = f.readline().strip() +with open(output_file, 'w') as f: + f.write(template.format(data, sys.argv[1], sys.argv[2])) diff --git a/meson/test cases/common/100 postconf with args/prog.c b/meson/test cases/common/100 postconf with args/prog.c new file mode 100644 index 000000000..5db9d17df --- /dev/null +++ b/meson/test cases/common/100 postconf with args/prog.c @@ -0,0 +1,5 @@ +#include"generated.h" + +int main(void) { + return THE_NUMBER != 9 || THE_ARG1 != 5 || THE_ARG2 != 33; +} diff --git a/meson/test cases/common/100 postconf with args/raw.dat b/meson/test cases/common/100 postconf with args/raw.dat new file mode 100644 index 000000000..ec635144f --- /dev/null +++ b/meson/test cases/common/100 postconf with args/raw.dat @@ -0,0 +1 @@ +9 diff --git a/meson/test cases/common/101 testframework options/meson.build b/meson/test cases/common/101 testframework options/meson.build new file mode 100644 index 000000000..827bae76a --- /dev/null +++ b/meson/test cases/common/101 testframework options/meson.build @@ -0,0 +1,8 @@ +# normally run only from run_tests.py or run_project_tests.py +# else do like +# meson build '-Dtestoption=A string with spaces' -Dother_one=true -Dcombo_opt=one -Dprefix=/usr -Dlibdir=lib -Dbackend=ninja -Dwerror=True +project('options', 'c') + +assert(get_option('testoption') == 'A string with spaces', 'Incorrect value for testoption option.') +assert(get_option('other_one') == true, 'Incorrect value for other_one option.') +assert(get_option('combo_opt') == 'one', 'Incorrect value for combo_opt option.') diff --git a/meson/test cases/common/101 testframework options/meson_options.txt b/meson/test cases/common/101 testframework options/meson_options.txt new file mode 100644 index 000000000..653dd75f9 --- /dev/null +++ b/meson/test cases/common/101 testframework options/meson_options.txt @@ -0,0 +1,3 @@ +option('testoption', type : 'string', value : 'optval', description : 'An option to do something') +option('other_one', type : 'boolean', value : false) +option('combo_opt', type : 'combo', choices : ['one', 'two', 'combo'], value : 'combo') diff --git a/meson/test cases/common/101 testframework options/test.json b/meson/test cases/common/101 testframework options/test.json new file mode 100644 index 000000000..65bf3c04f --- /dev/null +++ b/meson/test cases/common/101 testframework options/test.json @@ -0,0 +1,10 @@ +{ + "matrix": { + "options": { + "testoption": [{ 
"val": "A string with spaces" }], + "other_one": [{ "val": "true" }], + "combo_opt": [{ "val": "one" }], + "werror": [{ "val": "true" }] + } + } +} diff --git a/meson/test cases/common/102 extract same name/lib.c b/meson/test cases/common/102 extract same name/lib.c new file mode 100644 index 000000000..f3d0417ea --- /dev/null +++ b/meson/test cases/common/102 extract same name/lib.c @@ -0,0 +1,3 @@ +int func1(void) { + return 23; +} diff --git a/meson/test cases/common/102 extract same name/main.c b/meson/test cases/common/102 extract same name/main.c new file mode 100644 index 000000000..e5a0c1eab --- /dev/null +++ b/meson/test cases/common/102 extract same name/main.c @@ -0,0 +1,6 @@ +int func1(void); +int func2(void); + +int main(void) { + return !(func1() == 23 && func2() == 42); +} diff --git a/meson/test cases/common/102 extract same name/meson.build b/meson/test cases/common/102 extract same name/meson.build new file mode 100644 index 000000000..08daa5b22 --- /dev/null +++ b/meson/test cases/common/102 extract same name/meson.build @@ -0,0 +1,19 @@ +project('object extraction', 'c') + +if meson.backend() == 'xcode' + # Xcode gives object files unique names but only if they would clash. For example + # two files named lib.o instead get the following names: + # + # lib-4fbe522d8ba4cb1f1b89cc2df640a2336b92e1a5565f0a4c5a79b5b5e2969eb9.o + # lib-4fbe522d8ba4cb1f1b89cc2df640a2336deeff2bc2297affaadbe20f5cbfee56.o + # + # No-one has reverse engineered the naming scheme so we would access them. + # IF you feel up to the challenge, patches welcome. + error('MESON_SKIP_TEST, Xcode can not extract objs when they would have the same filename.') +endif + +lib = library('somelib', ['lib.c', 'src/lib.c']) +# Also tests that the object list is flattened properly +obj = lib.extract_objects(['lib.c', ['src/lib.c']]) +exe = executable('main', 'main.c', objects: obj) +test('extraction', exe) diff --git a/meson/test cases/common/102 extract same name/src/lib.c b/meson/test cases/common/102 extract same name/src/lib.c new file mode 100644 index 000000000..a7d7e77f9 --- /dev/null +++ b/meson/test cases/common/102 extract same name/src/lib.c @@ -0,0 +1,3 @@ +int func2(void) { + return 42; +} diff --git a/meson/test cases/common/103 has header symbol/meson.build b/meson/test cases/common/103 has header symbol/meson.build new file mode 100644 index 000000000..459049100 --- /dev/null +++ b/meson/test cases/common/103 has header symbol/meson.build @@ -0,0 +1,40 @@ +project( + 'has header symbol', + 'c', 'cpp', + default_options : ['cpp_std=c++11'], +) + +cc = meson.get_compiler('c') +cpp = meson.get_compiler('cpp') + +foreach comp : [cc, cpp] + assert (comp.has_header_symbol('stdio.h', 'int'), 'base types should always be available') + assert (comp.has_header_symbol('stdio.h', 'printf'), 'printf function not found') + assert (comp.has_header_symbol('stdio.h', 'FILE'), 'FILE structure not found') + assert (comp.has_header_symbol('limits.h', 'INT_MAX'), 'INT_MAX define not found') + assert (not comp.has_header_symbol('limits.h', 'guint64'), 'guint64 is not defined in limits.h') + assert (not comp.has_header_symbol('stdlib.h', 'FILE'), 'FILE structure is defined in stdio.h, not stdlib.h') + assert (not comp.has_header_symbol('stdlol.h', 'printf'), 'stdlol.h shouldn\'t exist') + assert (not comp.has_header_symbol('stdlol.h', 'int'), 'shouldn\'t be able to find "int" with invalid header') +endforeach + +# This is available on Glibc, Solaris & the BSD's, so just test for _GNU_SOURCE +# on Linux +if 
cc.has_function('ppoll') and host_machine.system() == 'linux' + assert (not cc.has_header_symbol('poll.h', 'ppoll'), 'ppoll should not be accessible without _GNU_SOURCE') + assert (cc.has_header_symbol('poll.h', 'ppoll', prefix : '#define _GNU_SOURCE'), 'ppoll should be accessible with _GNU_SOURCE') +endif + +assert (cpp.has_header_symbol('iostream', 'std::iostream'), 'iostream not found in iostream.h') +assert (cpp.has_header_symbol('vector', 'std::vector'), 'vector not found in vector.h') +assert (not cpp.has_header_symbol('limits.h', 'std::iostream'), 'iostream should not be defined in limits.h') + +# Cross compilation and boost do not mix. +if not meson.is_cross_build() + boost = dependency('boost', required : false) + if boost.found() + assert (cpp.has_header_symbol('boost/math/quaternion.hpp', 'boost::math::quaternion', dependencies : boost), 'quaternion not found') + else + assert (not cpp.has_header_symbol('boost/math/quaternion.hpp', 'boost::math::quaternion', dependencies : boost), 'quaternion found?!') + endif +endif diff --git a/meson/test cases/common/104 has arg/meson.build b/meson/test cases/common/104 has arg/meson.build new file mode 100644 index 000000000..ba0731111 --- /dev/null +++ b/meson/test cases/common/104 has arg/meson.build @@ -0,0 +1,60 @@ +project('has arg', 'c', 'cpp') + +cc = meson.get_compiler('c') +cpp = meson.get_compiler('cpp') + +if cc.get_id() == 'msvc' + is_arg = '/O2' + useless = '/DFOO' +else + is_arg = '-O2' + useless = '-DFOO' +endif + +isnt_arg = '-fiambroken' + +assert(cc.has_argument(is_arg), 'Arg that should have worked does not work.') +assert(not cc.has_argument(isnt_arg), 'Arg that should be broken is not.') + +assert(cpp.has_argument(is_arg), 'Arg that should have worked does not work.') +assert(not cpp.has_argument(isnt_arg), 'Arg that should be broken is not.') + +assert(cc.get_supported_arguments([is_arg, isnt_arg, useless]) == [is_arg, useless], 'Arg filtering returned different result.') +assert(cpp.get_supported_arguments([is_arg, isnt_arg, useless]) == [is_arg, useless], 'Arg filtering returned different result.') + +# Have useless at the end to ensure that the search goes from front to back. +l1 = cc.first_supported_argument([isnt_arg, is_arg, isnt_arg, useless]) +l2 = cc.first_supported_argument(isnt_arg, isnt_arg, isnt_arg) + +assert(l1.length() == 1, 'First supported returned wrong result.') +assert(l1.get(0) == is_arg, 'First supported returned wrong argument.') +assert(l2.length() == 0, 'First supported did not return empty array.') + +l1 = cpp.first_supported_argument([isnt_arg, is_arg, isnt_arg, useless]) +l2 = cpp.first_supported_argument(isnt_arg, isnt_arg, isnt_arg) + +assert(l1.length() == 1, 'First supported returned wrong result.') +assert(l1.get(0) == is_arg, 'First supported returned wrong argument.') +assert(l2.length() == 0, 'First supported did not return empty array.') + +if cc.get_id() == 'gcc' + pre_arg = '-Wformat' + # NOTE: We have special handling for -Wno-foo args because gcc silently + # ignores unknown -Wno-foo args unless you pass -Werror, so for this test, we + # pass it as two separate arguments. 
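To make the -Wno- caveat above concrete, here is a rough sketch of the underlying gcc behaviour; the file name probe.c and the bogus flag are invented for illustration and are not part of this test:

    /* probe.c
     *
     *   gcc -c -Wno-not-a-real-warning probe.c
     *       exits 0: gcc silently accepts unknown -Wno- options so that
     *       projects can pass new -Wno- flags to old compilers.
     *
     *   gcc -c -Wall -Wno-not-a-real-warning probe.c
     *       the unused-variable warning below fires, and gcc then also
     *       reports the unrecognized -Wno- option.
     *
     * A naive probe on an empty file therefore cannot detect a bogus
     * -Wno- flag on its own, which is why special handling is needed.
     */
    int probe(void) {
        int unused_value = 0;   /* provokes -Wunused-variable under -Wall */
        return 1;
    }
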
+ anti_pre_arg = ['-W', 'no-format'] + arg = '-Werror=format-security' + assert(not cc.has_multi_arguments([anti_pre_arg, arg]), 'Arg that should be broken is not.') + assert(cc.has_multi_arguments(pre_arg), 'Arg that should have worked does not work.') + assert(cc.has_multi_arguments([pre_arg, arg]), 'Arg that should have worked does not work.') + # Test that gcc correctly errors out on unknown -Wno flags + assert(not cc.has_argument('-Wno-lol-meson-test-flags'), 'should error out on unknown -Wno args') + assert(not cc.has_multi_arguments(['-Wno-pragmas', '-Wno-lol-meson-test-flags']), 'should error out even if some -Wno args are valid') +endif + +if cc.get_id() == 'clang' and cc.version().version_compare('<=4.0.0') + # 4.0.0 does not support -fpeel-loops. Newer versions may. + # Please adjust above version number as new versions of clang are released. + notyet_arg = '-fpeel-loops' + assert(not cc.has_argument(notyet_arg), 'Arg that should be broken (unless clang added support recently) is not.') +endif diff --git a/meson/test cases/common/105 generatorcustom/catter.py b/meson/test cases/common/105 generatorcustom/catter.py new file mode 100755 index 000000000..c2726723c --- /dev/null +++ b/meson/test cases/common/105 generatorcustom/catter.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import sys + +output = sys.argv[-1] +inputs = sys.argv[1:-1] + +with open(output, 'w') as ofile: + ofile.write('#pragma once\n') + for i in inputs: + with open(i) as ifile: + content = ifile.read() + ofile.write(content) + ofile.write('\n') diff --git a/meson/test cases/common/105 generatorcustom/gen-resx.py b/meson/test cases/common/105 generatorcustom/gen-resx.py new file mode 100755 index 000000000..242a962d8 --- /dev/null +++ b/meson/test cases/common/105 generatorcustom/gen-resx.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 + +import sys + +ofile = sys.argv[1] +num = sys.argv[2] + +with open(ofile, 'w') as f: + f.write(f'res{num}\n') diff --git a/meson/test cases/common/105 generatorcustom/gen.py b/meson/test cases/common/105 generatorcustom/gen.py new file mode 100755 index 000000000..1464008f9 --- /dev/null +++ b/meson/test cases/common/105 generatorcustom/gen.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +import sys + +ifile = sys.argv[1] +ofile = sys.argv[2] + +with open(ifile) as f: + resname = f.readline().strip() + +templ = 'const char %s[] = "%s";\n' +with open(ofile, 'w') as f: + f.write(templ % (resname, resname)) diff --git a/meson/test cases/common/105 generatorcustom/main.c b/meson/test cases/common/105 generatorcustom/main.c new file mode 100644 index 000000000..153dc12cb --- /dev/null +++ b/meson/test cases/common/105 generatorcustom/main.c @@ -0,0 +1,8 @@ +#include + +#include "alltogether.h" + +int main(void) { + printf("%s - %s - %s - %s\n", res1, res2, res3, res4); + return 0; +} diff --git a/meson/test cases/common/105 generatorcustom/meson.build b/meson/test cases/common/105 generatorcustom/meson.build new file mode 100644 index 000000000..2128d2164 --- /dev/null +++ b/meson/test cases/common/105 generatorcustom/meson.build @@ -0,0 +1,28 @@ +project('generatorcustom', 'c') + +creator = find_program('gen.py') +catter = find_program('catter.py') +gen_resx = find_program('gen-resx.py') + +gen = generator(creator, + output: '@BASENAME@.h', + arguments : ['@INPUT@', '@OUTPUT@']) + +res3 = custom_target('gen-res3', + output : 'res3.txt', + command : [gen_resx, '@OUTPUT@', '3']) + +res4 = custom_target('gen-res4', + output : 'res4.txt', + command : [gen_resx, '@OUTPUT@', '4']) + +hs = 
gen.process('res1.txt', 'res2.txt', res3, res4[0]) + +allinone = custom_target('alltogether', + input : hs, + output : 'alltogether.h', + command : [catter, '@INPUT@', '@OUTPUT@']) + +proggie = executable('proggie', 'main.c', allinone) + +test('proggie', proggie) diff --git a/meson/test cases/common/105 generatorcustom/res1.txt b/meson/test cases/common/105 generatorcustom/res1.txt new file mode 100644 index 000000000..6487c56ba --- /dev/null +++ b/meson/test cases/common/105 generatorcustom/res1.txt @@ -0,0 +1 @@ +res1 diff --git a/meson/test cases/common/105 generatorcustom/res2.txt b/meson/test cases/common/105 generatorcustom/res2.txt new file mode 100644 index 000000000..0a8879d51 --- /dev/null +++ b/meson/test cases/common/105 generatorcustom/res2.txt @@ -0,0 +1 @@ +res2 diff --git a/meson/test cases/common/106 multiple dir configure file/meson.build b/meson/test cases/common/106 multiple dir configure file/meson.build new file mode 100644 index 000000000..a4615fae8 --- /dev/null +++ b/meson/test cases/common/106 multiple dir configure file/meson.build @@ -0,0 +1,11 @@ +project('multiple dir configure file', 'c') + +subdir('subdir') + +configure_file(input : 'subdir/someinput.in', + output : 'outputhere', + copy: true) + +configure_file(input : cfile1, + output : '@BASENAME@', + copy: true) diff --git a/meson/test cases/common/106 multiple dir configure file/subdir/foo.txt b/meson/test cases/common/106 multiple dir configure file/subdir/foo.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/106 multiple dir configure file/subdir/meson.build b/meson/test cases/common/106 multiple dir configure file/subdir/meson.build new file mode 100644 index 000000000..503df964a --- /dev/null +++ b/meson/test cases/common/106 multiple dir configure file/subdir/meson.build @@ -0,0 +1,11 @@ +configure_file(input : 'someinput.in', + output : 'outputsubdir', + install : false, + copy: true) + +py3 = import('python3').find_python() + +cfile1 = configure_file(input : 'foo.txt', + output : 'foo.h.in', + capture : true, + command : [py3, '-c', 'print("#mesondefine FOO_BAR")']) diff --git a/meson/test cases/common/106 multiple dir configure file/subdir/someinput.in b/meson/test cases/common/106 multiple dir configure file/subdir/someinput.in new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/107 spaces backslash/asm output/meson.build b/meson/test cases/common/107 spaces backslash/asm output/meson.build new file mode 100644 index 000000000..b5f13f5ca --- /dev/null +++ b/meson/test cases/common/107 spaces backslash/asm output/meson.build @@ -0,0 +1,2 @@ +configure_file(output : 'blank.txt', configuration : configuration_data()) + diff --git a/meson/test cases/common/107 spaces backslash/comparer-end-notstring.c b/meson/test cases/common/107 spaces backslash/comparer-end-notstring.c new file mode 100644 index 000000000..8b8190ff8 --- /dev/null +++ b/meson/test cases/common/107 spaces backslash/comparer-end-notstring.c @@ -0,0 +1,20 @@ +#include "comparer.h" + +#ifndef COMPARER_INCLUDED +#error "comparer.h not included" +#endif + +/* This converts foo\\\\bar\\\\ to "foo\\bar\\" (string literal) */ +#define Q(x) #x +#define QUOTE(x) Q(x) + +#define COMPARE_WITH "foo\\bar\\" /* This is the literal `foo\bar\` */ + +int main(void) { + if(strcmp(QUOTE(DEF_WITH_BACKSLASH), COMPARE_WITH)) { + printf("Arg string is quoted incorrectly: %s instead of %s\n", + QUOTE(DEF_WITH_BACKSLASH), COMPARE_WITH); + return 1; + } + return 0; +} diff --git 
a/meson/test cases/common/107 spaces backslash/comparer-end.c b/meson/test cases/common/107 spaces backslash/comparer-end.c new file mode 100644 index 000000000..8cff1b1b1 --- /dev/null +++ b/meson/test cases/common/107 spaces backslash/comparer-end.c @@ -0,0 +1,16 @@ +#include "comparer.h" + +#ifndef COMPARER_INCLUDED +#error "comparer.h not included" +#endif + +#define COMPARE_WITH "foo\\bar\\" /* This is `foo\bar\` */ + +int main(void) { + if (strcmp (DEF_WITH_BACKSLASH, COMPARE_WITH)) { + printf ("Arg string is quoted incorrectly: %s vs %s\n", + DEF_WITH_BACKSLASH, COMPARE_WITH); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/107 spaces backslash/comparer.c b/meson/test cases/common/107 spaces backslash/comparer.c new file mode 100644 index 000000000..7e3033e1e --- /dev/null +++ b/meson/test cases/common/107 spaces backslash/comparer.c @@ -0,0 +1,16 @@ +#include "comparer.h" + +#ifndef COMPARER_INCLUDED +#error "comparer.h not included" +#endif + +#define COMPARE_WITH "foo\\bar" /* This is the literal `foo\bar` */ + +int main(void) { + if (strcmp (DEF_WITH_BACKSLASH, COMPARE_WITH)) { + printf ("Arg string is quoted incorrectly: %s instead of %s\n", + DEF_WITH_BACKSLASH, COMPARE_WITH); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/107 spaces backslash/include/comparer.h b/meson/test cases/common/107 spaces backslash/include/comparer.h new file mode 100644 index 000000000..624d96c92 --- /dev/null +++ b/meson/test cases/common/107 spaces backslash/include/comparer.h @@ -0,0 +1,4 @@ +#include +#include + +#define COMPARER_INCLUDED diff --git a/meson/test cases/common/107 spaces backslash/meson.build b/meson/test cases/common/107 spaces backslash/meson.build new file mode 100644 index 000000000..d5904946e --- /dev/null +++ b/meson/test cases/common/107 spaces backslash/meson.build @@ -0,0 +1,28 @@ +project('comparer', 'c') + +# Added manually as a c_arg to test handling of include paths with backslashes +# and spaces. This is especially useful on Windows in vcxproj files since it +# stores include directories in a separate element that has its own +# context-specific escaping/quoting. +include_dir = meson.current_source_dir() + '/include' +default_c_args = ['-I' + include_dir] + +if meson.get_compiler('c').get_argument_syntax() == 'msvc' + default_c_args += ['/Faasm output\\'] + # Hack to create the 'asm output' directory in the builddir + subdir('asm output') +endif + +# Path can contain \. Here we're sending `"foo\bar"`. +test('backslash quoting', + executable('comparer', 'comparer.c', + c_args : default_c_args + ['-DDEF_WITH_BACKSLASH="foo\\bar"'])) +# Path can end in \ without any special quoting. Here we send `"foo\bar\"`. +test('backslash end quoting', + executable('comparer-end', 'comparer-end.c', + c_args : default_c_args + ['-DDEF_WITH_BACKSLASH="foo\\bar\\"'])) +# Path can (really) end in \ if we're not passing a string literal without any +# special quoting. Here we're sending `foo\bar\`. +test('backslash end quoting when not a string literal', + executable('comparer-end-notstring', 'comparer-end-notstring.c', + c_args : default_c_args + ['-DDEF_WITH_BACKSLASH=foo\\bar\\'])) diff --git a/meson/test cases/common/108 ternary/meson.build b/meson/test cases/common/108 ternary/meson.build new file mode 100644 index 000000000..7539d569f --- /dev/null +++ b/meson/test cases/common/108 ternary/meson.build @@ -0,0 +1,12 @@ +project('ternary operator', 'c') + +x = true +one = true ? 1 : error('False branch should not be evaluated') +two = false ? 
error('True branch should not be evaluated.') : 2 +three = '@0@'.format(x ? 'yes' : 'no') +four = [x ? '0' : '1'] + +assert(one == 1, 'Return value from ternary true is wrong.') +assert(two == 2, 'Return value from ternary false is wrong.') +assert(three == 'yes', 'Return value for ternary inside method call is wrong.') +assert(four == ['0'], 'Return value for ternary inside of list is wrong.') diff --git a/meson/test cases/common/109 custom target capture/data_source.txt b/meson/test cases/common/109 custom target capture/data_source.txt new file mode 100644 index 000000000..0c23cc0c3 --- /dev/null +++ b/meson/test cases/common/109 custom target capture/data_source.txt @@ -0,0 +1 @@ +This is a text only input file. diff --git a/meson/test cases/common/109 custom target capture/meson.build b/meson/test cases/common/109 custom target capture/meson.build new file mode 100644 index 000000000..58a69cacc --- /dev/null +++ b/meson/test cases/common/109 custom target capture/meson.build @@ -0,0 +1,24 @@ +project('custom target', 'c') + +python3 = import('python3').find_python() + +# Note that this will not add a dependency to the compiler executable. +# Code will not be rebuilt if it changes. +comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py') + +mytarget = custom_target('bindat', + output : 'data.dat', + input : 'data_source.txt', + capture : true, + command : [python3, comp, '@INPUT@'], + install : true, + install_dir : 'subdir' +) + +ct_output_exists = '''import os, sys +if not os.path.exists(sys.argv[1]): + print("could not find {!r} in {!r}".format(sys.argv[1], os.getcwd())) + sys.exit(1) +''' + +test('capture-wrote', python3, args : ['-c', ct_output_exists, mytarget]) diff --git a/meson/test cases/common/109 custom target capture/my_compiler.py b/meson/test cases/common/109 custom target capture/my_compiler.py new file mode 100755 index 000000000..b60722a5e --- /dev/null +++ b/meson/test cases/common/109 custom target capture/my_compiler.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import sys + +if __name__ == '__main__': + if len(sys.argv) != 2: + print(sys.argv[0], 'input_file') + sys.exit(1) + with open(sys.argv[1]) as f: + ifile = f.read() + if ifile != 'This is a text only input file.\n': + print('Malformed input') + sys.exit(1) + print('This is a binary output file.') diff --git a/meson/test cases/common/109 custom target capture/test.json b/meson/test cases/common/109 custom target capture/test.json new file mode 100644 index 000000000..ba66b024a --- /dev/null +++ b/meson/test cases/common/109 custom target capture/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"type": "file", "file": "usr/subdir/data.dat"} + ] +} diff --git a/meson/test cases/common/11 subdir/meson.build b/meson/test cases/common/11 subdir/meson.build new file mode 100644 index 000000000..bda1f9040 --- /dev/null +++ b/meson/test cases/common/11 subdir/meson.build @@ -0,0 +1,2 @@ +project('subdir test', 'c') +subdir('subdir') diff --git a/meson/test cases/common/11 subdir/subdir/meson.build b/meson/test cases/common/11 subdir/subdir/meson.build new file mode 100644 index 000000000..d84ec13b0 --- /dev/null +++ b/meson/test cases/common/11 subdir/subdir/meson.build @@ -0,0 +1,2 @@ +prog = executable('prog', 'prog.c') +test('subdirprog', prog) diff --git a/meson/test cases/common/11 subdir/subdir/prog.c b/meson/test cases/common/11 subdir/subdir/prog.c new file mode 100644 index 000000000..78f2de106 --- /dev/null +++ b/meson/test cases/common/11 subdir/subdir/prog.c @@ -0,0 +1 @@ +int main(void) { 
return 0; } diff --git a/meson/test cases/common/110 allgenerate/converter.py b/meson/test cases/common/110 allgenerate/converter.py new file mode 100755 index 000000000..f8e2ca05a --- /dev/null +++ b/meson/test cases/common/110 allgenerate/converter.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +import sys + +ifile = sys.argv[1] +ofile = sys.argv[2] + +open(ofile, 'w').write(open(ifile).read()) diff --git a/meson/test cases/common/110 allgenerate/foobar.cpp.in b/meson/test cases/common/110 allgenerate/foobar.cpp.in new file mode 100644 index 000000000..32e1261f1 --- /dev/null +++ b/meson/test cases/common/110 allgenerate/foobar.cpp.in @@ -0,0 +1,6 @@ +#include + +int main(void) { + printf("I am a program.\n"); + return 0; +} diff --git a/meson/test cases/common/110 allgenerate/meson.build b/meson/test cases/common/110 allgenerate/meson.build new file mode 100644 index 000000000..049e8498c --- /dev/null +++ b/meson/test cases/common/110 allgenerate/meson.build @@ -0,0 +1,20 @@ +# Must have two languages here to exercise linker language +# selection bug +project('all sources generated', 'c', 'cpp') + +comp = find_program('converter.py') + +g = generator(comp, + output : '@BASENAME@', + arguments : ['@INPUT@', '@OUTPUT@']) + +c = g.process('foobar.cpp.in') + +prog = executable('genexe', c) + +c2 = custom_target('c2gen', + output : '@BASENAME@', + input : 'foobar.cpp.in', + command : [comp, '@INPUT@', '@OUTPUT@']) + +prog2 = executable('genexe2', c2) diff --git a/meson/test cases/common/111 pathjoin/meson.build b/meson/test cases/common/111 pathjoin/meson.build new file mode 100644 index 000000000..d3957dd77 --- /dev/null +++ b/meson/test cases/common/111 pathjoin/meson.build @@ -0,0 +1,24 @@ +project('pathjoin', 'c') + +# Test string-args form since that is the canonical way +assert(join_paths('foo') == 'foo', 'Single argument join is broken') +assert(join_paths('foo', 'bar') == 'foo/bar', 'Path joining is broken') +assert(join_paths('foo', 'bar', 'baz') == 'foo/bar/baz', 'Path joining is broken') +assert(join_paths('/foo', 'bar') == '/foo/bar', 'Path joining is broken') +assert(join_paths('foo', '/bar') == '/bar', 'Absolute path joining is broken') +assert(join_paths('/foo', '/bar') == '/bar', 'Absolute path joining is broken') + +# Test array form since people are using that too +assert(join_paths(['foo']) == 'foo', 'Single argument join is broken') +assert(join_paths(['foo', 'bar']) == 'foo/bar', 'Path joining is broken') +assert(join_paths(['foo', 'bar', 'baz']) == 'foo/bar/baz', 'Path joining is broken') +assert(join_paths(['/foo', 'bar']) == '/foo/bar', 'Path joining is broken') +assert(join_paths(['foo', '/bar']) == '/bar', 'Absolute path joining is broken') +assert(join_paths(['/foo', '/bar']) == '/bar', 'Absolute path joining is broken') + +# Division operator should do the same as join_paths +assert('foo' / 'bar' == 'foo/bar', 'Path division is broken') +assert('foo' /'bar' /'baz' == 'foo/bar/baz', 'Path division is broken') +assert('/foo' / 'bar' == '/foo/bar', 'Path division is broken') +assert('foo' / '/bar' == '/bar', 'Absolute path division is broken') +assert('/foo' / '/bar' == '/bar', 'Absolute path division is broken') diff --git a/meson/test cases/common/112 subdir subproject/meson.build b/meson/test cases/common/112 subdir subproject/meson.build new file mode 100644 index 000000000..54ecfe07a --- /dev/null +++ b/meson/test cases/common/112 subdir subproject/meson.build @@ -0,0 +1,2 @@ +project('proj', 'c') +subdir('prog') diff --git a/meson/test cases/common/112 subdir 
subproject/prog/meson.build b/meson/test cases/common/112 subdir subproject/prog/meson.build new file mode 100644 index 000000000..360b5f59d --- /dev/null +++ b/meson/test cases/common/112 subdir subproject/prog/meson.build @@ -0,0 +1,5 @@ +subproject('sub') +libSub = dependency('sub', fallback: ['sub', 'libSub']) + +exe = executable('prog', 'prog.c', dependencies: libSub) +test('subdir subproject', exe) diff --git a/meson/test cases/common/112 subdir subproject/prog/prog.c b/meson/test cases/common/112 subdir subproject/prog/prog.c new file mode 100644 index 000000000..9035ff114 --- /dev/null +++ b/meson/test cases/common/112 subdir subproject/prog/prog.c @@ -0,0 +1,5 @@ +#include <sub.h> + +int main(void) { + return sub(); +} diff --git a/meson/test cases/common/112 subdir subproject/subprojects/sub/meson.build b/meson/test cases/common/112 subdir subproject/subprojects/sub/meson.build new file mode 100644 index 000000000..94e9eecd0 --- /dev/null +++ b/meson/test cases/common/112 subdir subproject/subprojects/sub/meson.build @@ -0,0 +1,3 @@ +project('sub', 'c') +lib = static_library('sub', 'sub.c') +libSub = declare_dependency(include_directories: include_directories('.'), link_with: lib) diff --git a/meson/test cases/common/112 subdir subproject/subprojects/sub/sub.c b/meson/test cases/common/112 subdir subproject/subprojects/sub/sub.c new file mode 100644 index 000000000..e748ac750 --- /dev/null +++ b/meson/test cases/common/112 subdir subproject/subprojects/sub/sub.c @@ -0,0 +1,5 @@ +#include "sub.h" + +int sub(void) { + return 0; +} diff --git a/meson/test cases/common/112 subdir subproject/subprojects/sub/sub.h b/meson/test cases/common/112 subdir subproject/subprojects/sub/sub.h new file mode 100644 index 000000000..2b59a3a32 --- /dev/null +++ b/meson/test cases/common/112 subdir subproject/subprojects/sub/sub.h @@ -0,0 +1,6 @@ +#ifndef SUB_H +#define SUB_H + +int sub(void); + +#endif diff --git a/meson/test cases/common/113 interpreter copy mutable var on assignment/meson.build b/meson/test cases/common/113 interpreter copy mutable var on assignment/meson.build new file mode 100644 index 000000000..8b15357ad --- /dev/null +++ b/meson/test cases/common/113 interpreter copy mutable var on assignment/meson.build @@ -0,0 +1,20 @@ +project('foo', 'c') + +a = configuration_data() +a.set('HELLO', 1) + +b = a + +assert(a.has('HELLO'), 'Original config data should be set on a') +assert(b.has('HELLO'), 'Original config data should be set on copy') + +configure_file(output : 'b.h', configuration : b) + +# This should still work, as we didn't use the original above but a copy!
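As an aside, the copy-on-assignment behaviour this test asserts is analogous to plain value assignment in C: mutating the original after the copy leaves the copy untouched. A rough sketch, not part of the test sources:

    #include <assert.h>

    struct conf { int hello; int world; };

    int main(void) {
        struct conf a = { .hello = 1 };
        struct conf b = a;          /* value copy, like `b = a` above   */

        a.world = 1;                /* later change to the original ... */
        assert(b.world == 0);       /* ... does not show up in the copy */
        return 0;
    }
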
+a.set('WORLD', 1) + +assert(a.has('WORLD'), 'New config data should have been set') +assert(not b.has('WORLD'), 'New config data set should not affect var copied earlier') + +configure_file(output : 'a.h', configuration : a) + diff --git a/meson/test cases/common/114 skip/meson.build b/meson/test cases/common/114 skip/meson.build new file mode 100644 index 000000000..1adedb6fd --- /dev/null +++ b/meson/test cases/common/114 skip/meson.build @@ -0,0 +1,4 @@ +project('skip', 'c') + +error('MESON_SKIP_TEST this test is always skipped.') + diff --git a/meson/test cases/common/115 subproject project arguments/exe.c b/meson/test cases/common/115 subproject project arguments/exe.c new file mode 100644 index 000000000..e8f2271a3 --- /dev/null +++ b/meson/test cases/common/115 subproject project arguments/exe.c @@ -0,0 +1,27 @@ +#ifndef PROJECT_OPTION +#error +#endif + +#ifndef PROJECT_OPTION_1 +#error +#endif + +#ifndef GLOBAL_ARGUMENT +#error +#endif + +#ifdef SUBPROJECT_OPTION +#error +#endif + +#ifdef OPTION_CPP +#error +#endif + +#ifndef PROJECT_OPTION_C_CPP +#error +#endif + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/115 subproject project arguments/exe.cpp b/meson/test cases/common/115 subproject project arguments/exe.cpp new file mode 100644 index 000000000..9cfaec4a2 --- /dev/null +++ b/meson/test cases/common/115 subproject project arguments/exe.cpp @@ -0,0 +1,28 @@ +#ifdef PROJECT_OPTION +#error +#endif + +#ifdef PROJECT_OPTION_1 +#error +#endif + +#ifdef GLOBAL_ARGUMENT +#error +#endif + +#ifdef SUBPROJECT_OPTION +#error +#endif + +#ifndef PROJECT_OPTION_CPP +#error +#endif + +#ifndef PROJECT_OPTION_C_CPP +#error +#endif + +int main(void) { + return 0; +} + diff --git a/meson/test cases/common/115 subproject project arguments/meson.build b/meson/test cases/common/115 subproject project arguments/meson.build new file mode 100644 index 000000000..90d4c05f3 --- /dev/null +++ b/meson/test cases/common/115 subproject project arguments/meson.build @@ -0,0 +1,17 @@ +project('project options tester', 'c', 'cpp', + version : '2.3.4', + license : 'mylicense') + +add_global_arguments('-DGLOBAL_ARGUMENT', language: 'c') +add_project_arguments('-DPROJECT_OPTION', language: 'c') +add_project_arguments('-DPROJECT_OPTION_CPP', language: 'cpp') +add_project_arguments('-DPROJECT_OPTION_C_CPP', language: ['c', 'cpp']) + +sub = subproject('subexe', version : '1.0.0') + +add_project_arguments('-DPROJECT_OPTION_1', language: 'c') + +e = executable('exe', 'exe.c') +e = executable('execpp', 'exe.cpp') +test('exetest', e) +test('execpptest', e) diff --git a/meson/test cases/common/115 subproject project arguments/subprojects/subexe/meson.build b/meson/test cases/common/115 subproject project arguments/subprojects/subexe/meson.build new file mode 100644 index 000000000..ef141dc0a --- /dev/null +++ b/meson/test cases/common/115 subproject project arguments/subprojects/subexe/meson.build @@ -0,0 +1,13 @@ +project('subproject', 'c', + version : '1.0.0', + license : ['sublicense1', 'sublicense2']) + +if not meson.is_subproject() + error('Claimed to be master project even though we are a subproject.') +endif + +assert(meson.project_name() == 'subproject', 'Incorrect subproject name') + +add_project_arguments('-DSUBPROJECT_OPTION', language: 'c') +e = executable('subexe', 'subexe.c') +test('subexetest', e) diff --git a/meson/test cases/common/115 subproject project arguments/subprojects/subexe/subexe.c b/meson/test cases/common/115 subproject project arguments/subprojects/subexe/subexe.c 
new file mode 100644 index 000000000..bd5316db9 --- /dev/null +++ b/meson/test cases/common/115 subproject project arguments/subprojects/subexe/subexe.c @@ -0,0 +1,27 @@ +#ifdef PROJECT_OPTION +#error +#endif + +#ifdef PROJECT_OPTION_1 +#error +#endif + +#ifdef PROJECT_OPTION_C_CPP +#error +#endif + +#ifndef GLOBAL_ARGUMENT +#error +#endif + +#ifndef SUBPROJECT_OPTION +#error +#endif + +#ifdef OPTION_CPP +#error +#endif + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/116 test skip/meson.build b/meson/test cases/common/116 test skip/meson.build new file mode 100644 index 000000000..568527f18 --- /dev/null +++ b/meson/test cases/common/116 test skip/meson.build @@ -0,0 +1,4 @@ +project('test skip', 'c') + +exe_test_skip = executable('test_skip', 'test_skip.c') +test('test_skip', exe_test_skip) diff --git a/meson/test cases/common/116 test skip/test_skip.c b/meson/test cases/common/116 test skip/test_skip.c new file mode 100644 index 000000000..59c134bb9 --- /dev/null +++ b/meson/test cases/common/116 test skip/test_skip.c @@ -0,0 +1,3 @@ +int main(void) { + return 77; +} diff --git a/meson/test cases/common/117 shared module/meson.build b/meson/test cases/common/117 shared module/meson.build new file mode 100644 index 000000000..936c8396f --- /dev/null +++ b/meson/test cases/common/117 shared module/meson.build @@ -0,0 +1,40 @@ +project('shared module', 'c') + +c = meson.get_compiler('c') + +# Windows UWP doesn't support the ToolHelp API we use in this test to emulate +# runtime symbol resolution. +if host_machine.system() == 'windows' + if not c.compiles(''' +#include +#include + +HANDLE func(void) +{ + return CreateToolhelp32Snapshot(TH32CS_SNAPMODULE, 0); +} +''') + error('MESON_SKIP_TEST Windows UWP does not support this test.') + endif +endif + +dl = c.find_library('dl', required : false) +l = shared_library('runtime', 'runtime.c') +# Do NOT link the module with the runtime library. This +# is a common approach for plugins that are only used +# with dlopen. Any symbols are resolved dynamically +# at runtime. This requires extra help on Windows, so +# should be avoided unless really necessary. 
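The comment above describes the classic dlopen() plugin pattern that module.c and prog.c below exercise in full. A minimal POSIX-only sketch of the same idea follows; host_hook, plugin_entry and the file names are invented for illustration:

    /* plugin.c -- compiled as a shared_module(), deliberately NOT linked
     * against whatever defines host_hook(); the reference stays undefined
     * until the module is dlopen()ed by a host that exports it. */
    int host_hook(void);
    int plugin_entry(void) { return host_hook(); }

    /* host.c -- built as an executable with export_dynamic : true so that
     * host_hook() is visible to dlopen()ed modules. */
    #include <dlfcn.h>
    #include <stdio.h>

    int host_hook(void) { return 42; }

    int main(int argc, char **argv) {
        if (argc < 2) return 1;
        void *h = dlopen(argv[1], RTLD_LAZY);       /* path to the module */
        if (!h) { printf("dlopen failed: %s\n", dlerror()); return 1; }
        int (*entry)(void) = (int (*)(void)) dlsym(h, "plugin_entry");
        int ret = (entry && entry() == 42) ? 0 : 1;
        dlclose(h);
        return ret;
    }
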
+m = shared_module('mymodule', 'module.c') +e = executable('prog', 'prog.c', + link_with : l, export_dynamic : true, dependencies : dl) +test('import test', e, args : m) + +# Same as above, but module created with build_target() +m2 = build_target('mymodule2', 'module.c', target_type: 'shared_module') +test('import test 2', e, args : m2) + +# Shared module that does not export any symbols +shared_module('nosyms', 'nosyms.c', + install : true, + install_dir : join_paths(get_option('libdir'), 'modules')) diff --git a/meson/test cases/common/117 shared module/module.c b/meson/test cases/common/117 shared module/module.c new file mode 100644 index 000000000..5dd26d7ef --- /dev/null +++ b/meson/test cases/common/117 shared module/module.c @@ -0,0 +1,96 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +#if defined(_WIN32) || defined(__CYGWIN__) + +#include + +typedef int (*fptr) (void); + +#ifdef __CYGWIN__ + +#include + +fptr find_any_f (const char *name) { + return (fptr) dlsym(RTLD_DEFAULT, name); +} +#else /* _WIN32 */ + +#include +#include + +static wchar_t* +win32_get_last_error (void) +{ + wchar_t *msg = NULL; + + FormatMessageW (FORMAT_MESSAGE_ALLOCATE_BUFFER + | FORMAT_MESSAGE_IGNORE_INSERTS + | FORMAT_MESSAGE_FROM_SYSTEM, + NULL, GetLastError (), 0, + (LPWSTR) &msg, 0, NULL); + return msg; +} + +/* Unlike Linux and OS X, when a library is loaded, all the symbols aren't + * loaded into a single namespace. You must fetch the symbol by iterating over + * all loaded modules. Code for finding the function from any of the loaded + * modules is taken from gmodule.c in glib */ +fptr find_any_f (const char *name) { + fptr f; + HANDLE snapshot; + MODULEENTRY32 me32; + + snapshot = CreateToolhelp32Snapshot (TH32CS_SNAPMODULE, 0); + if (snapshot == (HANDLE) -1) { + wchar_t *msg = win32_get_last_error(); + printf("Could not get snapshot: %S\n", msg); + return 0; + } + + me32.dwSize = sizeof (me32); + + f = NULL; + if (Module32First (snapshot, &me32)) { + do { + if ((f = (fptr) GetProcAddress (me32.hModule, name)) != NULL) + break; + } while (Module32Next (snapshot, &me32)); + } + + CloseHandle (snapshot); + return f; +} +#endif + +int DLL_PUBLIC func(void) { + fptr f; + + f = find_any_f ("func_from_language_runtime"); + if (f != NULL) + return f(); + printf ("Could not find function\n"); + return 1; +} + +#else +/* + * Shared modules often have references to symbols that are not defined + * at link time, but which will be provided from deps of the executable that + * dlopens it. We need to make sure that this works, i.e. that we do + * not pass -Wl,--no-undefined when linking modules. 
+ */ +int func_from_language_runtime(void); + +int DLL_PUBLIC func(void) { + return func_from_language_runtime(); +} +#endif diff --git a/meson/test cases/common/117 shared module/nosyms.c b/meson/test cases/common/117 shared module/nosyms.c new file mode 100644 index 000000000..3432b1c46 --- /dev/null +++ b/meson/test cases/common/117 shared module/nosyms.c @@ -0,0 +1,4 @@ +static int +func_not_exported (void) { + return 99; +} diff --git a/meson/test cases/common/117 shared module/prog.c b/meson/test cases/common/117 shared module/prog.c new file mode 100644 index 000000000..47411856d --- /dev/null +++ b/meson/test cases/common/117 shared module/prog.c @@ -0,0 +1,103 @@ + +#include + +int func_from_language_runtime(void); +typedef int (*fptr) (void); + +#ifdef _WIN32 + +#include + +static wchar_t* +win32_get_last_error (void) +{ + wchar_t *msg = NULL; + + FormatMessageW (FORMAT_MESSAGE_ALLOCATE_BUFFER + | FORMAT_MESSAGE_IGNORE_INSERTS + | FORMAT_MESSAGE_FROM_SYSTEM, + NULL, GetLastError (), 0, + (LPWSTR) &msg, 0, NULL); + return msg; +} + +int main(int argc, char **argv) +{ + HINSTANCE handle; + fptr importedfunc; + int expected, actual; + int ret = 1; + if(argc==0) {}; + + handle = LoadLibraryA (argv[1]); + if (!handle) { + wchar_t *msg = win32_get_last_error (); + printf ("Could not open %s: %S\n", argv[1], msg); + goto nohandle; + } + + importedfunc = (fptr) GetProcAddress (handle, "func"); + if (importedfunc == NULL) { + wchar_t *msg = win32_get_last_error (); + printf ("Could not find 'func': %S\n", msg); + goto out; + } + + actual = importedfunc (); + expected = func_from_language_runtime (); + if (actual != expected) { + printf ("Got %i instead of %i\n", actual, expected); + goto out; + } + + ret = 0; +out: + FreeLibrary (handle); +nohandle: + return ret; +} + +#else + +#include +#include + +int main(int argc, char **argv) { + void *dl; + fptr importedfunc; + int expected, actual; + char *error; + int ret = 1; + if(argc==0) {}; + + dlerror(); + dl = dlopen(argv[1], RTLD_LAZY); + error = dlerror(); + if(error) { + printf("Could not open %s: %s\n", argv[1], error); + goto nodl; + } + + importedfunc = (fptr) dlsym(dl, "func"); + if (importedfunc == NULL) { + printf ("Could not find 'func'\n"); + goto out; + } + + assert(importedfunc != func_from_language_runtime); + + actual = (*importedfunc)(); + expected = func_from_language_runtime (); + if (actual != expected) { + printf ("Got %i instead of %i\n", actual, expected); + goto out; + } + + ret = 0; +out: + dlclose(dl); +nodl: + return ret; +} + +#endif diff --git a/meson/test cases/common/117 shared module/runtime.c b/meson/test cases/common/117 shared module/runtime.c new file mode 100644 index 000000000..03bde8614 --- /dev/null +++ b/meson/test cases/common/117 shared module/runtime.c @@ -0,0 +1,19 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +/* + * This file pretends to be a language runtime that supports extension + * modules. 
+ */ + +int DLL_PUBLIC func_from_language_runtime(void) { + return 86; +} diff --git a/meson/test cases/common/117 shared module/test.json b/meson/test cases/common/117 shared module/test.json new file mode 100644 index 000000000..33bfeff07 --- /dev/null +++ b/meson/test cases/common/117 shared module/test.json @@ -0,0 +1,7 @@ +{ + "installed": [ + {"type": "expr", "file": "usr/lib/modules/libnosyms?so"}, + {"type": "implibempty", "file": "usr/lib/modules/libnosyms"}, + {"type": "pdb", "file": "usr/lib/modules/nosyms"} + ] +} diff --git a/meson/test cases/common/118 llvm ir and assembly/main.c b/meson/test cases/common/118 llvm ir and assembly/main.c new file mode 100644 index 000000000..35e8ed623 --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/main.c @@ -0,0 +1,13 @@ +#include + +unsigned square_unsigned (unsigned a); + +int main(void) +{ + unsigned int ret = square_unsigned (2); + if (ret != 4) { + printf("Got %u instead of 4\n", ret); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/118 llvm ir and assembly/main.cpp b/meson/test cases/common/118 llvm ir and assembly/main.cpp new file mode 100644 index 000000000..aac3cbfef --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/main.cpp @@ -0,0 +1,15 @@ +#include + +extern "C" { + unsigned square_unsigned (unsigned a); +} + +int main (void) +{ + unsigned int ret = square_unsigned (2); + if (ret != 4) { + printf("Got %u instead of 4\n", ret); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/118 llvm ir and assembly/meson.build b/meson/test cases/common/118 llvm ir and assembly/meson.build new file mode 100644 index 000000000..2aec7f07f --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/meson.build @@ -0,0 +1,79 @@ +project('llvm-ir', 'c', 'cpp') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: asm not supported with the Xcode backend. Patches welcome.') +endif + +cpu = host_machine.cpu_family() +supported_cpus = ['arm', 'aarch64', 'x86', 'x86_64'] + +foreach lang : ['c', 'cpp'] + cc = meson.get_compiler(lang) + cc_id = cc.get_id() + ## Build a trivial executable with mixed LLVM IR source + if cc_id == 'clang' + e = executable('square_ir_' + lang, 'square.ll', 'main.' + lang) + test('test IR square' + lang, e) + endif + ## Build a trivial executable with mixed assembly source + # This also helps test whether cc.symbols_have_underscore_prefix() is working + # properly. This is done by assembling some assembly into an object that will + # provide the unsigned_squared() symbol to main.c/cpp. This requires the + # C symbol mangling to be known in advance. + if cc.symbols_have_underscore_prefix() + uscore_args = ['-DMESON_TEST__UNDERSCORE_SYMBOL'] + message('underscore is prefixed') + else + uscore_args = [] + message('underscore is NOT prefixed') + endif + square_base = 'square-' + cpu + square_impl = square_base + '.S' + # MSVC cannot directly compile assembly files, so we pass it through the + # cl.exe pre-processor first and then assemble it with ml.exe or armasm.exe + # assembler. Then we can link it into the executable. 
+ if cc.get_argument_syntax() == 'msvc' + cl = cc.cmd_array() + if cpu == 'x86' + asmcmd = 'ml' + elif cpu == 'x86_64' + asmcmd = 'ml64' + elif cpu == 'aarch64' + asmcmd = 'armasm64' + elif cpu == 'arm' + asmcmd = 'armasm' + else + error('Unsupported cpu family: "' + cpu + '"') + endif + ml = find_program(asmcmd, required: false) + if not ml.found() + error('MESON_SKIP_TEST: Microsoft assembler (ml/armasm) not found') + endif + # Preprocess file (ml doesn't support pre-processing) + # Force the intput to be C (/Tc) because ICL otherwise assumes it's an object (.obj) file + preproc_name = lang + square_base + '.i' + square_preproc = custom_target(lang + square_impl + 'preproc', + input : square_impl, + output : preproc_name, + command : [cl, '/nologo', '/EP', '/P', '/Fi' + preproc_name, '/Tc', '@INPUT@'] + uscore_args) + # Use assembled object file instead of the original .S assembly source + if asmcmd.startswith('armasm') + square_impl = custom_target(lang + square_impl, + input : square_preproc, + output : lang + square_base + '.obj', + command : [ml, '-nologo', '-o', '@OUTPUT@', '@INPUT@']) + else + square_impl = custom_target(lang + square_impl, + input : square_preproc, + output : lang + square_base + '.obj', + command : [ml, '/nologo', '/safeseh', '/Fo', '@OUTPUT@', '/c', '@INPUT@']) + endif + endif + if supported_cpus.contains(cpu) + e = executable('square_asm_' + lang, square_impl, 'main.' + lang, + c_args : uscore_args, cpp_args : uscore_args) + test('test ASM square' + lang, e) + elif cc_id != 'clang' + error('MESON_SKIP_TEST: Unsupported cpu: "' + cpu + '", and LLVM not found') + endif +endforeach diff --git a/meson/test cases/common/118 llvm ir and assembly/square-aarch64.S b/meson/test cases/common/118 llvm ir and assembly/square-aarch64.S new file mode 100644 index 000000000..02f1a1299 --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/square-aarch64.S @@ -0,0 +1,29 @@ +#include "symbol-underscore.h" + +#ifdef _MSC_VER + + AREA _TEXT, ARM64, CODE, READONLY + + EXPORT SYMBOL_NAME(square_unsigned) +SYMBOL_NAME(square_unsigned) PROC + mul x1, x0, x0 + mov x0, x1 + ret +SYMBOL_NAME(square_unsigned) ENDP + + END + +#else + +.text +.globl SYMBOL_NAME(square_unsigned) +# ifdef __linux__ +.type square_unsigned, %function +#endif + +SYMBOL_NAME(square_unsigned): + mul x1, x0, x0 + mov x0, x1 + ret + +#endif diff --git a/meson/test cases/common/118 llvm ir and assembly/square-arm.S b/meson/test cases/common/118 llvm ir and assembly/square-arm.S new file mode 100644 index 000000000..aea3f1f61 --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/square-arm.S @@ -0,0 +1,29 @@ +#include "symbol-underscore.h" + +#ifdef _MSC_VER + + AREA _TEXT, ARM, CODE, READONLY + + EXPORT SYMBOL_NAME(square_unsigned) +SYMBOL_NAME(square_unsigned) PROC + mul r1, r0, r0 + mov r0, r1 + mov pc, lr +SYMBOL_NAME(square_unsigned) ENDP + + END + +#else + +.text +.globl SYMBOL_NAME(square_unsigned) +# ifdef __linux__ +.type square_unsigned, %function +#endif + +SYMBOL_NAME(square_unsigned): + mul r1, r0, r0 + mov r0, r1 + mov pc, lr + +#endif diff --git a/meson/test cases/common/118 llvm ir and assembly/square-x86.S b/meson/test cases/common/118 llvm ir and assembly/square-x86.S new file mode 100644 index 000000000..18284c1a6 --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/square-x86.S @@ -0,0 +1,36 @@ +#include "symbol-underscore.h" + +/* This sadly doesn't test the symbol underscore stuff. 
I can't figure out how + * to not use an automatic stdcall mechanism and do everything manually. */ +#ifdef _MSC_VER + +.386 +.MODEL FLAT, C + +PUBLIC square_unsigned +_TEXT SEGMENT + +square_unsigned PROC var1:DWORD + mov eax, var1 + imul eax, eax + ret + +square_unsigned ENDP + +_TEXT ENDS +END + +#else + +.text +.globl SYMBOL_NAME(square_unsigned) +# ifdef __linux__ +.type square_unsigned, %function +#endif + +SYMBOL_NAME(square_unsigned): + movl 4(%esp), %eax + imull %eax, %eax + retl + +#endif diff --git a/meson/test cases/common/118 llvm ir and assembly/square-x86_64.S b/meson/test cases/common/118 llvm ir and assembly/square-x86_64.S new file mode 100644 index 000000000..5678d00a3 --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/square-x86_64.S @@ -0,0 +1,37 @@ +#include "symbol-underscore.h" + +#ifdef _MSC_VER /* MSVC on Windows */ + +PUBLIC SYMBOL_NAME(square_unsigned) +_TEXT SEGMENT + +SYMBOL_NAME(square_unsigned) PROC + mov eax, ecx + imul eax, eax + ret +SYMBOL_NAME(square_unsigned) ENDP + +_TEXT ENDS +END + +#else + +.text +.globl SYMBOL_NAME(square_unsigned) +# ifdef __linux__ +.type square_unsigned, %function +#endif + +# if defined(_WIN32) || defined(__CYGWIN__) /* msabi */ +SYMBOL_NAME(square_unsigned): + imull %ecx, %ecx + movl %ecx, %eax + retq +# else /* sysvabi */ +SYMBOL_NAME(square_unsigned): + imull %edi, %edi + movl %edi, %eax + retq +# endif + +#endif diff --git a/meson/test cases/common/118 llvm ir and assembly/square.ll b/meson/test cases/common/118 llvm ir and assembly/square.ll new file mode 100644 index 000000000..7c321aa82 --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/square.ll @@ -0,0 +1,4 @@ +define i32 @square_unsigned(i32 %a) { + %1 = mul i32 %a, %a + ret i32 %1 +} diff --git a/meson/test cases/common/118 llvm ir and assembly/symbol-underscore.h b/meson/test cases/common/118 llvm ir and assembly/symbol-underscore.h new file mode 100644 index 000000000..d0f3ef9cc --- /dev/null +++ b/meson/test cases/common/118 llvm ir and assembly/symbol-underscore.h @@ -0,0 +1,5 @@ +#if defined(MESON_TEST__UNDERSCORE_SYMBOL) +# define SYMBOL_NAME(name) _##name +#else +# define SYMBOL_NAME(name) name +#endif diff --git a/meson/test cases/common/119 cpp and asm/meson.build b/meson/test cases/common/119 cpp and asm/meson.build new file mode 100644 index 000000000..99713d485 --- /dev/null +++ b/meson/test cases/common/119 cpp and asm/meson.build @@ -0,0 +1,33 @@ +project('c++ and assembly test') +add_languages('cpp') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: asm not supported with the Xcode backend. 
Patches welcome.') +endif + +cpp = meson.get_compiler('cpp') +cpu = host_machine.cpu_family() + +supported_cpus = ['arm', 'x86', 'x86_64'] + +if not supported_cpus.contains(cpu) + error('MESON_SKIP_TEST unsupported cpu:' + cpu) +endif + +if cpp.symbols_have_underscore_prefix() + add_project_arguments('-DMESON_TEST__UNDERSCORE_SYMBOL', language : 'cpp') +endif + +sources = ['trivial.cc'] +# If the compiler cannot compile assembly, don't use it +if not ['msvc', 'clang-cl', 'intel-cl'].contains(meson.get_compiler('cpp').get_id()) + sources += ['retval-' + cpu + '.S'] + cpp_args = ['-DUSE_ASM'] + message('Using ASM') +else + cpp_args = ['-DNO_USE_ASM'] +endif + +exe = executable('trivialprog', sources, + cpp_args : cpp_args) +test('runtest', exe) diff --git a/meson/test cases/common/119 cpp and asm/retval-arm.S b/meson/test cases/common/119 cpp and asm/retval-arm.S new file mode 100644 index 000000000..a8923624a --- /dev/null +++ b/meson/test cases/common/119 cpp and asm/retval-arm.S @@ -0,0 +1,11 @@ +#include "symbol-underscore.h" + +.text +.globl SYMBOL_NAME(get_retval) +# ifdef __linux__ +.type get_retval, %function +#endif + +SYMBOL_NAME(get_retval): + mov r0, #0 + mov pc, lr diff --git a/meson/test cases/common/119 cpp and asm/retval-x86.S b/meson/test cases/common/119 cpp and asm/retval-x86.S new file mode 100644 index 000000000..f9e819070 --- /dev/null +++ b/meson/test cases/common/119 cpp and asm/retval-x86.S @@ -0,0 +1,11 @@ +#include "symbol-underscore.h" + +.text +.globl SYMBOL_NAME(get_retval) +# ifdef __linux__ +.type get_retval, %function +#endif + +SYMBOL_NAME(get_retval): + xorl %eax, %eax + retl diff --git a/meson/test cases/common/119 cpp and asm/retval-x86_64.S b/meson/test cases/common/119 cpp and asm/retval-x86_64.S new file mode 100644 index 000000000..1a5f3eb23 --- /dev/null +++ b/meson/test cases/common/119 cpp and asm/retval-x86_64.S @@ -0,0 +1,11 @@ +#include "symbol-underscore.h" + +.text +.globl SYMBOL_NAME(get_retval) +# ifdef __linux__ +.type get_retval, %function +#endif + +SYMBOL_NAME(get_retval): + xorl %eax, %eax + retq diff --git a/meson/test cases/common/119 cpp and asm/symbol-underscore.h b/meson/test cases/common/119 cpp and asm/symbol-underscore.h new file mode 100644 index 000000000..d0f3ef9cc --- /dev/null +++ b/meson/test cases/common/119 cpp and asm/symbol-underscore.h @@ -0,0 +1,5 @@ +#if defined(MESON_TEST__UNDERSCORE_SYMBOL) +# define SYMBOL_NAME(name) _##name +#else +# define SYMBOL_NAME(name) name +#endif diff --git a/meson/test cases/common/119 cpp and asm/trivial.cc b/meson/test cases/common/119 cpp and asm/trivial.cc new file mode 100644 index 000000000..19d5e944d --- /dev/null +++ b/meson/test cases/common/119 cpp and asm/trivial.cc @@ -0,0 +1,16 @@ +#include + +extern "C" { + int get_retval(void); +} + +int main(void) { + std::cout << "C++ seems to be working." 
<< std::endl; +#if defined(USE_ASM) + return get_retval(); +#elif defined(NO_USE_ASM) + return 0; +#else + #error "Forgot to pass asm define" +#endif +} diff --git a/meson/test cases/common/12 data/datafile.dat b/meson/test cases/common/12 data/datafile.dat new file mode 100644 index 000000000..ff3104ba1 --- /dev/null +++ b/meson/test cases/common/12 data/datafile.dat @@ -0,0 +1 @@ +this is a data file diff --git a/meson/test cases/common/12 data/etcfile.dat b/meson/test cases/common/12 data/etcfile.dat new file mode 100644 index 000000000..93db8cb06 --- /dev/null +++ b/meson/test cases/common/12 data/etcfile.dat @@ -0,0 +1 @@ +This goes into /etc/etcfile.dat diff --git a/meson/test cases/common/12 data/fileobject_datafile.dat b/meson/test cases/common/12 data/fileobject_datafile.dat new file mode 100644 index 000000000..872aa5a8f --- /dev/null +++ b/meson/test cases/common/12 data/fileobject_datafile.dat @@ -0,0 +1 @@ +This is a data file that is installed via a File object. diff --git a/meson/test cases/common/12 data/meson.build b/meson/test cases/common/12 data/meson.build new file mode 100644 index 000000000..b5b1e8a62 --- /dev/null +++ b/meson/test cases/common/12 data/meson.build @@ -0,0 +1,24 @@ +project('data install test', 'c', + default_options : ['install_umask=preserve']) +install_data(sources : 'datafile.dat', install_dir : 'share/progname') +# Some file in /etc that is only read-write by root; add a sticky bit for testing +install_data(sources : 'etcfile.dat', install_dir : '/etc', install_mode : 'rw------T') +# Some script that needs to be executable by the group +install_data('runscript.sh', + install_dir : get_option('bindir'), + install_mode : ['rwxr-sr-x', 'root', 0]) +install_data(files('fileobject_datafile.dat'), + install_dir : 'share/progname', + install_mode : [false, false, 0]) + +install_data(files('somefile.txt')) + +subdir('vanishing') + +install_data(sources : 'vanishing/vanishing2.dat', install_dir : 'share/progname') + +install_data(sources : 'to_be_renamed_1.txt', rename : 'renamed file.txt') +install_data(sources : ['vanishing/to_be_renamed_2.txt', 'to_be_renamed_3.txt'], + install_dir : 'share/renamed', + rename : ['renamed 2.txt', 'renamed 3.txt']) +install_data(sources : 'to_be_renamed_4.txt', rename : 'some/nested/path.txt') diff --git a/meson/test cases/common/12 data/runscript.sh b/meson/test cases/common/12 data/runscript.sh new file mode 100644 index 000000000..8bc5ca6ce --- /dev/null +++ b/meson/test cases/common/12 data/runscript.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +echo "Runscript" diff --git a/meson/test cases/common/12 data/somefile.txt b/meson/test cases/common/12 data/somefile.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/12 data/test.json b/meson/test cases/common/12 data/test.json new file mode 100644 index 000000000..f392e9a03 --- /dev/null +++ b/meson/test cases/common/12 data/test.json @@ -0,0 +1,15 @@ +{ + "installed": [ + {"type": "file", "file": "usr/share/progname/datafile.dat"}, + {"type": "file", "file": "usr/share/progname/fileobject_datafile.dat"}, + {"type": "file", "file": "usr/share/progname/vanishing.dat"}, + {"type": "file", "file": "usr/share/progname/vanishing2.dat"}, + {"type": "file", "file": "usr/share/data install test/renamed file.txt"}, + {"type": "file", "file": "usr/share/data install test/somefile.txt"}, + {"type": "file", "file": "usr/share/data install test/some/nested/path.txt"}, + {"type": "file", "file": "usr/share/renamed/renamed 2.txt"}, + {"type": "file", "file": 
"usr/share/renamed/renamed 3.txt"}, + {"type": "file", "file": "etc/etcfile.dat"}, + {"type": "file", "file": "usr/bin/runscript.sh"} + ] +} diff --git a/meson/test cases/common/12 data/to_be_renamed_1.txt b/meson/test cases/common/12 data/to_be_renamed_1.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/12 data/to_be_renamed_3.txt b/meson/test cases/common/12 data/to_be_renamed_3.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/12 data/to_be_renamed_4.txt b/meson/test cases/common/12 data/to_be_renamed_4.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/12 data/vanishing/meson.build b/meson/test cases/common/12 data/vanishing/meson.build new file mode 100644 index 000000000..1a27137c8 --- /dev/null +++ b/meson/test cases/common/12 data/vanishing/meson.build @@ -0,0 +1 @@ +install_data(sources : 'vanishing.dat', install_dir : 'share/progname') diff --git a/meson/test cases/common/12 data/vanishing/to_be_renamed_2.txt b/meson/test cases/common/12 data/vanishing/to_be_renamed_2.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/12 data/vanishing/vanishing.dat b/meson/test cases/common/12 data/vanishing/vanishing.dat new file mode 100644 index 000000000..b7d06090e --- /dev/null +++ b/meson/test cases/common/12 data/vanishing/vanishing.dat @@ -0,0 +1 @@ +This is a data file to be installed in a subdirectory. diff --git a/meson/test cases/common/12 data/vanishing/vanishing2.dat b/meson/test cases/common/12 data/vanishing/vanishing2.dat new file mode 100644 index 000000000..99c923b88 --- /dev/null +++ b/meson/test cases/common/12 data/vanishing/vanishing2.dat @@ -0,0 +1,4 @@ +This is a data file to be installed in a subdirectory. + +It is installed from a different subdir to test that the +installer strips the source tree dir prefix. 
diff --git a/meson/test cases/common/120 extract all shared library/extractor.h b/meson/test cases/common/120 extract all shared library/extractor.h new file mode 100644 index 000000000..cfb7ff6d6 --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/extractor.h @@ -0,0 +1,6 @@ +#pragma once + +int func1(void); +int func2(void); +int func3(void); +int func4(void); diff --git a/meson/test cases/common/120 extract all shared library/four.c b/meson/test cases/common/120 extract all shared library/four.c new file mode 100644 index 000000000..f67a85e68 --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/four.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func4(void) { + return 4; +} diff --git a/meson/test cases/common/120 extract all shared library/func1234.def b/meson/test cases/common/120 extract all shared library/func1234.def new file mode 100644 index 000000000..d62c08d78 --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/func1234.def @@ -0,0 +1,5 @@ +EXPORTS + func1 + func2 + func3 + func4 diff --git a/meson/test cases/common/120 extract all shared library/meson.build b/meson/test cases/common/120 extract all shared library/meson.build new file mode 100644 index 000000000..340c031b7 --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/meson.build @@ -0,0 +1,14 @@ +project('extract all', 'c', 'cpp') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: Xcode backend does not handle libraries with only objects, not sources.') +endif + +a = static_library('a', 'one.c', 'two.c') +b = static_library('b', 'three.c', 'four.c') +c = shared_library('c', + objects : [a.extract_all_objects(), b.extract_all_objects()], + vs_module_defs : 'func1234.def') + +e = executable('proggie', 'prog.c', link_with : c) +test('extall', e) diff --git a/meson/test cases/common/120 extract all shared library/one.c b/meson/test cases/common/120 extract all shared library/one.c new file mode 100644 index 000000000..152a1455d --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/one.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func1(void) { + return 1; +} diff --git a/meson/test cases/common/120 extract all shared library/prog.c b/meson/test cases/common/120 extract all shared library/prog.c new file mode 100644 index 000000000..de0cc7f8e --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/prog.c @@ -0,0 +1,10 @@ +#include"extractor.h" +#include + +int main(void) { + if((1+2+3+4) != (func1() + func2() + func3() + func4())) { + printf("Arithmetic is fail.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/120 extract all shared library/three.c b/meson/test cases/common/120 extract all shared library/three.c new file mode 100644 index 000000000..24604ed72 --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/three.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func3(void) { + return 3; +} diff --git a/meson/test cases/common/120 extract all shared library/two.c b/meson/test cases/common/120 extract all shared library/two.c new file mode 100644 index 000000000..800cd2dfb --- /dev/null +++ b/meson/test cases/common/120 extract all shared library/two.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func2(void) { + return 2; +} diff --git a/meson/test cases/common/121 object only target/meson.build b/meson/test cases/common/121 object only target/meson.build new file mode 100644 index 000000000..c3c4e52ce --- /dev/null +++ b/meson/test 
cases/common/121 object only target/meson.build @@ -0,0 +1,51 @@ +project('object generator', 'c') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST object-only libraries not supported in Xcode. Patches welcome.') +endif + +# FIXME: Note that this will not add a dependency to the compiler executable. +# Code will not be rebuilt if it changes. +comp = find_program('obj_generator.py') + +if host_machine.system() == 'windows' + ext = '.obj' +else + ext = '.o' +endif + +cc = meson.get_compiler('c').cmd_array().get(-1) + +# Generate an object file with configure_file to mimic prebuilt objects +# provided by the source tree +source1 = configure_file(input : 'source.c', + output : 'source' + ext, + command : [comp, cc, files('source.c'), + join_paths(meson.current_build_dir(), 'source' + ext)]) + +obj = static_library('obj', objects : source1) + +# Generate an object file manually. +gen = generator(comp, + output : '@BASENAME@' + ext, + arguments : [cc, '@INPUT@', '@OUTPUT@']) + +generated = gen.process(['source2.c']) + +shr = shared_library('shr', generated, + vs_module_defs : 'source2.def') + +# Generate an object file with indexed OUTPUT replacement. +gen2 = generator(comp, + output : '@BASENAME@' + ext, + arguments : [cc, '@INPUT@', '@OUTPUT0@']) +generated2 = gen2.process(['source3.c']) + +stc = static_library('stc', generated2) + +subdir('objdir') + +e = executable('prog', 'prog.c', link_with : [obj, shr, stc, subdirfilebuilt_obj, subdirfile_obj, subdirstr_obj], + install : true) + +test('objgen', e) diff --git a/meson/test cases/common/121 object only target/obj_generator.py b/meson/test cases/common/121 object only target/obj_generator.py new file mode 100755 index 000000000..afdbc09ad --- /dev/null +++ b/meson/test cases/common/121 object only target/obj_generator.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +# Mimic a binary that generates an object file (e.g. windres). 
+ +import sys, subprocess + +if __name__ == '__main__': + if len(sys.argv) != 4: + print(sys.argv[0], 'compiler input_file output_file') + sys.exit(1) + compiler = sys.argv[1] + ifile = sys.argv[2] + ofile = sys.argv[3] + if compiler.endswith('cl'): + cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile] + elif sys.platform == 'sunos5': + cmd = [compiler, '-fpic', '-c', ifile, '-o', ofile] + else: + cmd = [compiler, '-c', ifile, '-o', ofile] + sys.exit(subprocess.call(cmd)) diff --git a/meson/test cases/common/121 object only target/objdir/meson.build b/meson/test cases/common/121 object only target/objdir/meson.build new file mode 100644 index 000000000..631c1a1ff --- /dev/null +++ b/meson/test cases/common/121 object only target/objdir/meson.build @@ -0,0 +1,27 @@ + +#mesonlib.File built +source4 = configure_file(input : 'source4.c', + output : 'source4' + ext, + command : [comp, cc, files('source4.c'), + join_paths(meson.current_build_dir(), 'source4' + ext)]) + +subdirfilebuilt_obj = static_library('subdirfilebuilt_obj', objects : source4) + + +#mesonlib.File not built +configure_file(input : 'source5.c', + output : 'source5' + ext, + command : [comp, cc, files('source5.c'), + join_paths(meson.current_build_dir(), 'source5' + ext)]) + +subdirfile_obj = static_library('subdirfile_obj', objects : files(meson.current_build_dir()/'source5' + ext)) + + +#str +configure_file(input : 'source6.c', + output : 'source6' + ext, + command : [comp, cc, files('source6.c'), + join_paths(meson.current_build_dir(), 'source6' + ext)]) + + +subdirstr_obj = static_library('subdirstr_obj', objects : meson.current_build_dir()/'source6' + ext) diff --git a/meson/test cases/common/121 object only target/objdir/source4.c b/meson/test cases/common/121 object only target/objdir/source4.c new file mode 100644 index 000000000..83f4fab81 --- /dev/null +++ b/meson/test cases/common/121 object only target/objdir/source4.c @@ -0,0 +1,3 @@ +int func4_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/121 object only target/objdir/source5.c b/meson/test cases/common/121 object only target/objdir/source5.c new file mode 100644 index 000000000..c512fc310 --- /dev/null +++ b/meson/test cases/common/121 object only target/objdir/source5.c @@ -0,0 +1,3 @@ +int func5_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/121 object only target/objdir/source6.c b/meson/test cases/common/121 object only target/objdir/source6.c new file mode 100644 index 000000000..adcf2cd45 --- /dev/null +++ b/meson/test cases/common/121 object only target/objdir/source6.c @@ -0,0 +1,3 @@ +int func6_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/121 object only target/prog.c b/meson/test cases/common/121 object only target/prog.c new file mode 100644 index 000000000..a27663bd3 --- /dev/null +++ b/meson/test cases/common/121 object only target/prog.c @@ -0,0 +1,11 @@ +int func1_in_obj(void); +int func2_in_obj(void); +int func3_in_obj(void); +int func4_in_obj(void); +int func5_in_obj(void); +int func6_in_obj(void); + +int main(void) { + return func1_in_obj() + func2_in_obj() + func3_in_obj() + + func4_in_obj() + func5_in_obj() + func6_in_obj(); +} diff --git a/meson/test cases/common/121 object only target/source.c b/meson/test cases/common/121 object only target/source.c new file mode 100644 index 000000000..1dc08e168 --- /dev/null +++ b/meson/test cases/common/121 object only target/source.c @@ -0,0 +1,3 @@ +int func1_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/121 
object only target/source2.c b/meson/test cases/common/121 object only target/source2.c new file mode 100644 index 000000000..8024b9714 --- /dev/null +++ b/meson/test cases/common/121 object only target/source2.c @@ -0,0 +1,3 @@ +int func2_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/121 object only target/source2.def b/meson/test cases/common/121 object only target/source2.def new file mode 100644 index 000000000..a993ab8ca --- /dev/null +++ b/meson/test cases/common/121 object only target/source2.def @@ -0,0 +1,2 @@ +EXPORTS + func2_in_obj diff --git a/meson/test cases/common/121 object only target/source3.c b/meson/test cases/common/121 object only target/source3.c new file mode 100644 index 000000000..c4362c4d6 --- /dev/null +++ b/meson/test cases/common/121 object only target/source3.c @@ -0,0 +1,3 @@ +int func3_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/121 object only target/test.json b/meson/test cases/common/121 object only target/test.json new file mode 100644 index 000000000..135300de5 --- /dev/null +++ b/meson/test cases/common/121 object only target/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/prog"}, + {"type": "pdb", "file": "usr/bin/prog"} + ] +} diff --git a/meson/test cases/common/122 no buildincdir/include/header.h b/meson/test cases/common/122 no buildincdir/include/header.h new file mode 100644 index 000000000..1170ee36a --- /dev/null +++ b/meson/test cases/common/122 no buildincdir/include/header.h @@ -0,0 +1,3 @@ +#pragma once + +int foobar(void); diff --git a/meson/test cases/common/122 no buildincdir/meson.build b/meson/test cases/common/122 no buildincdir/meson.build new file mode 100644 index 000000000..53f1a7f58 --- /dev/null +++ b/meson/test cases/common/122 no buildincdir/meson.build @@ -0,0 +1,14 @@ +project('nobuilddir', 'c', + default_options : ['werror=true', 'buildtype=plain']) + +cc = meson.get_compiler('c') + +incwarg = '-Wmissing-include-dirs' + +if cc.has_argument(incwarg) + executable('prog', 'prog.c', + c_args : incwarg, + include_directories : include_directories('include')) +else + error('MESON_SKIP_TEST compiler does not support bad inc dir argument.') +endif diff --git a/meson/test cases/common/122 no buildincdir/prog.c b/meson/test cases/common/122 no buildincdir/prog.c new file mode 100644 index 000000000..b356f6550 --- /dev/null +++ b/meson/test cases/common/122 no buildincdir/prog.c @@ -0,0 +1,5 @@ +#include"header.h" + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/123 custom target directory install/docgen.py b/meson/test cases/common/123 custom target directory install/docgen.py new file mode 100644 index 000000000..97a3f906c --- /dev/null +++ b/meson/test cases/common/123 custom target directory install/docgen.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import os +import sys + +out = sys.argv[1] + +try: + os.mkdir(out) +except FileExistsError: + pass + +for name in ('a', 'b', 'c'): + with open(os.path.join(out, name + '.html'), 'w') as f: + f.write(name) diff --git a/meson/test cases/common/123 custom target directory install/meson.build b/meson/test cases/common/123 custom target directory install/meson.build new file mode 100644 index 000000000..ada9ae119 --- /dev/null +++ b/meson/test cases/common/123 custom target directory install/meson.build @@ -0,0 +1,9 @@ +project('custom-target-dir-install', 'c') + +docgen = find_program('docgen.py') + +custom_target('docgen', + output : 'html', + command : [docgen, '@OUTPUT@'], + install 
: true, + install_dir : join_paths(get_option('datadir'), 'doc/testpkgname')) diff --git a/meson/test cases/common/123 custom target directory install/test.json b/meson/test cases/common/123 custom target directory install/test.json new file mode 100644 index 000000000..c7eebf58f --- /dev/null +++ b/meson/test cases/common/123 custom target directory install/test.json @@ -0,0 +1,7 @@ +{ + "installed": [ + {"type": "file", "file": "usr/share/doc/testpkgname/html/a.html"}, + {"type": "file", "file": "usr/share/doc/testpkgname/html/b.html"}, + {"type": "file", "file": "usr/share/doc/testpkgname/html/c.html"} + ] +} diff --git a/meson/test cases/common/124 dependency file generation/main .c b/meson/test cases/common/124 dependency file generation/main .c new file mode 100644 index 000000000..03b2213bb --- /dev/null +++ b/meson/test cases/common/124 dependency file generation/main .c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/124 dependency file generation/meson.build b/meson/test cases/common/124 dependency file generation/meson.build new file mode 100644 index 000000000..b5ee47bf2 --- /dev/null +++ b/meson/test cases/common/124 dependency file generation/meson.build @@ -0,0 +1,14 @@ +project('dep file gen', 'c') + +cc_id = meson.get_compiler('c').get_id() +cc_ver = meson.get_compiler('c').version() + +if cc_id == 'intel' or (cc_id == 'lcc' and cc_ver.version_compare('<=1.23.08')) + # ICC and LCC <= 1.23.08 do not escape spaces in paths in the dependency file, so Ninja + # (correctly) thinks that the rule has multiple outputs and errors out: + # 'depfile has multiple output paths' + error('MESON_SKIP_TEST: Skipping test because your compiler is known to generate broken dependency files') +endif + +e = executable('main file', 'main .c') +test('test it', e) diff --git a/meson/test cases/common/125 configure file in generator/inc/confdata.in b/meson/test cases/common/125 configure file in generator/inc/confdata.in new file mode 100644 index 000000000..e44cdea20 --- /dev/null +++ b/meson/test cases/common/125 configure file in generator/inc/confdata.in @@ -0,0 +1 @@ +@VALUE@ diff --git a/meson/test cases/common/125 configure file in generator/inc/meson.build b/meson/test cases/common/125 configure file in generator/inc/meson.build new file mode 100644 index 000000000..05d2dcb8a --- /dev/null +++ b/meson/test cases/common/125 configure file in generator/inc/meson.build @@ -0,0 +1,6 @@ +cdata = configuration_data() +cdata.set('VALUE', '42') + +cfile = configure_file(input : 'confdata.in', +output : 'confdata', +configuration : cdata) diff --git a/meson/test cases/common/125 configure file in generator/meson.build b/meson/test cases/common/125 configure file in generator/meson.build new file mode 100644 index 000000000..e1c26b6d1 --- /dev/null +++ b/meson/test cases/common/125 configure file in generator/meson.build @@ -0,0 +1,4 @@ +project('conf file in generator', 'c') + +subdir('inc') +subdir('src') diff --git a/meson/test cases/common/125 configure file in generator/src/gen.py b/meson/test cases/common/125 configure file in generator/src/gen.py new file mode 100755 index 000000000..426d0b74c --- /dev/null +++ b/meson/test cases/common/125 configure file in generator/src/gen.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +import sys + +ifile = sys.argv[1] +ofile = sys.argv[2] + +with open(ifile) as f: + resval = f.readline().strip() + +templ = '#define RESULT (%s)\n' +with open(ofile, 'w') as f: + f.write(templ % (resval, )) diff --git a/meson/test 
cases/common/125 configure file in generator/src/main.c b/meson/test cases/common/125 configure file in generator/src/main.c new file mode 100644 index 000000000..6329e4791 --- /dev/null +++ b/meson/test cases/common/125 configure file in generator/src/main.c @@ -0,0 +1,17 @@ +#include <stdio.h> + +#include"confdata.h" +#if RESULT != 42 +#error Configuration RESULT is not defined correctly +#endif + +#undef RESULT + +#include"source.h" +#if RESULT != 23 +#error Source RESULT is not defined correctly +#endif + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/125 configure file in generator/src/meson.build b/meson/test cases/common/125 configure file in generator/src/meson.build new file mode 100644 index 000000000..2fb804eec --- /dev/null +++ b/meson/test cases/common/125 configure file in generator/src/meson.build @@ -0,0 +1,7 @@ +compiler = find_program('gen.py') +gen = generator(compiler, + output: '@BASENAME@.h', + arguments : ['@INPUT@', '@OUTPUT@']) +hs = gen.process(cfile, files('source')) + +executable('proggie', 'main.c', hs) diff --git a/meson/test cases/common/125 configure file in generator/src/source b/meson/test cases/common/125 configure file in generator/src/source new file mode 100644 index 000000000..409940768 --- /dev/null +++ b/meson/test cases/common/125 configure file in generator/src/source @@ -0,0 +1 @@ +23 diff --git a/meson/test cases/common/126 generated llvm ir/copyfile.py b/meson/test cases/common/126 generated llvm ir/copyfile.py new file mode 100644 index 000000000..ff42ac359 --- /dev/null +++ b/meson/test cases/common/126 generated llvm ir/copyfile.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys +import shutil + +shutil.copyfile(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/126 generated llvm ir/main.c b/meson/test cases/common/126 generated llvm ir/main.c new file mode 100644 index 000000000..35e8ed623 --- /dev/null +++ b/meson/test cases/common/126 generated llvm ir/main.c @@ -0,0 +1,13 @@ +#include <stdio.h> + +unsigned square_unsigned (unsigned a); + +int main(void) +{ + unsigned int ret = square_unsigned (2); + if (ret != 4) { + printf("Got %u instead of 4\n", ret); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/126 generated llvm ir/meson.build b/meson/test cases/common/126 generated llvm ir/meson.build new file mode 100644 index 000000000..f10754a09 --- /dev/null +++ b/meson/test cases/common/126 generated llvm ir/meson.build @@ -0,0 +1,28 @@ +project('generated llvm ir', 'c') + +if meson.get_compiler('c').get_id() != 'clang' + error('MESON_SKIP_TEST: LLVM IR files can only be built with clang') +endif + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: LLVM ir not supported with the Xcode backend. 
Patches welcome.') +endif + +copy = find_program('copyfile.py') + +copygen = generator(copy, + arguments : ['@INPUT@', '@OUTPUT@'], + output : '@BASENAME@') + +l = library('square-gen', copygen.process('square.ll.in')) + +test('square-gen-test', executable('square-gen-test', 'main.c', link_with : l)) + +copyct = custom_target('square', + input : 'square.ll.in', + output : 'square.ll', + command : [copy, '@INPUT@', '@OUTPUT@']) + +l = library('square-ct', copyct) + +test('square-ct-test', executable('square-ct-test', 'main.c', link_with : l)) diff --git a/meson/test cases/common/126 generated llvm ir/square.ll.in b/meson/test cases/common/126 generated llvm ir/square.ll.in new file mode 100644 index 000000000..7c321aa82 --- /dev/null +++ b/meson/test cases/common/126 generated llvm ir/square.ll.in @@ -0,0 +1,4 @@ +define i32 @square_unsigned(i32 %a) { + %1 = mul i32 %a, %a + ret i32 %1 +} diff --git a/meson/test cases/common/127 generated assembly/copyfile.py b/meson/test cases/common/127 generated assembly/copyfile.py new file mode 100644 index 000000000..ff42ac359 --- /dev/null +++ b/meson/test cases/common/127 generated assembly/copyfile.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys +import shutil + +shutil.copyfile(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/127 generated assembly/empty.c b/meson/test cases/common/127 generated assembly/empty.c new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/127 generated assembly/main.c b/meson/test cases/common/127 generated assembly/main.c new file mode 100644 index 000000000..fb38f9df4 --- /dev/null +++ b/meson/test cases/common/127 generated assembly/main.c @@ -0,0 +1,16 @@ +#include + +#if defined(_WIN32) || defined(__CYGWIN__) + __declspec(dllimport) +#endif +unsigned square_unsigned (unsigned a); + +int main(void) +{ + unsigned int ret = square_unsigned (2); + if (ret != 4) { + printf("Got %u instead of 4\n", ret); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/127 generated assembly/meson.build b/meson/test cases/common/127 generated assembly/meson.build new file mode 100644 index 000000000..31a5f17d5 --- /dev/null +++ b/meson/test cases/common/127 generated assembly/meson.build @@ -0,0 +1,66 @@ +project('generated assembly', 'c') + +cc = meson.get_compiler('c') + +if ['msvc', 'intel-cl'].contains(cc.get_id()) + error('MESON_SKIP_TEST: assembly files cannot be compiled directly by the compiler') +endif + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: asm not supported with the Xcode backend. Patches welcome.') +endif + + +crt_workaround = [] +if cc.get_linker_id() == 'lld-link' + # It seems that when building without a .c file, lld-link.exe + # misses the fact that it needs to include the c runtime to + # make a working .dll. So here we add an empty .c file to easily + # pull in crt. + crt_workaround += 'empty.c' + if host_machine.cpu_family() == 'x86' + # x86 assembly needs manual annotation to be compatible with + # Safe Exception Handlers (?) This assembly doesn't have such + # annotation, so just disable the feature. 
+ add_project_link_arguments('/SAFESEH:NO', language : 'c') + endif +endif + +cpu = host_machine.cpu_family() +supported_cpus = ['arm', 'x86', 'x86_64'] + +if not supported_cpus.contains(cpu) + error('MESON_SKIP_TEST: unsupported cpu family: ' + cpu) +endif + +if cc.get_id() == 'clang-cl' and cc.version().version_compare('< 12.0.0') and cpu == 'arm' + # https://reviews.llvm.org/D89622 + error('MESON_SKIP_TEST: arm debug symbols not supported in clang-cl < 12.0.0') +endif + +if cc.symbols_have_underscore_prefix() + add_project_arguments('-DMESON_TEST__UNDERSCORE_SYMBOL', language : 'c') +endif + +copy = find_program('copyfile.py') +output = 'square-@0@.S'.format(cpu) +input = output + '.in' + +copygen = generator(copy, + arguments : ['@INPUT@', '@OUTPUT@'], + output : '@BASENAME@') + +l = library('square-gen', crt_workaround + [copygen.process(input)], + vs_module_defs: 'square.def') + +test('square-gen-test', executable('square-gen-test', 'main.c', link_with : l)) + +copyct = custom_target('square', + input : input, + output : output, + command : [copy, '@INPUT@', '@OUTPUT@']) + +l = library('square-ct', crt_workaround + [copyct], + vs_module_defs: 'square.def') + +test('square-ct-test', executable('square-ct-test', 'main.c', link_with : l)) diff --git a/meson/test cases/common/127 generated assembly/square-arm.S.in b/meson/test cases/common/127 generated assembly/square-arm.S.in new file mode 100644 index 000000000..d2fb7ac20 --- /dev/null +++ b/meson/test cases/common/127 generated assembly/square-arm.S.in @@ -0,0 +1,13 @@ +#include "symbol-underscore.h" + +.text +.globl SYMBOL_NAME(square_unsigned) +/* Only supported with GAS */ +# if defined(__linux__) || defined(__DragonFly__) || defined(__FreeBSD__) || defined(__NetBSD__) +.type square_unsigned,%function +#endif + +SYMBOL_NAME(square_unsigned): + mul r1, r0, r0 + mov r0, r1 + mov pc, lr diff --git a/meson/test cases/common/127 generated assembly/square-x86.S.in b/meson/test cases/common/127 generated assembly/square-x86.S.in new file mode 100644 index 000000000..1a48fc416 --- /dev/null +++ b/meson/test cases/common/127 generated assembly/square-x86.S.in @@ -0,0 +1,34 @@ +#include "symbol-underscore.h" + +#if defined(_MSC_VER) && !defined(__clang__) + +.386 +.MODEL FLAT, C + +PUBLIC square_unsigned +_TEXT SEGMENT + +square_unsigned PROC var1:DWORD + mov eax, var1 + imul eax, eax + ret +square_unsigned ENDP + +_TEXT ENDS +END + +#else + +.text +.globl SYMBOL_NAME(square_unsigned) +/* Only supported with GAS */ +# if defined(__linux__) || defined(__DragonFly__) || defined(__FreeBSD__) || defined(__NetBSD__) +.type square_unsigned,@function +# endif + +SYMBOL_NAME(square_unsigned): + movl 4(%esp), %eax + imull %eax, %eax + retl + +#endif diff --git a/meson/test cases/common/127 generated assembly/square-x86_64.S.in b/meson/test cases/common/127 generated assembly/square-x86_64.S.in new file mode 100644 index 000000000..d50434115 --- /dev/null +++ b/meson/test cases/common/127 generated assembly/square-x86_64.S.in @@ -0,0 +1,38 @@ +#include "symbol-underscore.h" + +#if defined(_MSC_VER) && !defined(__clang__) /* MSVC on Windows */ + +PUBLIC SYMBOL_NAME(square_unsigned) +_TEXT SEGMENT + +SYMBOL_NAME(square_unsigned) PROC + mov eax, ecx + imul eax, eax + ret +SYMBOL_NAME(square_unsigned) ENDP + +_TEXT ENDS +END + +#else + +.text +.globl SYMBOL_NAME(square_unsigned) +/* Only supported with GAS */ +# if defined(__linux__) || defined(__DragonFly__) || defined(__FreeBSD__) || defined(__NetBSD__) || defined(__sun) +.type 
square_unsigned,@function +# endif + +# if defined(_WIN32) || defined(__CYGWIN__) /* msabi */ +SYMBOL_NAME(square_unsigned): + imull %ecx, %ecx + movl %ecx, %eax + retq +# else /* sysvabi */ +SYMBOL_NAME(square_unsigned): + imull %edi, %edi + movl %edi, %eax + retq +# endif + +#endif diff --git a/meson/test cases/common/127 generated assembly/square.def b/meson/test cases/common/127 generated assembly/square.def new file mode 100644 index 000000000..79f3d65c0 --- /dev/null +++ b/meson/test cases/common/127 generated assembly/square.def @@ -0,0 +1,2 @@ +EXPORTS + square_unsigned diff --git a/meson/test cases/common/127 generated assembly/symbol-underscore.h b/meson/test cases/common/127 generated assembly/symbol-underscore.h new file mode 100644 index 000000000..d0f3ef9cc --- /dev/null +++ b/meson/test cases/common/127 generated assembly/symbol-underscore.h @@ -0,0 +1,5 @@ +#if defined(MESON_TEST__UNDERSCORE_SYMBOL) +# define SYMBOL_NAME(name) _##name +#else +# define SYMBOL_NAME(name) name +#endif diff --git a/meson/test cases/common/128 build by default targets in tests/main.c b/meson/test cases/common/128 build by default targets in tests/main.c new file mode 100644 index 000000000..03b2213bb --- /dev/null +++ b/meson/test cases/common/128 build by default targets in tests/main.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/128 build by default targets in tests/meson.build b/meson/test cases/common/128 build by default targets in tests/meson.build new file mode 100644 index 000000000..5cc505519 --- /dev/null +++ b/meson/test cases/common/128 build by default targets in tests/meson.build @@ -0,0 +1,23 @@ +project('unit-test', 'c', version : '1.0') + +write_file = find_program('write_file.py') + +# A test that consumes and verifies the output generated by a custom target. +# Should work even if target is not built by default. Makes sure that foo.out +# is actually created before the test command that uses foo_out is run. +foo_out = custom_target('foo.out', + output : 'foo.out', + command : [write_file, '@OUTPUT@']) + +# Also verify that a build_by_default : false BuildTarget added to a test is +# built before the test is run. 
+exe_out = executable('out', 'main.c', build_by_default : false) + +py_file_exists = '''import os, sys +if not os.path.exists(sys.argv[1]) or not os.path.exists(sys.argv[2]): + print("could not find {!r} or {!r} in {!r}" + "".format(sys.argv[1], sys.argv[2], os.getcwd())) + sys.exit(1)''' + +python = import('python3').find_python() +test('output-check', python, args : ['-c', py_file_exists, foo_out, exe_out]) diff --git a/meson/test cases/common/128 build by default targets in tests/write_file.py b/meson/test cases/common/128 build by default targets in tests/write_file.py new file mode 100644 index 000000000..ff9c224f0 --- /dev/null +++ b/meson/test cases/common/128 build by default targets in tests/write_file.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys + +with open(sys.argv[1], 'w') as f: + f.write('Test') diff --git a/meson/test cases/common/129 build by default/checkexists.py b/meson/test cases/common/129 build by default/checkexists.py new file mode 100644 index 000000000..6664f7291 --- /dev/null +++ b/meson/test cases/common/129 build by default/checkexists.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +import os.path, sys + +invert = False +for path in sys.argv[1:]: + if path == '--not': + invert = True + elif not os.path.exists(path) ^ invert: + sys.exit(1) diff --git a/meson/test cases/common/129 build by default/foo.c b/meson/test cases/common/129 build by default/foo.c new file mode 100644 index 000000000..0322828dc --- /dev/null +++ b/meson/test cases/common/129 build by default/foo.c @@ -0,0 +1,6 @@ +#include + +int main(void) { + printf("Existentialism.\n"); + return 0; +} diff --git a/meson/test cases/common/129 build by default/meson.build b/meson/test cases/common/129 build by default/meson.build new file mode 100644 index 000000000..b28b6347c --- /dev/null +++ b/meson/test cases/common/129 build by default/meson.build @@ -0,0 +1,45 @@ +project('build on all', 'c') + +py3_mod = import('python3') +py3 = py3_mod.find_python() + +executable('fooprog', 'foo.c', + build_by_default : false, +) + +executable('barprog', 'foo.c', + build_by_default : false, + build_always : true, +) + +comp = files('mygen.py') +checkexists = files('checkexists.py') + +mytarget = custom_target('gendat1', + output : 'generated1.dat', + input : 'source.txt', + command : [py3] + comp + ['@INPUT@', '@OUTPUT@'], + build_by_default : true, +) + +mytarget = custom_target('gendat2', + output : 'generated2.dat', + input : 'source.txt', + command : [py3] + comp + ['@INPUT@', '@OUTPUT@'], + build_by_default : true, + build_always : false, +) + +ct1_output = join_paths(meson.build_root(), 'generated1.dat') +ct2_output = join_paths(meson.build_root(), 'generated2.dat') +exe1_output = join_paths(meson.build_root(), 'fooprog') +exe2_output = join_paths(meson.build_root(), 'barprog') + +if host_machine.system() == 'windows' + exe1_output += '.exe' + exe2_output += '.exe' +endif + +test('check-build-by-default', py3, + args : [checkexists, + ct1_output, ct2_output, '--not', exe1_output, exe2_output]) diff --git a/meson/test cases/common/129 build by default/mygen.py b/meson/test cases/common/129 build by default/mygen.py new file mode 100644 index 000000000..5a74153ee --- /dev/null +++ b/meson/test cases/common/129 build by default/mygen.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +import sys + +ifile = open(sys.argv[1]) +ofile = open(sys.argv[2], 'w') + +ofile.write(ifile.read()) diff --git a/meson/test cases/common/129 build by default/source.txt b/meson/test cases/common/129 build by 
default/source.txt new file mode 100644 index 000000000..3573f4b21 --- /dev/null +++ b/meson/test cases/common/129 build by default/source.txt @@ -0,0 +1 @@ +I am a bunch of text. diff --git a/meson/test cases/common/13 pch/c/meson.build b/meson/test cases/common/13 pch/c/meson.build new file mode 100644 index 000000000..6fba15bf4 --- /dev/null +++ b/meson/test cases/common/13 pch/c/meson.build @@ -0,0 +1,14 @@ +cc = meson.get_compiler('c') +cc_id = cc.get_id() + +if cc_id == 'lcc' + error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.') +endif + +# PGI compiler only supports PCH for C++ +if cc_id == 'pgi' + subdir_done() +endif + +exe = executable('prog', 'prog.c', +c_pch : 'pch/prog.h') diff --git a/meson/test cases/common/13 pch/c/pch/prog.h b/meson/test cases/common/13 pch/c/pch/prog.h new file mode 100644 index 000000000..c89890a77 --- /dev/null +++ b/meson/test cases/common/13 pch/c/pch/prog.h @@ -0,0 +1,6 @@ +#ifndef PROG_H +// Header guards for PCH confuse msvc in some situations. +// Using them here makes sure we handle this correctly. +#define PROG_H +#include <stdio.h> +#endif diff --git a/meson/test cases/common/13 pch/c/prog.c b/meson/test cases/common/13 pch/c/prog.c new file mode 100644 index 000000000..124bba063 --- /dev/null +++ b/meson/test cases/common/13 pch/c/prog.c @@ -0,0 +1,10 @@ +// No includes here, they need to come from the PCH + +void func(void) { + fprintf(stdout, "This is a function that fails if stdio is not #included.\n"); +} + +int main(void) { + return 0; +} + diff --git a/meson/test cases/common/13 pch/cpp/meson.build b/meson/test cases/common/13 pch/cpp/meson.build new file mode 100644 index 000000000..b01cd58f3 --- /dev/null +++ b/meson/test cases/common/13 pch/cpp/meson.build @@ -0,0 +1 @@ +exe = executable('prog', 'prog.cc', cpp_pch : 'pch/prog.hh') diff --git a/meson/test cases/common/13 pch/cpp/pch/prog.hh b/meson/test cases/common/13 pch/cpp/pch/prog.hh new file mode 100644 index 000000000..751cc4a71 --- /dev/null +++ b/meson/test cases/common/13 pch/cpp/pch/prog.hh @@ -0,0 +1 @@ +#include <iostream> diff --git a/meson/test cases/common/13 pch/cpp/prog.cc b/meson/test cases/common/13 pch/cpp/prog.cc new file mode 100644 index 000000000..0ba8519dc --- /dev/null +++ b/meson/test cases/common/13 pch/cpp/prog.cc @@ -0,0 +1,11 @@ +// Note: if using PGI compilers, you will need to add #include "prog.hh" +// even though you're using precompiled headers. +void func(void) { + std::cout << "This is a function that fails to compile if iostream is not included." 
+ << std::endl; +} + +int main(void) { + func(); + return 0; +} diff --git a/meson/test cases/common/13 pch/generated/gen_custom.py b/meson/test cases/common/13 pch/generated/gen_custom.py new file mode 100644 index 000000000..650e03c2b --- /dev/null +++ b/meson/test cases/common/13 pch/generated/gen_custom.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 +import sys + +with open(sys.argv[1], 'w') as f: + f.write("#define FOO 0") diff --git a/meson/test cases/common/13 pch/generated/gen_generator.py b/meson/test cases/common/13 pch/generated/gen_generator.py new file mode 100644 index 000000000..a245e7aef --- /dev/null +++ b/meson/test cases/common/13 pch/generated/gen_generator.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 +import sys + +with open(sys.argv[1]) as f: + content = f.read() +with open(sys.argv[2], 'w') as f: + f.write(content) diff --git a/meson/test cases/common/13 pch/generated/generated_generator.in b/meson/test cases/common/13 pch/generated/generated_generator.in new file mode 100644 index 000000000..1a00ebd21 --- /dev/null +++ b/meson/test cases/common/13 pch/generated/generated_generator.in @@ -0,0 +1 @@ +#define BAR 0 diff --git a/meson/test cases/common/13 pch/generated/meson.build b/meson/test cases/common/13 pch/generated/meson.build new file mode 100644 index 000000000..ba06bcea8 --- /dev/null +++ b/meson/test cases/common/13 pch/generated/meson.build @@ -0,0 +1,22 @@ +cc = meson.get_compiler('c') +cc_id = cc.get_id() + +if cc_id == 'lcc' + error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.') +endif + +# PGI compiler only supports PCH for C++ +if cc_id == 'pgi' + subdir_done() +endif + +generated_customTarget = custom_target('makeheader', + output: 'generated_customTarget.h', + command : [find_program('gen_custom.py'), '@OUTPUT0@']) + +generated_generator = generator(find_program('gen_generator.py'), + output: '@BASENAME@.h', + arguments: ['@INPUT@', '@OUTPUT@']) + +exe = executable('prog', 'prog.c', generated_customTarget, generated_generator.process('generated_generator.in'), + c_pch: 'pch/prog.h') diff --git a/meson/test cases/common/13 pch/generated/pch/prog.h b/meson/test cases/common/13 pch/generated/pch/prog.h new file mode 100644 index 000000000..15fec38ce --- /dev/null +++ b/meson/test cases/common/13 pch/generated/pch/prog.h @@ -0,0 +1,2 @@ +#include "generated_customTarget.h" +#include "generated_generator.h" diff --git a/meson/test cases/common/13 pch/generated/prog.c b/meson/test cases/common/13 pch/generated/prog.c new file mode 100644 index 000000000..6765ac102 --- /dev/null +++ b/meson/test cases/common/13 pch/generated/prog.c @@ -0,0 +1,6 @@ +// No includes here, they need to come from the PCH + +int main(void) { + return FOO + BAR; +} + diff --git a/meson/test cases/common/13 pch/meson.build b/meson/test cases/common/13 pch/meson.build new file mode 100644 index 000000000..5ca9ab1d8 --- /dev/null +++ b/meson/test cases/common/13 pch/meson.build @@ -0,0 +1,22 @@ +project('pch test', 'c', 'cpp', + meson_version: '>= 0.46.0') + +cc = meson.get_compiler('c') +cc_id = cc.get_id() + +if cc_id == 'pgi' + error('MESON_SKIP_TEST: PGI compiler does support PCH, however, PGI cannot tolerate spaces in the --pch_dir path and Meson run_project_tests.py uses spaces in temporary build path names. 
If this test is run individually with no spaces in build path, it will pass.') +endif + +subdir('c') +subdir('cpp') +subdir('generated') +subdir('userDefined') +subdir('withIncludeDirectories') +subdir('withIncludeFile') + +if meson.backend() == 'xcode' + warning('Xcode backend only supports one precompiled header per target. Skipping "mixed" which has various precompiled headers.') +else + subdir('mixed') +endif diff --git a/meson/test cases/common/13 pch/mixed/func.c b/meson/test cases/common/13 pch/mixed/func.c new file mode 100644 index 000000000..620eca133 --- /dev/null +++ b/meson/test cases/common/13 pch/mixed/func.c @@ -0,0 +1,7 @@ +void tmp_func(void) { + fprintf(stdout, "This is a function that fails if stdio is not #included.\n"); +} + +int cfunc(void) { + return 0; +} diff --git a/meson/test cases/common/13 pch/mixed/main.cc b/meson/test cases/common/13 pch/mixed/main.cc new file mode 100644 index 000000000..432120329 --- /dev/null +++ b/meson/test cases/common/13 pch/mixed/main.cc @@ -0,0 +1,10 @@ +extern "C" int cfunc(); + +void func(void) { + std::cout << "This is a function that fails to compile if iostream is not included." + << std::endl; +} + +int main(void) { + return cfunc(); +} diff --git a/meson/test cases/common/13 pch/mixed/meson.build b/meson/test cases/common/13 pch/mixed/meson.build new file mode 100644 index 000000000..266e7a575 --- /dev/null +++ b/meson/test cases/common/13 pch/mixed/meson.build @@ -0,0 +1,14 @@ +cc = meson.get_compiler('c') +cc_id = cc.get_id() + +# PGI compiler only supports PCH for C++ +if cc_id == 'pgi' + subdir_done() +endif + +exe = executable( + 'prog', + files('main.cc', 'func.c'), + c_pch : ['pch/func.h'], + cpp_pch : ['pch/main.h'], +) diff --git a/meson/test cases/common/13 pch/mixed/pch/func.h b/meson/test cases/common/13 pch/mixed/pch/func.h new file mode 100644 index 000000000..354499acd --- /dev/null +++ b/meson/test cases/common/13 pch/mixed/pch/func.h @@ -0,0 +1 @@ +#include diff --git a/meson/test cases/common/13 pch/mixed/pch/main.h b/meson/test cases/common/13 pch/mixed/pch/main.h new file mode 100644 index 000000000..751cc4a71 --- /dev/null +++ b/meson/test cases/common/13 pch/mixed/pch/main.h @@ -0,0 +1 @@ +#include diff --git a/meson/test cases/common/13 pch/userDefined/meson.build b/meson/test cases/common/13 pch/userDefined/meson.build new file mode 100644 index 000000000..9b60572e6 --- /dev/null +++ b/meson/test cases/common/13 pch/userDefined/meson.build @@ -0,0 +1,10 @@ +cc = meson.get_compiler('c') +cc_id = cc.get_id() + +# User supplied PCH implementation should override the auto +# generated one. PCH implementations are only supported for +# msvc and generally should not be used at all. Support for +# them is only kept for backwards compatibility. 
+if cc_id == 'msvc' + exe = executable('prog', 'prog.c', c_pch : ['pch/pch.h', 'pch/pch.c']) +endif diff --git a/meson/test cases/common/13 pch/userDefined/pch/pch.c b/meson/test cases/common/13 pch/userDefined/pch/pch.c new file mode 100644 index 000000000..6a971404d --- /dev/null +++ b/meson/test cases/common/13 pch/userDefined/pch/pch.c @@ -0,0 +1,5 @@ +#include "pch.h" + +int foo(void) { + return 0; +} diff --git a/meson/test cases/common/13 pch/userDefined/pch/pch.h b/meson/test cases/common/13 pch/userDefined/pch/pch.h new file mode 100644 index 000000000..5d5f8f0c9 --- /dev/null +++ b/meson/test cases/common/13 pch/userDefined/pch/pch.h @@ -0,0 +1 @@ +int foo(); diff --git a/meson/test cases/common/13 pch/userDefined/prog.c b/meson/test cases/common/13 pch/userDefined/prog.c new file mode 100644 index 000000000..475131b35 --- /dev/null +++ b/meson/test cases/common/13 pch/userDefined/prog.c @@ -0,0 +1,8 @@ +// No includes here, they need to come from the PCH + +int main(void) { + // Method is implemented in pch.c. + // This makes sure that we can properly handle user defined + // pch implementation files and not only auto-generated ones. + return foo(); +} diff --git a/meson/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h b/meson/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h new file mode 100644 index 000000000..53c5fdf17 --- /dev/null +++ b/meson/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h @@ -0,0 +1 @@ +#include diff --git a/meson/test cases/common/13 pch/withIncludeDirectories/meson.build b/meson/test cases/common/13 pch/withIncludeDirectories/meson.build new file mode 100644 index 000000000..95f7888d9 --- /dev/null +++ b/meson/test cases/common/13 pch/withIncludeDirectories/meson.build @@ -0,0 +1,15 @@ +cc = meson.get_compiler('c') +cc_id = cc.get_id() + +if cc_id == 'lcc' + error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.') +endif + +# PGI compiler only supports PCH for C++ +if cc_id == 'pgi' + subdir_done() +endif + +exe = executable('prog', 'prog.c', + include_directories: 'include', + c_pch : 'pch/prog.h') diff --git a/meson/test cases/common/13 pch/withIncludeDirectories/pch/prog.h b/meson/test cases/common/13 pch/withIncludeDirectories/pch/prog.h new file mode 100644 index 000000000..383b2c513 --- /dev/null +++ b/meson/test cases/common/13 pch/withIncludeDirectories/pch/prog.h @@ -0,0 +1 @@ +#include diff --git a/meson/test cases/common/13 pch/withIncludeDirectories/prog.c b/meson/test cases/common/13 pch/withIncludeDirectories/prog.c new file mode 100644 index 000000000..124bba063 --- /dev/null +++ b/meson/test cases/common/13 pch/withIncludeDirectories/prog.c @@ -0,0 +1,10 @@ +// No includes here, they need to come from the PCH + +void func(void) { + fprintf(stdout, "This is a function that fails if stdio is not #included.\n"); +} + +int main(void) { + return 0; +} + diff --git a/meson/test cases/common/13 pch/withIncludeFile/meson.build b/meson/test cases/common/13 pch/withIncludeFile/meson.build new file mode 100644 index 000000000..4fd232251 --- /dev/null +++ b/meson/test cases/common/13 pch/withIncludeFile/meson.build @@ -0,0 +1,18 @@ +cc = meson.get_compiler('c') +cc_id = cc.get_id() + +if cc_id == 'lcc' + error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.') +endif + +if cc.get_argument_syntax() == 'gcc' + c_args = ['-include', 'locale.h'] +elif cc.get_argument_syntax() == 'msvc' + c_args = ['/FI' + 'locale.h'] +else + subdir_done() +endif + +exe = executable('prog', 'prog.c', 
+c_args: c_args, +c_pch : 'pch/prog.h') diff --git a/meson/test cases/common/13 pch/withIncludeFile/pch/prog.h b/meson/test cases/common/13 pch/withIncludeFile/pch/prog.h new file mode 100644 index 000000000..c89890a77 --- /dev/null +++ b/meson/test cases/common/13 pch/withIncludeFile/pch/prog.h @@ -0,0 +1,6 @@ +#ifndef PROG_H +// Header guards for PCH confuse msvc in some situations. +// Using them here makes sure we handle this correctly. +#define PROG_H +#include +#endif diff --git a/meson/test cases/common/13 pch/withIncludeFile/prog.c b/meson/test cases/common/13 pch/withIncludeFile/prog.c new file mode 100644 index 000000000..7a9a93c6a --- /dev/null +++ b/meson/test cases/common/13 pch/withIncludeFile/prog.c @@ -0,0 +1,11 @@ +// No includes here, they need to come from the PCH or explicit inclusion + +void func(void) { + fprintf(stdout, "This is a function that fails if stdio is not #included.\n"); + setlocale(LC_ALL, ""); /* This will fail if locale.h is not included */ +} + +int main(void) { + return 0; +} + diff --git a/meson/test cases/common/130 include order/ctsub/copyfile.py b/meson/test cases/common/130 include order/ctsub/copyfile.py new file mode 100644 index 000000000..ff42ac359 --- /dev/null +++ b/meson/test cases/common/130 include order/ctsub/copyfile.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys +import shutil + +shutil.copyfile(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/130 include order/ctsub/emptyfile.c b/meson/test cases/common/130 include order/ctsub/emptyfile.c new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/130 include order/ctsub/main.h b/meson/test cases/common/130 include order/ctsub/main.h new file mode 100644 index 000000000..9d9acf36f --- /dev/null +++ b/meson/test cases/common/130 include order/ctsub/main.h @@ -0,0 +1 @@ +#error "ctsub/main.h included" diff --git a/meson/test cases/common/130 include order/ctsub/meson.build b/meson/test cases/common/130 include order/ctsub/meson.build new file mode 100644 index 000000000..a242e0764 --- /dev/null +++ b/meson/test cases/common/130 include order/ctsub/meson.build @@ -0,0 +1,9 @@ +# https://github.com/mesonbuild/meson/pull/2291 +copy = find_program('copyfile.py') +configure_file(input : 'main.h', + output : 'main.h', + command : [copy, '@INPUT@', '@OUTPUT@']) +ctfile = custom_target('emptyfile', + input : 'emptyfile.c', + output : 'emptyfile.c', + command : [copy, '@INPUT@', '@OUTPUT@']) diff --git a/meson/test cases/common/130 include order/inc1/hdr.h b/meson/test cases/common/130 include order/inc1/hdr.h new file mode 100644 index 000000000..9d755a830 --- /dev/null +++ b/meson/test cases/common/130 include order/inc1/hdr.h @@ -0,0 +1 @@ +#define SOME_DEFINE 42 diff --git a/meson/test cases/common/130 include order/inc2/hdr.h b/meson/test cases/common/130 include order/inc2/hdr.h new file mode 100644 index 000000000..2ebcacaf4 --- /dev/null +++ b/meson/test cases/common/130 include order/inc2/hdr.h @@ -0,0 +1 @@ +#undef SOME_DEFINE diff --git a/meson/test cases/common/130 include order/meson.build b/meson/test cases/common/130 include order/meson.build new file mode 100644 index 000000000..9f275b866 --- /dev/null +++ b/meson/test cases/common/130 include order/meson.build @@ -0,0 +1,36 @@ +project('include order', 'c') + +# Test that the order of priority of include paths (from first to last) is: +# +# 1. Target's current build directory +# 2. Target's current source directory +# 3. Include paths added with the `c_args:` kwarg +# 4. 
Include paths added with the `include_directories`: kwarg +# Within this, the build dir takes precedence over the source dir +# 5. Include paths added via `include_directories:` of internal deps +# Within this, the build dir takes precedence over the source dir + +# Custom target dir with a built header +subdir('ctsub') +# Defines an internal dep +subdir('sub1') +# Defines a per-target include path +subdir('sub2') +# Directory for `c_args:` include path +subdir('sub3') +# The directory where the target resides +subdir('sub4') + +# Test that the order in which internal dependencies are specified is +# preserved. This is needed especially when subprojects get involved and +# multiple build-root config.h files exist, and we must be sure that the +# correct one is found: https://github.com/mesonbuild/meson/issues/1495 +f = executable('somefxe', 'sub4/main.c', + dependencies : [correctinc, dep, wronginc]) + +test('eh', e) +test('oh', f) + +# Test that the order in include_directories() is maintained +incs = include_directories('inc1', 'inc2') +executable('ordertest', 'ordertest.c', include_directories: incs) diff --git a/meson/test cases/common/130 include order/ordertest.c b/meson/test cases/common/130 include order/ordertest.c new file mode 100644 index 000000000..775e34fa1 --- /dev/null +++ b/meson/test cases/common/130 include order/ordertest.c @@ -0,0 +1,10 @@ +#include "hdr.h" + +#if !defined(SOME_DEFINE) || SOME_DEFINE != 42 +#error "Should have picked up hdr.h from inc1/hdr.h" +#endif + +int main(void) +{ + return 0; +} diff --git a/meson/test cases/common/130 include order/sub1/main.h b/meson/test cases/common/130 include order/sub1/main.h new file mode 100644 index 000000000..acf4a358c --- /dev/null +++ b/meson/test cases/common/130 include order/sub1/main.h @@ -0,0 +1 @@ +#error "sub1/main.h included" diff --git a/meson/test cases/common/130 include order/sub1/meson.build b/meson/test cases/common/130 include order/sub1/meson.build new file mode 100644 index 000000000..9672945b6 --- /dev/null +++ b/meson/test cases/common/130 include order/sub1/meson.build @@ -0,0 +1,4 @@ +i = include_directories('.') +l = shared_library('somelib', 'some.c') +dep = declare_dependency(link_with : l, + include_directories : i) diff --git a/meson/test cases/common/130 include order/sub1/some.c b/meson/test cases/common/130 include order/sub1/some.c new file mode 100644 index 000000000..1ab0db4dd --- /dev/null +++ b/meson/test cases/common/130 include order/sub1/some.c @@ -0,0 +1,6 @@ +#if defined _WIN32 || defined __CYGWIN__ + __declspec(dllexport) +#endif +int somefunc(void) { + return 1984; +} diff --git a/meson/test cases/common/130 include order/sub1/some.h b/meson/test cases/common/130 include order/sub1/some.h new file mode 100644 index 000000000..6479492ea --- /dev/null +++ b/meson/test cases/common/130 include order/sub1/some.h @@ -0,0 +1,10 @@ +#pragma once + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllimport) +#else + #define DLL_PUBLIC +#endif + +DLL_PUBLIC +int somefunc(void); diff --git a/meson/test cases/common/130 include order/sub2/main.h b/meson/test cases/common/130 include order/sub2/main.h new file mode 100644 index 000000000..b9c0da93a --- /dev/null +++ b/meson/test cases/common/130 include order/sub2/main.h @@ -0,0 +1 @@ +#error "sub2/main.h included" diff --git a/meson/test cases/common/130 include order/sub2/meson.build b/meson/test cases/common/130 include order/sub2/meson.build new file mode 100644 index 000000000..b1e61900f --- /dev/null +++ 
b/meson/test cases/common/130 include order/sub2/meson.build @@ -0,0 +1,2 @@ +j = include_directories('.') +wronginc = declare_dependency(include_directories : j) diff --git a/meson/test cases/common/130 include order/sub3/main.h b/meson/test cases/common/130 include order/sub3/main.h new file mode 100644 index 000000000..1ab723141 --- /dev/null +++ b/meson/test cases/common/130 include order/sub3/main.h @@ -0,0 +1 @@ +#error "sub3/main.h included" diff --git a/meson/test cases/common/130 include order/sub3/meson.build b/meson/test cases/common/130 include order/sub3/meson.build new file mode 100644 index 000000000..0bd3906c1 --- /dev/null +++ b/meson/test cases/common/130 include order/sub3/meson.build @@ -0,0 +1 @@ +sub3 = meson.current_source_dir() diff --git a/meson/test cases/common/130 include order/sub4/main.c b/meson/test cases/common/130 include order/sub4/main.c new file mode 100644 index 000000000..89226a464 --- /dev/null +++ b/meson/test cases/common/130 include order/sub4/main.c @@ -0,0 +1,8 @@ +/* Use the <> include notation to force searching in include directories */ +#include + +int main(void) { + if (somefunc() == 1984) + return 0; + return 1; +} diff --git a/meson/test cases/common/130 include order/sub4/main.h b/meson/test cases/common/130 include order/sub4/main.h new file mode 100644 index 000000000..194d7fe5f --- /dev/null +++ b/meson/test cases/common/130 include order/sub4/main.h @@ -0,0 +1,3 @@ +#pragma once + +#include "some.h" diff --git a/meson/test cases/common/130 include order/sub4/meson.build b/meson/test cases/common/130 include order/sub4/meson.build new file mode 100644 index 000000000..c01edaa20 --- /dev/null +++ b/meson/test cases/common/130 include order/sub4/meson.build @@ -0,0 +1,6 @@ +e = executable('someexe', 'main.c', ctfile, + c_args : ['-I' + sub3], + include_directories : j, + dependencies : dep) + +correctinc = declare_dependency(include_directories : include_directories('.')) diff --git a/meson/test cases/common/131 override options/four.c b/meson/test cases/common/131 override options/four.c new file mode 100644 index 000000000..44e344f72 --- /dev/null +++ b/meson/test cases/common/131 override options/four.c @@ -0,0 +1,9 @@ +int func(void); + +static int duplicate_func(void) { + return -4; +} + +int main(void) { + return duplicate_func() + func(); +} diff --git a/meson/test cases/common/131 override options/meson.build b/meson/test cases/common/131 override options/meson.build new file mode 100644 index 000000000..4dd8d797e --- /dev/null +++ b/meson/test cases/common/131 override options/meson.build @@ -0,0 +1,6 @@ +project('option override', 'c', + default_options : 'unity=on') + +executable('mustunity', 'one.c', 'two.c') +executable('notunity', 'three.c', 'four.c', + override_options : ['unity=off']) diff --git a/meson/test cases/common/131 override options/one.c b/meson/test cases/common/131 override options/one.c new file mode 100644 index 000000000..6120d4889 --- /dev/null +++ b/meson/test cases/common/131 override options/one.c @@ -0,0 +1,3 @@ +static int hidden_func(void) { + return 0; +} diff --git a/meson/test cases/common/131 override options/three.c b/meson/test cases/common/131 override options/three.c new file mode 100644 index 000000000..094260e04 --- /dev/null +++ b/meson/test cases/common/131 override options/three.c @@ -0,0 +1,7 @@ +static int duplicate_func(void) { + return 4; +} + +int func(void) { + return duplicate_func(); +} diff --git a/meson/test cases/common/131 override options/two.c b/meson/test cases/common/131 
override options/two.c new file mode 100644 index 000000000..0f8048c21 --- /dev/null +++ b/meson/test cases/common/131 override options/two.c @@ -0,0 +1,6 @@ +/* + * Requires a Unity build. Otherwise hidden_func is not specified. + */ +int main(void) { + return hidden_func(); +} diff --git a/meson/test cases/common/132 get define/concat.h b/meson/test cases/common/132 get define/concat.h new file mode 100644 index 000000000..6eb3e5e3b --- /dev/null +++ b/meson/test cases/common/132 get define/concat.h @@ -0,0 +1,24 @@ +#define __STRINGIFY(x) #x +#define TEST_STRINGIFY(x) __STRINGIFY(x) + +#define TEST_VERSION_MAJOR 6 +#define TEST_VERSION_MINOR 0 +#define TEST_VERSION_BUGFIX 0 + +#define TEST_VERSION_STR \ + TEST_STRINGIFY(TEST_VERSION_MAJOR) \ + "." TEST_STRINGIFY(TEST_VERSION_MINOR) "." TEST_STRINGIFY( \ + TEST_VERSION_BUGFIX) + +#define TEST_CONCAT_1 \ + "ab" \ + "cd" \ + "ef" \ + "" +#define TEST_CONCAT_2 1 +#define TEST_CONCAT_3 1 2 3 +#define TEST_CONCAT_4 "ab" 1 "cd" +#define TEST_CONCAT_5 \ + "ab\"" \ + "cd" +#define TEST_CONCAT_6 "ab\" \"cd" diff --git a/meson/test cases/common/132 get define/meson.build b/meson/test cases/common/132 get define/meson.build new file mode 100644 index 000000000..df3d02ab6 --- /dev/null +++ b/meson/test cases/common/132 get define/meson.build @@ -0,0 +1,104 @@ +project('get define', 'c', 'cpp') + +host_system = host_machine.system() + +foreach lang : ['c', 'cpp'] + cc = meson.get_compiler(lang) + if host_system == 'linux' + d = cc.get_define('__linux__') + assert(d == '1', '__linux__ value is @0@ instead of 1'.format(d)) + elif host_system == 'darwin' + d = cc.get_define('__APPLE__') + assert(d == '1', '__APPLE__ value is @0@ instead of 1'.format(d)) + elif host_system == 'windows' + d = cc.get_define('_WIN32') + assert(d == '1', '_WIN32 value is @0@ instead of 1'.format(d)) + elif host_system == 'cygwin' + d = cc.get_define('__CYGWIN__') + assert(d == '1', '__CYGWIN__ value is @0@ instead of 1'.format(d)) + elif host_system == 'haiku' + d = cc.get_define('__HAIKU__') + assert(d == '1', '__HAIKU__ value is @0@ instead of 1'.format(d)) + elif host_system == 'freebsd' + # the __FreeBSD__ define will be equal to the major version of the release + # (ex, in FreeBSD 11.x, __FreeBSD__ == 11). To make the test robust when + # being run on various versions of FreeBSD, just test that the define is + # set. + d = cc.get_define('__FreeBSD__') + assert(d != '', '__FreeBSD__ value is unset') + elif host_system == 'dragonfly' + d = cc.get_define('__DragonFly__') + assert(d == '1', '__DragonFly__ value is @0@ instead of 1'.format(d)) + elif host_system == 'netbsd' + d = cc.get_define('__NetBSD__') + assert(d == '1', '__NetBSD__ value is @0@ instead of 1'.format(d)) + elif host_system == 'openbsd' + d = cc.get_define('__OpenBSD__') + assert(d == '1', '__OpenBSD__ value is @0@ instead of 1'.format(d)) + elif host_system == 'gnu' + d = cc.get_define('__GNU__') + assert(d == '1', '__GNU__ value is @0@ instead of 1'.format(d)) + elif host_system == 'sunos' + d = cc.get_define('__sun__') + assert(d == '1', '__sun__ value is @0@ instead of 1'.format(d)) + else + error('Please report a bug and help us improve support for this platform') + endif + + if cc.find_library('z', required : false).found() + # When a C file containing #include is pre-processed and foo.h is + # found in the compiler's default search path, GCC inserts an extra comment + # between the delimiter and the define which causes a parsing error. 
+ # https://github.com/mesonbuild/meson/issues/1726 + if host_machine.system() == 'netbsd' or host_machine.system() == 'openbsd' + # NetBSD and OpenBSD's zlib don't have a ZLIB_VER_MAJOR, but they do have + # a ZLIB_VERSION (which is a string), so check the first non-quote + # character of that. + ver = cc.get_define('ZLIB_VERSION', prefix : '#include <zlib.h>')[1] + assert(ver == '1', 'ZLIB_VERSION (major) value is "@0@" instead of "1"'.format(ver)) + else + ver = cc.get_define('ZLIB_VER_MAJOR', prefix : '#include <zlib.h>') + assert(ver == '1', 'ZLIB_VER_MAJOR value is "@0@" instead of "1"'.format(ver)) + endif + endif + + # Check that an undefined value is empty. + have = cc.get_define('MESON_FAIL_VALUE') + assert(have == '', 'MESON_FAIL_VALUE value is "@0@" instead of ""'.format(have)) + + # This is used in the test_preprocessor_checks_CPPFLAGS() unit test. + have = cc.get_define('MESON_TEST_DEFINE_VALUE') + expect = get_option('MESON_TEST_DEFINE_VALUE') + assert(have == expect, 'MESON_TEST_DEFINE_VALUE value is "@0@" instead of "@1@"'.format(have, expect)) + + run_1665_test = false + if meson.is_cross_build() + lang_arg = meson.get_cross_property(lang + '_args', '') + if lang_arg == '-DMESON_TEST_ISSUE_1665=1' + run_1665_test = true + endif + endif + + if run_1665_test + have = cc.get_define('MESON_TEST_ISSUE_1665') + assert(have == '1', 'MESON_TEST_ISSUE_1665 value is "@0@" instead of "1"'.format(have)) + endif + + have = cc.get_define('TEST_VERSION_STR', + prefix : '#include <concat.h>', include_directories: include_directories('.')) + assert(have == '"6.0.0"', 'TEST_VERSION_STR value is "@0@" instead of ""6.0.0""'.format(have)) + + concat_examples = { + 'TEST_CONCAT_1': '"abcdef"', + 'TEST_CONCAT_2': '1', + 'TEST_CONCAT_3': '1 2 3', + 'TEST_CONCAT_4': '"ab" 1 "cd"', + 'TEST_CONCAT_5': '"ab\"cd"', + 'TEST_CONCAT_6': '"ab\" \"cd"', + } + foreach def,expected : concat_examples + have = cc.get_define(def, + prefix : '#include <concat.h>', include_directories: include_directories('.')) + assert(have == expected, '@0@ value is "@1@" instead of "@2@"'.format(def, have, expected)) + endforeach +endforeach diff --git a/meson/test cases/common/132 get define/meson_options.txt b/meson/test cases/common/132 get define/meson_options.txt new file mode 100644 index 000000000..7d34a2e60 --- /dev/null +++ b/meson/test cases/common/132 get define/meson_options.txt @@ -0,0 +1 @@ +option('MESON_TEST_DEFINE_VALUE', type : 'string', value : '') diff --git a/meson/test cases/common/133 c cpp and asm/main.c b/meson/test cases/common/133 c cpp and asm/main.c new file mode 100644 index 000000000..293258f51 --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/main.c @@ -0,0 +1,8 @@ +#include <stdio.h> + +int get_retval(void); + +int main(void) { + printf("C seems to be working.\n"); + return get_retval(); +} diff --git a/meson/test cases/common/133 c cpp and asm/main.cpp b/meson/test cases/common/133 c cpp and asm/main.cpp new file mode 100644 index 000000000..debb97ac6 --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/main.cpp @@ -0,0 +1,11 @@ +#include <iostream> + +extern "C" { + int get_retval(void); + int get_cval(void); +} + +int main(void) { + std::cout << "C++ seems to be working." 
<< std::endl; + return get_retval(); +} diff --git a/meson/test cases/common/133 c cpp and asm/meson.build b/meson/test cases/common/133 c cpp and asm/meson.build new file mode 100644 index 000000000..ca820e2ae --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/meson.build @@ -0,0 +1,23 @@ +project('c cpp and asm', 'c', 'cpp') + +cpu = host_machine.cpu_family() +cc = meson.get_compiler('c') + +supported_cpus = ['arm', 'x86', 'x86_64'] + +if not supported_cpus.contains(cpu) + error('MESON_SKIP_TEST unsupported cpu:' + cpu) +endif + +if meson.get_compiler('c').get_argument_syntax() == 'msvc' + error('MESON_SKIP_TEST MSVC can\'t compile assembly') +endif + +if cc.symbols_have_underscore_prefix() + add_project_arguments('-DMESON_TEST__UNDERSCORE_SYMBOL', language: 'c') +endif + +test('test-c-asm', executable('c-asm', ['main.c', 'retval-' + cpu + '.S'])) +test('test-cpp-asm', executable('cpp-asm', ['main.cpp', 'retval-' + cpu + '.S'])) +test('test-c-cpp-asm', executable('c-cpp-asm', ['somelib.c', 'main.cpp', 'retval-' + cpu + '.S'])) +test('test-cpp-c-asm', executable('cpp-c-asm', ['main.cpp', 'somelib.c', 'retval-' + cpu + '.S'])) diff --git a/meson/test cases/common/133 c cpp and asm/retval-arm.S b/meson/test cases/common/133 c cpp and asm/retval-arm.S new file mode 100644 index 000000000..a8923624a --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/retval-arm.S @@ -0,0 +1,11 @@ +#include "symbol-underscore.h" + +.text +.globl SYMBOL_NAME(get_retval) +# ifdef __linux__ +.type get_retval, %function +#endif + +SYMBOL_NAME(get_retval): + mov r0, #0 + mov pc, lr diff --git a/meson/test cases/common/133 c cpp and asm/retval-x86.S b/meson/test cases/common/133 c cpp and asm/retval-x86.S new file mode 100644 index 000000000..3cb023764 --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/retval-x86.S @@ -0,0 +1,12 @@ +#include "symbol-underscore.h" + +.text +.globl SYMBOL_NAME(get_retval) +/* Only supported on Linux with GAS */ +# ifdef __linux__ +.type get_retval, %function +#endif + +SYMBOL_NAME(get_retval): + xorl %eax, %eax + retl diff --git a/meson/test cases/common/133 c cpp and asm/retval-x86_64.S b/meson/test cases/common/133 c cpp and asm/retval-x86_64.S new file mode 100644 index 000000000..1a5f3eb23 --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/retval-x86_64.S @@ -0,0 +1,11 @@ +#include "symbol-underscore.h" + +.text +.globl SYMBOL_NAME(get_retval) +# ifdef __linux__ +.type get_retval, %function +#endif + +SYMBOL_NAME(get_retval): + xorl %eax, %eax + retq diff --git a/meson/test cases/common/133 c cpp and asm/somelib.c b/meson/test cases/common/133 c cpp and asm/somelib.c new file mode 100644 index 000000000..e585b8e9f --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/somelib.c @@ -0,0 +1,3 @@ +int get_cval (void) { + return 0; +} diff --git a/meson/test cases/common/133 c cpp and asm/symbol-underscore.h b/meson/test cases/common/133 c cpp and asm/symbol-underscore.h new file mode 100644 index 000000000..d0f3ef9cc --- /dev/null +++ b/meson/test cases/common/133 c cpp and asm/symbol-underscore.h @@ -0,0 +1,5 @@ +#if defined(MESON_TEST__UNDERSCORE_SYMBOL) +# define SYMBOL_NAME(name) _##name +#else +# define SYMBOL_NAME(name) name +#endif diff --git a/meson/test cases/common/134 compute int/config.h.in b/meson/test cases/common/134 compute int/config.h.in new file mode 100644 index 000000000..0de63ab7d --- /dev/null +++ b/meson/test cases/common/134 compute int/config.h.in @@ -0,0 +1,4 @@ +#define INTSIZE @INTSIZE@ +#define 
FOOBAR_IN_CONFIG_H @FOOBAR@ +#define MAXINT @MAXINT@ +#define MININT @MININT@ diff --git a/meson/test cases/common/134 compute int/foobar.h b/meson/test cases/common/134 compute int/foobar.h new file mode 100644 index 000000000..fd3cb5ea6 --- /dev/null +++ b/meson/test cases/common/134 compute int/foobar.h @@ -0,0 +1,6 @@ +#ifndef __FOOBAR_H__ +#define __FOOBAR_H__ + +#define FOOBAR_IN_FOOBAR_H 10 + +#endif /*__FOOBAR_H__*/ diff --git a/meson/test cases/common/134 compute int/meson.build b/meson/test cases/common/134 compute int/meson.build new file mode 100644 index 000000000..89f4746fe --- /dev/null +++ b/meson/test cases/common/134 compute int/meson.build @@ -0,0 +1,46 @@ +project('compute int', 'c', 'cpp') + +inc = include_directories('.') + +# Test with C +cc = meson.get_compiler('c') + +intsize = cc.compute_int('sizeof(int)', low : 1, high : 16, guess : 4) +foobar = cc.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc) +maxint = cc.compute_int('INT_MAX', prefix: '#include ') +minint = cc.compute_int('INT_MIN', prefix: '#include ') + +# Regression test for the special case -1 that used to fail when cross compiling +assert(cc.compute_int('-1') == -1, 'compute_int(-1) failed') + +cd = configuration_data() +cd.set('INTSIZE', intsize) +cd.set('FOOBAR', foobar) +cd.set('CONFIG', 'config.h') +cd.set('MAXINT', maxint) +cd.set('MININT', minint) +configure_file(input : 'config.h.in', output : 'config.h', configuration : cd) +s = configure_file(input : 'prog.c.in', output : 'prog.c', configuration : cd) + +e = executable('prog', s) +test('compute int test', e) + +# Test with C++ +cpp = meson.get_compiler('cpp') + +intsize = cpp.compute_int('sizeof(int)') +foobar = cpp.compute_int('FOOBAR_IN_FOOBAR_H', prefix : '#include "foobar.h"', include_directories : inc) +maxint = cpp.compute_int('INT_MAX', prefix: '#include ') +minint = cpp.compute_int('INT_MIN', prefix: '#include ') + +cdpp = configuration_data() +cdpp.set('INTSIZE', intsize) +cdpp.set('FOOBAR', foobar) +cdpp.set('CONFIG', 'config.hpp') +cdpp.set('MAXINT', maxint) +cdpp.set('MININT', minint) +configure_file(input : 'config.h.in', output : 'config.hpp', configuration : cdpp) +spp = configure_file(input : 'prog.c.in', output : 'prog.cc', configuration : cdpp) + +epp = executable('progpp', spp) +test('compute int test c++', epp) diff --git a/meson/test cases/common/134 compute int/prog.c.in b/meson/test cases/common/134 compute int/prog.c.in new file mode 100644 index 000000000..0983aff17 --- /dev/null +++ b/meson/test cases/common/134 compute int/prog.c.in @@ -0,0 +1,25 @@ +#include "@CONFIG@" +#include +#include +#include +#include "foobar.h" + +int main(void) { + if(INTSIZE != sizeof(int)) { + fprintf(stderr, "Mismatch: computed int size %d, actual size %d.\n", INTSIZE, (int)sizeof(int)); + return 1; + } + if(FOOBAR_IN_CONFIG_H != FOOBAR_IN_FOOBAR_H) { + fprintf(stderr, "Mismatch: computed int %d, should be %d.\n", FOOBAR_IN_CONFIG_H, FOOBAR_IN_FOOBAR_H); + return 1; + } + if(MAXINT != INT_MAX) { + fprintf(stderr, "Mismatch: computed max int %d, should be %d.\n", MAXINT, INT_MAX); + return 1; + } + if(MININT != INT_MIN) { + fprintf(stderr, "Mismatch: computed min int %d, should be %d.\n", MININT, INT_MIN); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/135 custom target object output/meson.build b/meson/test cases/common/135 custom target object output/meson.build new file mode 100644 index 000000000..ede165bd4 --- /dev/null +++ b/meson/test cases/common/135 custom target 
object output/meson.build @@ -0,0 +1,16 @@ +project('custom target object output', 'c') + +comp = find_program('obj_generator.py') + +if host_machine.system() == 'windows' + outputname = '@BASENAME@.obj' +else + outputname = '@BASENAME@.o' +endif + +cc = meson.get_compiler('c').cmd_array().get(-1) + +subdir('objdir') +subdir('progdir') + +test('objgen', e) diff --git a/meson/test cases/common/135 custom target object output/obj_generator.py b/meson/test cases/common/135 custom target object output/obj_generator.py new file mode 100644 index 000000000..a33872aac --- /dev/null +++ b/meson/test cases/common/135 custom target object output/obj_generator.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +# Mimic a binary that generates an object file (e.g. windres). + +import sys, subprocess + +if __name__ == '__main__': + if len(sys.argv) != 4: + print(sys.argv[0], 'compiler input_file output_file') + sys.exit(1) + compiler = sys.argv[1] + ifile = sys.argv[2] + ofile = sys.argv[3] + if compiler.endswith('cl'): + cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile] + else: + cmd = [compiler, '-c', ifile, '-o', ofile] + sys.exit(subprocess.call(cmd)) diff --git a/meson/test cases/common/135 custom target object output/objdir/meson.build b/meson/test cases/common/135 custom target object output/objdir/meson.build new file mode 100644 index 000000000..0d7f6c259 --- /dev/null +++ b/meson/test cases/common/135 custom target object output/objdir/meson.build @@ -0,0 +1,5 @@ +# Generate an object file manually. +object = custom_target('object', + input : 'source.c', + output : outputname, + command : [comp, cc, '@INPUT@', '@OUTPUT@']) diff --git a/meson/test cases/common/135 custom target object output/objdir/source.c b/meson/test cases/common/135 custom target object output/objdir/source.c new file mode 100644 index 000000000..1dc08e168 --- /dev/null +++ b/meson/test cases/common/135 custom target object output/objdir/source.c @@ -0,0 +1,3 @@ +int func1_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/135 custom target object output/progdir/meson.build b/meson/test cases/common/135 custom target object output/progdir/meson.build new file mode 100644 index 000000000..4216c24a6 --- /dev/null +++ b/meson/test cases/common/135 custom target object output/progdir/meson.build @@ -0,0 +1 @@ +e = executable('prog', 'prog.c', object) diff --git a/meson/test cases/common/135 custom target object output/progdir/prog.c b/meson/test cases/common/135 custom target object output/progdir/prog.c new file mode 100644 index 000000000..bc3caf3e2 --- /dev/null +++ b/meson/test cases/common/135 custom target object output/progdir/prog.c @@ -0,0 +1,5 @@ +int func1_in_obj(void); + +int main(void) { + return func1_in_obj(); +} diff --git a/meson/test cases/common/136 empty build file/meson.build b/meson/test cases/common/136 empty build file/meson.build new file mode 100644 index 000000000..73d03974f --- /dev/null +++ b/meson/test cases/common/136 empty build file/meson.build @@ -0,0 +1,2 @@ +project('subdir with empty meson.build test', 'c') +subdir('subdir') diff --git a/meson/test cases/common/136 empty build file/subdir/meson.build b/meson/test cases/common/136 empty build file/subdir/meson.build new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/137 whole archive/exe/meson.build b/meson/test cases/common/137 whole archive/exe/meson.build new file mode 100644 index 000000000..91d298dce --- /dev/null +++ b/meson/test cases/common/137 whole archive/exe/meson.build 
@@ -0,0 +1 @@ +exe = executable('prog', '../prog.c', link_with : sh_func2_linked_func1) diff --git a/meson/test cases/common/137 whole archive/exe2/meson.build b/meson/test cases/common/137 whole archive/exe2/meson.build new file mode 100644 index 000000000..918486402 --- /dev/null +++ b/meson/test cases/common/137 whole archive/exe2/meson.build @@ -0,0 +1 @@ +exe2 = executable('prog2', '../prog.c', link_with : sh_only_link_whole) diff --git a/meson/test cases/common/137 whole archive/exe3/meson.build b/meson/test cases/common/137 whole archive/exe3/meson.build new file mode 100644 index 000000000..82cf57e05 --- /dev/null +++ b/meson/test cases/common/137 whole archive/exe3/meson.build @@ -0,0 +1 @@ +exe3 = executable('prog3', '../prog.c', link_with : sh_func2_dep_func1) diff --git a/meson/test cases/common/137 whole archive/exe4/meson.build b/meson/test cases/common/137 whole archive/exe4/meson.build new file mode 100644 index 000000000..0781250fb --- /dev/null +++ b/meson/test cases/common/137 whole archive/exe4/meson.build @@ -0,0 +1 @@ +exe4 = executable('prog4', '../prog.c', link_with : sh_func2_transdep_func1) diff --git a/meson/test cases/common/137 whole archive/func1.c b/meson/test cases/common/137 whole archive/func1.c new file mode 100644 index 000000000..161c5da1c --- /dev/null +++ b/meson/test cases/common/137 whole archive/func1.c @@ -0,0 +1,7 @@ +#define BUILDING_DLL + +#include + +int func1(void) { + return 42; +} diff --git a/meson/test cases/common/137 whole archive/func2.c b/meson/test cases/common/137 whole archive/func2.c new file mode 100644 index 000000000..4fe7150ff --- /dev/null +++ b/meson/test cases/common/137 whole archive/func2.c @@ -0,0 +1,7 @@ +#define BUILDING_DLL + +#include + +int func2(void) { + return 42; +} diff --git a/meson/test cases/common/137 whole archive/meson.build b/meson/test cases/common/137 whole archive/meson.build new file mode 100644 index 000000000..d4cbb832c --- /dev/null +++ b/meson/test cases/common/137 whole archive/meson.build @@ -0,0 +1,49 @@ +project('whole archive', 'c') + +if meson.backend() == 'xcode' or \ + meson.backend() == 'vs2010' or \ + meson.backend() == 'vs2012' or \ + meson.backend() == 'vs2013' + error('MESON_SKIP_TEST: whole-archive not supported in Xcode nor pre-VS2015 IDE. 
Patches welcome.') +endif + +add_project_arguments('-I' + meson.source_root(), language : 'c') + +# Test 1: link_whole keeps all symbols +# Make static func1 +subdir('st_func1') +# Make shared func2 linking whole func1 archive +subdir('sh_func2_linked_func1') +# Link exe with shared library only +subdir('exe') +# Test that both func1 and func2 are accessible from shared library +test('prog', exe) + +# Test 2: link_whole can be used instead of source list, see #2180 +# Make static func2 +subdir('st_func2') +# Link both func1 and func2 into same shared library +# which does not have any sources other than 2 static libraries +subdir('sh_only_link_whole') +# Link exe2 with shared library only +subdir('exe2') +# Test that both func1 and func2 are accessible from shared library +test('prog2', exe2) + +# Test 3: link_whole can be used in declare_dependency() +func1_dep = declare_dependency(link_whole : [st_func1]) +# Use dependency to link func1 into shared library +subdir('sh_func2_dep_func1') +# Link exe3 with shared library +subdir('exe3') +# Test that both func1 and func2 are accessible from shared library +test('prog3', exe3) + +# Test 4: link_whole can be used in transitive declare_dependency() +func1_trans_dep = declare_dependency(dependencies : func1_dep) +# Use transitive dependency to link func1 into shared library +subdir('sh_func2_transdep_func1') +# Link exe4 with shared library +subdir('exe4') +# Test that both func1 and func2 are accessible from shared library +test('prog4', exe4) diff --git a/meson/test cases/common/137 whole archive/mylib.h b/meson/test cases/common/137 whole archive/mylib.h new file mode 100644 index 000000000..79ce585ef --- /dev/null +++ b/meson/test cases/common/137 whole archive/mylib.h @@ -0,0 +1,21 @@ +#pragma once + +/* Both funcs here for simplicity. */ + +#if defined _WIN32 || defined __CYGWIN__ +#if defined BUILDING_DLL + #define DLL_PUBLIC __declspec(dllexport) +#else + #define DLL_PUBLIC __declspec(dllimport) +#endif +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func1(void); +int DLL_PUBLIC func2(void); diff --git a/meson/test cases/common/137 whole archive/prog.c b/meson/test cases/common/137 whole archive/prog.c new file mode 100644 index 000000000..1f553e554 --- /dev/null +++ b/meson/test cases/common/137 whole archive/prog.c @@ -0,0 +1,5 @@ +#include + +int main(void) { + return func1() - func2(); +} diff --git a/meson/test cases/common/137 whole archive/sh_func2_dep_func1/meson.build b/meson/test cases/common/137 whole archive/sh_func2_dep_func1/meson.build new file mode 100644 index 000000000..92baca6fb --- /dev/null +++ b/meson/test cases/common/137 whole archive/sh_func2_dep_func1/meson.build @@ -0,0 +1,4 @@ +# Same as sh_func2_linked_func1, # func2.c does not depend on func1(), +# so without link_whole compiler would throw func1() away. +# This is the same version of the test with a dependency object instead. 
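A small hedged sketch of the pattern this directory exercises, packaging the link_whole guarantee as a dependency object; the source and target names (util.c, shared.c, st_util) are hypothetical and do not exist in the test:

    project('link_whole dependency sketch', 'c')
    # Nothing in shared.c references the symbols in util.c directly, so a plain
    # static link would let the linker drop them.
    st_util = static_library('st_util', 'util.c')
    # link_whole inside declare_dependency() forces every object of the archive
    # into whatever consumes the dependency, keeping those symbols exported.
    util_dep = declare_dependency(link_whole : st_util)
    shared_library('shared', 'shared.c', dependencies : util_dep)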
+sh_func2_dep_func1 = shared_library('sh_func2_dep_func1', '../func2.c', dependencies : func1_dep) diff --git a/meson/test cases/common/137 whole archive/sh_func2_linked_func1/meson.build b/meson/test cases/common/137 whole archive/sh_func2_linked_func1/meson.build new file mode 100644 index 000000000..2858f65ec --- /dev/null +++ b/meson/test cases/common/137 whole archive/sh_func2_linked_func1/meson.build @@ -0,0 +1,3 @@ +# Nothing in func2.c uses func1, so the linker would throw it +# away and thus linking the exe would fail. +sh_func2_linked_func1 = shared_library('sh_func2_linked_func1', '../func2.c', link_whole : st_func1) diff --git a/meson/test cases/common/137 whole archive/sh_func2_transdep_func1/meson.build b/meson/test cases/common/137 whole archive/sh_func2_transdep_func1/meson.build new file mode 100644 index 000000000..07030772e --- /dev/null +++ b/meson/test cases/common/137 whole archive/sh_func2_transdep_func1/meson.build @@ -0,0 +1,6 @@ +# Same as sh_func2_dep_func1 but dependency is transitive. +# func2.c does not have any reference to func1() so without link_whole compiler +# should throw func1() out. +sh_func2_transdep_func1 = shared_library( + 'sh_func2_transdep_func1', '../func2.c', + dependencies : func1_trans_dep) diff --git a/meson/test cases/common/137 whole archive/sh_only_link_whole/meson.build b/meson/test cases/common/137 whole archive/sh_only_link_whole/meson.build new file mode 100644 index 000000000..64baabd5d --- /dev/null +++ b/meson/test cases/common/137 whole archive/sh_only_link_whole/meson.build @@ -0,0 +1 @@ +sh_only_link_whole = shared_library('sh_only_link_whole', link_whole : [st_func1, st_func2]) diff --git a/meson/test cases/common/137 whole archive/st_func1/meson.build b/meson/test cases/common/137 whole archive/st_func1/meson.build new file mode 100644 index 000000000..c84d78109 --- /dev/null +++ b/meson/test cases/common/137 whole archive/st_func1/meson.build @@ -0,0 +1 @@ +st_func1 = static_library('st_func1', '../func1.c') diff --git a/meson/test cases/common/137 whole archive/st_func2/meson.build b/meson/test cases/common/137 whole archive/st_func2/meson.build new file mode 100644 index 000000000..2732f96ec --- /dev/null +++ b/meson/test cases/common/137 whole archive/st_func2/meson.build @@ -0,0 +1 @@ +st_func2 = static_library('st_func2', '../func2.c') diff --git a/meson/test cases/common/138 C and CPP link/dummy.c b/meson/test cases/common/138 C and CPP link/dummy.c new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/138 C and CPP link/foo.c b/meson/test cases/common/138 C and CPP link/foo.c new file mode 100644 index 000000000..77c7e39ff --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/foo.c @@ -0,0 +1,19 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include "foo.h" + +int forty_two(void) { + return 42; +} diff --git a/meson/test cases/common/138 C and CPP link/foo.cpp b/meson/test cases/common/138 C and CPP link/foo.cpp new file mode 100644 index 000000000..9db7fb238 --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/foo.cpp @@ -0,0 +1,34 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include + +const int cnums[] = {0, 61}; + +/* Provided by foobar.c */ +extern "C" int get_number_index (void); + +template +std::vector makeVector(const T (&data)[N]) +{ + return std::vector(data, data+N); +} + +namespace { + std::vector numbers = makeVector(cnums); +} + +extern "C" int six_one(void) { + return numbers[get_number_index ()]; +} diff --git a/meson/test cases/common/138 C and CPP link/foo.h b/meson/test cases/common/138 C and CPP link/foo.h new file mode 100644 index 000000000..1ed8ce9b3 --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/foo.h @@ -0,0 +1,16 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +int forty_two(void); diff --git a/meson/test cases/common/138 C and CPP link/foo.hpp b/meson/test cases/common/138 C and CPP link/foo.hpp new file mode 100644 index 000000000..e47f01dce --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/foo.hpp @@ -0,0 +1,24 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifdef __cplusplus +extern "C" { +#endif + +int six_one(void); + +#ifdef __cplusplus +} +#endif diff --git a/meson/test cases/common/138 C and CPP link/foobar.c b/meson/test cases/common/138 C and CPP link/foobar.c new file mode 100644 index 000000000..27928bff5 --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/foobar.c @@ -0,0 +1,27 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "foo.h" +#include "foo.hpp" +#include "foobar.h" + +int get_number_index (void) { + return 1; +} + +void mynumbers(int nums[]) { + nums[0] = forty_two(); + nums[1] = six_one(); +} diff --git a/meson/test cases/common/138 C and CPP link/foobar.h b/meson/test cases/common/138 C and CPP link/foobar.h new file mode 100644 index 000000000..6dcb09664 --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/foobar.h @@ -0,0 +1,16 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +void mynumbers(int nums[]); diff --git a/meson/test cases/common/138 C and CPP link/meson.build b/meson/test cases/common/138 C and CPP link/meson.build new file mode 100644 index 000000000..32d1843ad --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/meson.build @@ -0,0 +1,133 @@ +# Copyright © 2017 Dylan Baker +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +project('C and C++ static link test', ['c', 'cpp']) + +if meson.backend() == 'xcode' + error('''MESON_SKIP_TEST: overriding link language is not supported in Xcode. + +If you really need this, then patches are welcome. The only known way is +to create a dummy C++ file in the meson-private directory and adding +that to the target's source list when needed. The primitives exist +but may need some tweaking. Grep for language_stdlib_only_link_flags to find +where this is handled in other backends.''') +endif + +# Verify that adding link arguments works. +add_global_link_arguments('', language : 'c') +add_project_link_arguments('', language : 'c') + +libc = static_library('cfoo', ['foo.c', 'foo.h']) + +# Test that linking C libs to external static C++ libs uses the C++ linker +# Since we can't depend on the test system to provide this, we create one +# ourselves at configure time and then 'find' it with cxx.find_library(). 
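A condensed sketch of that configure-time trick, covering only the GCC/ar case; ext.cpp, user.c and the 'ext' library name are hypothetical, and the real test below also handles MSVC-style toolchains:

    project('external static c++ lib sketch', 'c', 'cpp')
    cxx = meson.get_compiler('cpp')
    # Build one object and archive it while configuring, so the archive exists
    # before any build targets are defined...
    ext_obj = configure_file(input : 'ext.cpp', output : 'ext.o',
      command : cxx.cmd_array() + ['-c', '-fPIC', '@INPUT@', '-o', '@OUTPUT@'])
    configure_file(input : ext_obj, output : 'libext.a',
      command : ['ar', 'csr', '@OUTPUT@', '@INPUT@'])
    # ...and can then be discovered like any other prebuilt library.
    ext_dep = cxx.find_library('ext', dirs : meson.current_build_dir())
    shared_library('uses_ext', 'user.c', dependencies : ext_dep)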
+cxx = meson.get_compiler('cpp') + +if cxx.get_argument_syntax() == 'msvc' + if cxx.get_id() == 'msvc' + static_linker = find_program('lib') + elif cxx.get_id() == 'clang-cl' + static_linker = find_program('llvm-lib') + elif cxx.get_id() == 'intel-cl' + static_linker = find_program('xilib') + else + error('unable to determine static linker to use with this compiler') + endif + compile_cmd = ['/c', '@INPUT@', '/Fo@OUTPUT@'] + stlib_cmd = [static_linker, '/OUT:@OUTPUT@', '@INPUT@'] +else + picflag = [] + if not ['darwin', 'windows'].contains(host_machine.system()) + picflag = ['-fPIC'] + endif + compile_cmd = ['-c', picflag, '@INPUT@', '-o', '@OUTPUT@'] + stlib_cmd = ['ar', 'csr', '@OUTPUT@', '@INPUT@'] +endif + +foo_cpp_o = configure_file( + input : 'foo.cpp', + output : 'foo.cpp.o', + command : cxx.cmd_array() + compile_cmd) + +configure_file( + input : foo_cpp_o, + output : 'libstcppext.a', + command : stlib_cmd) + +libstcppext = cxx.find_library('stcppext', dirs : meson.current_build_dir()) +lib_type_name = libstcppext.type_name() +assert(lib_type_name == 'library', 'type name is ' + lib_type_name) + +libfooext = shared_library( + 'fooext', + ['foobar.c', 'foobar.h'], + link_with : libc, + dependencies : libstcppext, +) + +# Test that linking C libs to internal static C++ libs uses the C++ linker +libcpp = static_library('cppfoo', ['foo.cpp', 'foo.hpp']) + +libfoo = shared_library( + 'foo', + ['foobar.c', 'foobar.h'], + link_with : [libc, libcpp], +) + +# Test that link_whole is also honored +# +# VS2010 lacks the /WHOLEARCHIVE option that later versions of MSVC support, so +# don't run this tests on that backend. +if not (cxx.get_id() == 'msvc' and cxx.version().version_compare('<19')) + libfoowhole = shared_library( + 'foowhole', + ['foobar.c', 'foobar.h'], + link_whole : [libc, libcpp], + ) +endif + +# Test sublinking (linking C and C++, then linking that to C) +libfoo_static = static_library( + 'foo_static', + ['foobar.c', 'foobar.h'], + link_with : [libc, libcpp], +) + +libsub = shared_library( + 'sub', + ['sub.c', 'sub.h'], + link_with : libfoo_static, +) + +if not (cxx.get_id() == 'msvc' and cxx.version().version_compare('<19')) + libsubwhole = shared_library( + 'subwhole', + ['sub.c', 'sub.h'], + link_whole : libfoo_static, + ) +endif + +# Test that it really is recursive +libsub_static = static_library( + 'sub_static', + ['sub.c', 'sub.h'], + link_with : libfoo_static, +) + +libsubsub = shared_library( + 'subsub', + ['dummy.c'], + link_with : libsub_static, +) diff --git a/meson/test cases/common/138 C and CPP link/sub.c b/meson/test cases/common/138 C and CPP link/sub.c new file mode 100644 index 000000000..7c078f8e3 --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/sub.c @@ -0,0 +1,19 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include "sub.h" + +float a_half(void) { + return .5; +} diff --git a/meson/test cases/common/138 C and CPP link/sub.h b/meson/test cases/common/138 C and CPP link/sub.h new file mode 100644 index 000000000..5b02e172d --- /dev/null +++ b/meson/test cases/common/138 C and CPP link/sub.h @@ -0,0 +1,16 @@ +/* Copyright © 2017 Dylan Baker + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +float a_half(void); diff --git a/meson/test cases/common/139 mesonintrospect from scripts/check_env.py b/meson/test cases/common/139 mesonintrospect from scripts/check_env.py new file mode 100644 index 000000000..61de54674 --- /dev/null +++ b/meson/test cases/common/139 mesonintrospect from scripts/check_env.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 + +import os +import sys +import shlex + +do_print = False + +if len(sys.argv) > 1: + do_print = bool(sys.argv[1]) + +if 'MESONINTROSPECT' not in os.environ: + raise RuntimeError('MESONINTROSPECT not found') + +mesonintrospect = os.environ['MESONINTROSPECT'] + +introspect_arr = shlex.split(mesonintrospect) + +# print(mesonintrospect) +# print(introspect_arr) + +some_executable = introspect_arr[0] + +if not os.path.isfile(some_executable): + raise RuntimeError(f'{mesonintrospect!r} does not exist') + +if do_print: + print(some_executable, end='') diff --git a/meson/test cases/common/139 mesonintrospect from scripts/check_introspection.py b/meson/test cases/common/139 mesonintrospect from scripts/check_introspection.py new file mode 100644 index 000000000..851a415c4 --- /dev/null +++ b/meson/test cases/common/139 mesonintrospect from scripts/check_introspection.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +import os +import shlex +import subprocess + + +if 'MESONINTROSPECT' not in os.environ: + raise RuntimeError('MESONINTROSPECT not found') +if 'MESON_BUILD_ROOT' not in os.environ: + raise RuntimeError('MESON_BUILD_ROOT not found') + +mesonintrospect = os.environ['MESONINTROSPECT'] +introspect_arr = shlex.split(mesonintrospect) + +buildroot = os.environ['MESON_BUILD_ROOT'] + +subprocess.check_output([*introspect_arr, '--all', buildroot]) diff --git a/meson/test cases/common/139 mesonintrospect from scripts/meson.build b/meson/test cases/common/139 mesonintrospect from scripts/meson.build new file mode 100644 index 000000000..11176727d --- /dev/null +++ b/meson/test cases/common/139 mesonintrospect from scripts/meson.build @@ -0,0 +1,14 @@ +project('mesonintrospect from scripts', 'c') + +python = import('python3').find_python() + +ret = run_command(python, ['check_env.py', '1']) +if ret.returncode() == 0 + find_program(ret.stdout()) +else + message(ret.stdout()) + message(ret.stderr()) +endif + +meson.add_postconf_script('check_introspection.py') +meson.add_install_script('check_env.py') diff --git a/meson/test cases/common/14 configure file/basename.py b/meson/test cases/common/14 configure file/basename.py new file mode 100644 index 000000000..d2c866216 --- /dev/null +++ b/meson/test cases/common/14 configure file/basename.py @@ -0,0 +1,28 @@ 
+#!/usr/bin/env python3 + +import sys +import argparse +import os + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('text', nargs='*', type=str) + args = parser.parse_args() + + text = args.text if isinstance(args.text, list) else [args.text] + + output = '' + for t in text: + t = os.path.basename(t) + + if not output: + output += t + else: + output += ' ' + t + + output += '\n' + + sys.stdout.write(output) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/meson/test cases/common/14 configure file/check_file.py b/meson/test cases/common/14 configure file/check_file.py new file mode 100644 index 000000000..a96614702 --- /dev/null +++ b/meson/test cases/common/14 configure file/check_file.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 + +import os +import sys + +def permit_osx_workaround(m1, m2): + import platform + if platform.system().lower() != 'darwin': + return False + if m2 % 10000 != 0: + return False + if m1//10000 != m2//10000: + return False + return True + +if len(sys.argv) == 2: + assert(os.path.exists(sys.argv[1])) +elif len(sys.argv) == 3: + f1 = sys.argv[1] + f2 = sys.argv[2] + m1 = os.stat(f1).st_mtime_ns + m2 = os.stat(f2).st_mtime_ns + # Compare only os.stat() + if m1 != m2: + # Under macOS the lower four digits sometimes get assigned + # zero, even though shutil.copy2 should preserve metadata. + # Just have to accept it, I guess. + if not permit_osx_workaround(m1, m2): + raise RuntimeError(f'mtime of {f1!r} ({m1!r}) != mtime of {f2!r} ({m2!r})') + import filecmp + if not filecmp.cmp(f1, f2): + raise RuntimeError(f'{f1!r} != {f2!r}') +else: + raise AssertionError diff --git a/meson/test cases/common/14 configure file/check_inputs.py b/meson/test cases/common/14 configure file/check_inputs.py new file mode 100644 index 000000000..1faa8ba05 --- /dev/null +++ b/meson/test cases/common/14 configure file/check_inputs.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import sys +from pathlib import Path + +files = [Path(f) for f in sys.argv[1:]] +names = [f.name for f in files] + +assert names == ['check_inputs.txt', 'prog.c', 'prog.c', 'prog2.c', 'prog4.c', 'prog5.c'] +for f in files[1:]: + assert f.exists() + +with files[0].open('w') as ofile: + ofile.write("#define ZERO_RESULT 0\n") diff --git a/meson/test cases/common/14 configure file/config.h b/meson/test cases/common/14 configure file/config.h new file mode 100644 index 000000000..e85b634b5 --- /dev/null +++ b/meson/test cases/common/14 configure file/config.h @@ -0,0 +1 @@ +#error "This file should not be included. 
Build dir must become before source dir in search order" diff --git a/meson/test cases/common/14 configure file/config.h.in b/meson/test cases/common/14 configure file/config.h.in new file mode 100644 index 000000000..14a155874 --- /dev/null +++ b/meson/test cases/common/14 configure file/config.h.in @@ -0,0 +1,5 @@ +#define MESSAGE "@var@" +#define OTHER "@other@" "@second@" "@empty@" + +#mesondefine BE_TRUE +#mesondefine SHOULD_BE_UNDEF diff --git a/meson/test cases/common/14 configure file/config4a.h.in b/meson/test cases/common/14 configure file/config4a.h.in new file mode 100644 index 000000000..aafd195c2 --- /dev/null +++ b/meson/test cases/common/14 configure file/config4a.h.in @@ -0,0 +1,2 @@ +/* Dummy file */ +#define RESULTA @ZERO@ diff --git a/meson/test cases/common/14 configure file/config4b.h.in b/meson/test cases/common/14 configure file/config4b.h.in new file mode 100644 index 000000000..3408bab6f --- /dev/null +++ b/meson/test cases/common/14 configure file/config4b.h.in @@ -0,0 +1,2 @@ +/* Dummy file */ +#define RESULTB @ZERO@ diff --git a/meson/test cases/common/14 configure file/config5.h.in b/meson/test cases/common/14 configure file/config5.h.in new file mode 100644 index 000000000..323bec64a --- /dev/null +++ b/meson/test cases/common/14 configure file/config5.h.in @@ -0,0 +1 @@ +#define MESSAGE "@var@" diff --git a/meson/test cases/common/14 configure file/config6.h.in b/meson/test cases/common/14 configure file/config6.h.in new file mode 100644 index 000000000..9719f8715 --- /dev/null +++ b/meson/test cases/common/14 configure file/config6.h.in @@ -0,0 +1,19 @@ +/* No escape */ +#define MESSAGE1 "@var1@" + +/* Single escape means no replace */ +#define MESSAGE2 "\@var1@" + +/* Replace pairs of escapes before '@' or '\@' with escape characters + * (note we have to double number of pairs due to C string escaping) + */ +#define MESSAGE3 "\\\\@var1@" + +/* Pairs of escapes and then single escape to avoid replace */ +#define MESSAGE4 "\\\\\@var1@" + +/* Check escaped variable does not overlap following variable */ +#define MESSAGE5 "\@var1@var2@" + +/* Check escape character outside variables */ +#define MESSAGE6 "\\ @ \@ \\\\@ \\\\\@" diff --git a/meson/test cases/common/14 configure file/config7.h.in b/meson/test cases/common/14 configure file/config7.h.in new file mode 100644 index 000000000..edd0bb3fe --- /dev/null +++ b/meson/test cases/common/14 configure file/config7.h.in @@ -0,0 +1,16 @@ +/* No escape */ +#define MESSAGE1 "${var1}" + +/* Single escape means no replace */ +#define MESSAGE2 "\${var1}" + +/* Replace pairs of escapes before '@' or '\@' with escape characters + * (note we have to double number of pairs due to C string escaping) + */ +#define MESSAGE3 "\\\\${var1}" + +/* Pairs of escapes and then single escape to avoid replace */ +#define MESSAGE4 "\\\\\${var1}" + +/* Check escape character outside variables */ +#define MESSAGE5 "\\ ${ \${ \\\\${ \\\\\${" diff --git a/meson/test cases/common/14 configure file/config8.h.in b/meson/test cases/common/14 configure file/config8.h.in new file mode 100644 index 000000000..b854ea04e --- /dev/null +++ b/meson/test cases/common/14 configure file/config8.h.in @@ -0,0 +1,3 @@ +#define MESSAGE "@var@" + +#define "non isolatin1 char Ä fails decode with utf-8" diff --git a/meson/test cases/common/14 configure file/depfile b/meson/test cases/common/14 configure file/depfile new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/14 configure file/differentafterbasename1.in b/meson/test 
cases/common/14 configure file/differentafterbasename1.in new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/14 configure file/differentafterbasename2.in b/meson/test cases/common/14 configure file/differentafterbasename2.in new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/14 configure file/dummy.dat b/meson/test cases/common/14 configure file/dummy.dat new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/14 configure file/dumpprog.c b/meson/test cases/common/14 configure file/dumpprog.c new file mode 100644 index 000000000..9f63b23b9 --- /dev/null +++ b/meson/test cases/common/14 configure file/dumpprog.c @@ -0,0 +1,52 @@ +#define SHOULD_BE_UNDEFINED 1 + +#include"config3.h" +#include +#include + +#ifdef SHOULD_BE_UNDEFINED +#error Token did not get undefined. +#endif + +#ifndef SHOULD_BE_DEFINED +#error Token did not get defined +#endif + +#define stringify(s) str(s) +#define str(s) #s + +int main(void) { +#if !(SHOULD_BE_UNQUOTED_STRING == string) + printf("String token (unquoted) defined wrong.\n"); + return 1; +#endif + if(strcmp(SHOULD_BE_STRING, "string") != 0) { + printf("String token defined wrong.\n"); + return 1; + } + if(strcmp(SHOULD_BE_STRING2, "A \"B\" C") != 0) { + printf("String token 2 defined wrong.\n"); + return 1; + } + if(strcmp(SHOULD_BE_STRING3, "A \"\" C") != 0) { + printf("String token 3 defined wrong.\n"); + return 1; + } + if(strcmp(SHOULD_BE_STRING4, "A \" C") != 0) { + printf("String token 4 defined wrong.\n"); + return 1; + } + if(SHOULD_BE_ONE != 1) { + printf("One defined incorrectly.\n"); + return 1; + } + if(SHOULD_BE_ZERO != 0) { + printf("Zero defined incorrectly.\n"); + return 1; + } + if(strcmp(SHOULD_BE_QUOTED_ONE, "1") != 0) { + printf("Quoted number defined incorrectly.\n"); + return 1; + } + SHOULD_BE_RETURN 0; +} diff --git a/meson/test cases/common/14 configure file/file_contains.py b/meson/test cases/common/14 configure file/file_contains.py new file mode 100644 index 000000000..409f09c65 --- /dev/null +++ b/meson/test cases/common/14 configure file/file_contains.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 + +import sys +import argparse + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('file', nargs=1, type=str) + parser.add_argument('text', nargs=1, type=str) + args = parser.parse_args() + + text = args.text[0] + + with open(args.file[0], encoding='utf-8') as f: + for line in f: + if line.strip() == text: + return 0 + + return 1 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/meson/test cases/common/14 configure file/generator-deps.py b/meson/test cases/common/14 configure file/generator-deps.py new file mode 100755 index 000000000..cca253cca --- /dev/null +++ b/meson/test cases/common/14 configure file/generator-deps.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 + +import sys, os +from pathlib import Path + +if len(sys.argv) != 3: + print("Wrong amount of parameters.") + +build_dir = Path(os.environ['MESON_BUILD_ROOT']) +subdir = Path(os.environ['MESON_SUBDIR']) +outputf = Path(sys.argv[1]) + +with outputf.open('w') as ofile: + ofile.write("#define ZERO_RESULT 0\n") + +depf = Path(sys.argv[2]) +if not depf.exists(): + with depf.open('w') as ofile: + ofile.write(f"{outputf.name}: depfile\n") diff --git a/meson/test cases/common/14 configure file/generator-without-input-file.py b/meson/test cases/common/14 configure file/generator-without-input-file.py new file mode 100755 index 000000000..2ee059ee2 --- 
/dev/null +++ b/meson/test cases/common/14 configure file/generator-without-input-file.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import sys, os +from pathlib import Path + +if len(sys.argv) != 2: + print("Wrong amount of parameters.") + +build_dir = Path(os.environ['MESON_BUILD_ROOT']) +subdir = Path(os.environ['MESON_SUBDIR']) +outputf = Path(sys.argv[1]) + +with outputf.open('w') as ofile: + ofile.write("#define ZERO_RESULT 0\n") diff --git a/meson/test cases/common/14 configure file/generator.py b/meson/test cases/common/14 configure file/generator.py new file mode 100755 index 000000000..e3cc88101 --- /dev/null +++ b/meson/test cases/common/14 configure file/generator.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python3 + +import sys, os +from pathlib import Path + +if len(sys.argv) != 3: + print("Wrong amount of parameters.") + +build_dir = Path(os.environ['MESON_BUILD_ROOT']) +subdir = Path(os.environ['MESON_SUBDIR']) +inputf = Path(sys.argv[1]) +outputf = Path(sys.argv[2]) + +assert(inputf.exists()) + +with outputf.open('w') as ofile: + ofile.write("#define ZERO_RESULT 0\n") diff --git a/meson/test cases/common/14 configure file/invalid-utf8.bin.in b/meson/test cases/common/14 configure file/invalid-utf8.bin.in new file mode 100644 index 000000000..98e9ed9a9 Binary files /dev/null and b/meson/test cases/common/14 configure file/invalid-utf8.bin.in differ diff --git a/meson/test cases/common/14 configure file/meson.build b/meson/test cases/common/14 configure file/meson.build new file mode 100644 index 000000000..f7e0eeba6 --- /dev/null +++ b/meson/test cases/common/14 configure file/meson.build @@ -0,0 +1,309 @@ +project('configure file test', 'c') + +conf = configuration_data() + +conf.set('var', 'mystring') +conf.set('other', 'string 2') +conf.set('second', ' bonus') +conf.set('BE_TRUE', true) + +assert(conf.get('var') == 'mystring', 'Get function is not working.') +assert(conf.get('var', 'default') == 'mystring', 'Get function is not working.') +assert(conf.get('notthere', 'default') == 'default', 'Default value getting is not working.') +assert(conf.keys() == ['BE_TRUE', 'other', 'second', 'var'], 'Keys function is not working') + +cfile = configure_file(input : 'config.h.in', + output : 'config.h', + configuration : conf) + +e = executable('inctest', 'prog.c', +# Note that you should NOT do this. Don't add generated headers here +# This tests that we do the right thing even if people add in conf files +# to their sources. + cfile) +test('inctest', e) + +# Test if we can also pass files() as input +configure_file(input : files('config.h.in'), + output : 'config2.h', + configuration : conf) + +# Now generate a header file with an external script. +genprog = import('python3').find_python() +scriptfile = '@0@/generator.py'.format(meson.current_source_dir()) +ifile = '@0@/dummy.dat'.format(meson.current_source_dir()) +ofile = '@0@/config2.h'.format(meson.current_build_dir()) + +check_file = find_program('check_file.py') +# Configure in source root with command and absolute paths +outf = configure_file(input : 'dummy.dat', + output : 'config2.h', + command : [genprog, scriptfile, ifile, ofile], + install_dir : 'share/appdir') +ret = run_command(check_file, outf) +if ret.returncode() != 0 + error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr())) +endif + +# Same again as before, but an input file should not be required in +# this case where we use a command/script to generate the output file. 
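A stripped-down sketch of a command-driven configure_file() with no input file, assuming a small helper script gen.py (not part of this test) that writes the file named by its first argument:

    project('command-only configure_file sketch')
    python = import('python3').find_python()
    genscript = '@0@/gen.py'.format(meson.current_source_dir())
    # No input: kwarg; the command is responsible for creating @OUTPUT@.
    generated = configure_file(output : 'generated.h',
      command : [python, genscript, '@OUTPUT@'])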
+genscript2b = '@0@/generator-without-input-file.py'.format(meson.current_source_dir()) +ofile2b = '@0@/config2b.h'.format(meson.current_build_dir()) +outf = configure_file( + output : 'config2b.h', + command : [genprog, genscript2b, ofile2b], + install_dir : 'share/appdir') +ret = run_command(check_file, outf) +if ret.returncode() != 0 + error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr())) +endif + +genscript2deps = '@0@/generator-deps.py'.format(meson.current_source_dir()) +ofile2deps = '@0@/config2deps.h'.format(meson.current_build_dir()) +outf = configure_file( + output : 'config2deps.h', + depfile : 'depfile.d', + command : [genprog, genscript2deps, ofile2deps, '@DEPFILE@']) +ret = run_command(check_file, outf) +if ret.returncode() != 0 + error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr())) +endif + +found_script = find_program('generator.py') +# More configure_file tests in here +subdir('subdir') + +test('inctest2', executable('prog2', 'prog2.c')) + +# Generate a conf file without an input file. + +dump = configuration_data() +dump.set_quoted('SHOULD_BE_STRING', 'string', description : 'A string') +dump.set_quoted('SHOULD_BE_STRING2', 'A "B" C') +dump.set_quoted('SHOULD_BE_STRING3', 'A "" C') +dump.set_quoted('SHOULD_BE_STRING4', 'A " C') +dump.set('SHOULD_BE_RETURN', 'return') +dump.set('SHOULD_BE_DEFINED', true) +dump.set('SHOULD_BE_UNDEFINED', false) +dump.set('SHOULD_BE_ONE', 1) +dump.set('SHOULD_BE_ZERO', 0, description : 'Absolutely zero') +dump.set('SHOULD_BE_QUOTED_ONE', '"1"') + +dump.set_quoted('INTEGER_AS_STRING', '12') +if dump.get_unquoted('INTEGER_AS_STRING').to_int() == 12 + dump.set('SHOULD_BE_UNQUOTED_STRING', dump.get_unquoted('SHOULD_BE_STRING')) +endif + +configure_file(output : 'config3.h', + configuration : dump) + +test('Configless.', executable('dumpprog', 'dumpprog.c')) + + +# Config file generation in a loop with @BASENAME@ substitution +dump = configuration_data() +dump.set('ZERO', 0) +config_templates = files(['config4a.h.in', 'config4b.h.in']) +foreach config_template : config_templates + configure_file(input : config_template, output : '@BASENAME@', configuration : dump) +endforeach + +test('Substituted', executable('prog4', 'prog4.c')) + +# Test `capture` keyword + +basename_py = find_program('basename.py') +file_contains_py = find_program('file_contains.py') +test_string = 'hello world' +test_input_file = join_paths(meson.current_build_dir(), test_string) +run_command(find_program('touch.py'), test_input_file) +configs = [ + # no input + configure_file(command: [ basename_py, test_string ], capture: true, output: 'capture test 1'), + # with input + configure_file(input: test_input_file, command: [ basename_py, '@INPUT@' ], capture: true, output: 'capture test 2'), +] +foreach c : configs + test('@0@'.format(c), file_contains_py, args: [ c, test_string ]) +endforeach + +# Test variable is substituted only once +conf5 = configuration_data() +conf5.set('var', '@var2@') +conf5.set('var2', 'error') +configure_file( + input : 'config5.h.in', + output : '@BASENAME@', + configuration : conf5) +test('test5', executable('prog5', 'prog5.c')) + +# Test escaping +conf6 = configuration_data() +conf6.set('var1', 'foo') +conf6.set('var2', 'bar') +configure_file( + input : 'config6.h.in', + output : '@BASENAME@', + configuration : conf6) +test('test6', executable('prog6', 'prog6.c')) + +# test empty install dir string +cfile = configure_file(input : 'config.h.in', + output : 'do_not_get_installed.h', + install_dir : '', + 
configuration : conf) + +# test install_dir : false (deprecated) +cfile = configure_file(input : 'config.h.in', + output : 'do_not_get_installed_please.h', + install_dir : false, + configuration : conf) + +# test intsall_dir with install: false +cfile = configure_file(input : 'config.h.in', + output : 'do_not_get_installed_in_install_dir.h', + install : false, + install_dir : 'share/appdir', + configuration : conf) + +# Test escaping with cmake format +conf7 = configuration_data() +conf7.set('var1', 'foo') +conf7.set('var2', 'bar') +configure_file( + input : 'config7.h.in', + output : '@BASENAME@', + format : 'cmake', + configuration : conf7) +test('test7', executable('prog7', 'prog7.c')) + +# Test copying of an empty configuration data object +inf = 'invalid-utf8.bin.in' +outf = configure_file(input : inf, + output : 'invalid-utf8.bin', + copy: true) +ret = run_command(check_file, inf, outf) +if ret.returncode() != 0 + error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr())) +endif +# Now the same, but using a File object as an argument. +inf2 = files('invalid-utf8.bin.in')[0] +ret = run_command(check_file, inf2, outf) +if ret.returncode() != 0 + error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr())) +endif + +# Test copy of a binary file +outf = configure_file(input : inf, + output : 'somebinary.bin', + copy : true) +ret = run_command(check_file, inf, outf) +if ret.returncode() != 0 + error('Error running command: @0@\n@1@'.format(ret.stdout(), ret.stderr())) +endif + +# Test non isolatin1 encoded input file which fails to decode with utf-8 +conf8 = configuration_data() +conf8.set('var', 'foo') +configure_file( + input : 'config8.h.in', + output : '@BASENAME@', + encoding : 'koi8-r', + configuration : conf8) + +# Test that passing an empty configuration_data() object to a file with +# #mesondefine substitutions does not print the warning. +configure_file( + input: 'nosubst-nocopy1.txt.in', + output: 'nosubst-nocopy1.txt', + configuration : configuration_data()) + +# test that passing an empty configuration_data() object to a file with +# @foo@ substitutions does not print the warning. +configure_file( + input: 'nosubst-nocopy2.txt.in', + output: 'nosubst-nocopy2.txt', + configuration : configuration_data()) + +# test that passing a configured file object to test() works, and that passing +# an empty configuration_data() object to a file that leads to no substitutions +# prints a warning (see unit tests) +test_file = configure_file( + input: 'test.py.in', + output: 'test.py', + configuration: configuration_data()) + +# Test that overwriting an existing file creates a warning. +configure_file( + input: 'test.py.in', + output: 'double_output.txt', + configuration: conf) +configure_file( + input: 'test.py.in', + output: 'double_output.txt', + configuration: conf) + +# Test that the same file name in a different subdir will not create a warning +configure_file( + input: 'test.py.in', + output: 'no_write_conflict.txt', + configuration: conf) + +# Test that @BASENAME@ is substituted before checking and does not create a warning. +configure_file( + input: 'differentafterbasename1.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'differentafterbasename2.in', + output: '@BASENAME@', + configuration: conf +) + +# Test that @BASENAME@ is substituted before checking and does create a warning on conflict. 
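For orientation, a minimal sketch of @BASENAME@ in an output name, using a hypothetical template settings.conf.in:

    project('basename output sketch')
    cdata = configuration_data()
    cdata.set('LEVEL', 3)
    # @BASENAME@ is the input file name with its last extension stripped,
    # so 'settings.conf.in' is written out as 'settings.conf'.
    configure_file(input : 'settings.conf.in',
      output : '@BASENAME@',
      configuration : cdata)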
+configure_file( + input: 'sameafterbasename.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'sameafterbasename.in2', + output: '@BASENAME@', + configuration: conf +) + +test('configure-file', test_file) + +cdata = configuration_data() +cdata.set('invalid_value', ['array']) + +# Dictionaries + +cdata = configuration_data({ + 'A_STRING' : '"foo"', + 'A_INT' : 42, + 'A_DEFINED' : true, + 'A_UNDEFINED' : false, +}) + +configure_file(output : 'config9a.h', + configuration : cdata, +) + +configure_file(output : 'config9b.h', + configuration : { + 'B_STRING' : '"foo"', + 'B_INT' : 42, + 'B_DEFINED' : true, + 'B_UNDEFINED' : false, + } +) + +test('test9', executable('prog9', 'prog9.c')) + +check_inputs = find_program('check_inputs.py') +configure_file(output : 'check_inputs.txt', + input : ['prog.c', files('prog2.c', 'prog4.c')], + command : [check_inputs, '@OUTPUT@', '@INPUT0@', '@INPUT@', files('prog5.c')] +) diff --git a/meson/test cases/common/14 configure file/nosubst-nocopy1.txt.in b/meson/test cases/common/14 configure file/nosubst-nocopy1.txt.in new file mode 100644 index 000000000..6e893a105 --- /dev/null +++ b/meson/test cases/common/14 configure file/nosubst-nocopy1.txt.in @@ -0,0 +1 @@ +#mesondefine FOO_BAR diff --git a/meson/test cases/common/14 configure file/nosubst-nocopy2.txt.in b/meson/test cases/common/14 configure file/nosubst-nocopy2.txt.in new file mode 100644 index 000000000..a6a7ccad2 --- /dev/null +++ b/meson/test cases/common/14 configure file/nosubst-nocopy2.txt.in @@ -0,0 +1 @@ +@FOO_BAR@ diff --git a/meson/test cases/common/14 configure file/prog.c b/meson/test cases/common/14 configure file/prog.c new file mode 100644 index 000000000..85e66b97e --- /dev/null +++ b/meson/test cases/common/14 configure file/prog.c @@ -0,0 +1,17 @@ +#include +/* config.h must not be in quotes: + * https://gcc.gnu.org/onlinedocs/cpp/Search-Path.html + */ +#include + +#ifdef SHOULD_BE_UNDEF +#error "FAIL!" 
+#endif + +int main(void) { +#ifndef BE_TRUE + return 1; +#else + return strcmp(MESSAGE, "mystring"); +#endif +} diff --git a/meson/test cases/common/14 configure file/prog2.c b/meson/test cases/common/14 configure file/prog2.c new file mode 100644 index 000000000..8b90bfb52 --- /dev/null +++ b/meson/test cases/common/14 configure file/prog2.c @@ -0,0 +1,5 @@ +#include + +int main(void) { + return ZERO_RESULT; +} diff --git a/meson/test cases/common/14 configure file/prog4.c b/meson/test cases/common/14 configure file/prog4.c new file mode 100644 index 000000000..1e32a3129 --- /dev/null +++ b/meson/test cases/common/14 configure file/prog4.c @@ -0,0 +1,6 @@ +#include +#include + +int main(void) { + return RESULTA + RESULTB; +} diff --git a/meson/test cases/common/14 configure file/prog5.c b/meson/test cases/common/14 configure file/prog5.c new file mode 100644 index 000000000..1a8f78523 --- /dev/null +++ b/meson/test cases/common/14 configure file/prog5.c @@ -0,0 +1,6 @@ +#include +#include + +int main(void) { + return strcmp(MESSAGE, "@var2@"); +} diff --git a/meson/test cases/common/14 configure file/prog6.c b/meson/test cases/common/14 configure file/prog6.c new file mode 100644 index 000000000..57f558605 --- /dev/null +++ b/meson/test cases/common/14 configure file/prog6.c @@ -0,0 +1,11 @@ +#include +#include + +int main(void) { + return strcmp(MESSAGE1, "foo") + || strcmp(MESSAGE2, "@var1@") + || strcmp(MESSAGE3, "\\foo") + || strcmp(MESSAGE4, "\\@var1@") + || strcmp(MESSAGE5, "@var1bar") + || strcmp(MESSAGE6, "\\ @ @ \\@ \\@"); +} diff --git a/meson/test cases/common/14 configure file/prog7.c b/meson/test cases/common/14 configure file/prog7.c new file mode 100644 index 000000000..802bc46e5 --- /dev/null +++ b/meson/test cases/common/14 configure file/prog7.c @@ -0,0 +1,10 @@ +#include +#include + +int main(void) { + return strcmp(MESSAGE1, "foo") + || strcmp(MESSAGE2, "${var1}") + || strcmp(MESSAGE3, "\\foo") + || strcmp(MESSAGE4, "\\${var1}") + || strcmp(MESSAGE5, "\\ ${ ${ \\${ \\${"); +} diff --git a/meson/test cases/common/14 configure file/prog9.c b/meson/test cases/common/14 configure file/prog9.c new file mode 100644 index 000000000..3f7760129 --- /dev/null +++ b/meson/test cases/common/14 configure file/prog9.c @@ -0,0 +1,18 @@ +#include +#include +#include + +#if defined(A_UNDEFINED) || defined(B_UNDEFINED) +#error "Should not be defined" +#endif + +#if !defined(A_DEFINED) || !defined(B_DEFINED) +#error "Should be defined" +#endif + +int main(void) { + return strcmp(A_STRING, "foo") + || strcmp(B_STRING, "foo") + || A_INT != 42 + || B_INT != 42; +} diff --git a/meson/test cases/common/14 configure file/sameafterbasename.in b/meson/test cases/common/14 configure file/sameafterbasename.in new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/14 configure file/sameafterbasename.in2 b/meson/test cases/common/14 configure file/sameafterbasename.in2 new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/14 configure file/subdir/meson.build b/meson/test cases/common/14 configure file/subdir/meson.build new file mode 100644 index 000000000..146b7b66c --- /dev/null +++ b/meson/test cases/common/14 configure file/subdir/meson.build @@ -0,0 +1,38 @@ +# Configure in subdir with absolute paths for input and relative for output +configure_file(input : '../dummy.dat', + output : 'config2-1.h', + command : [genprog, scriptfile, ifile, 'config2-1.h'], + install_dir : 'share/appdireh') +run_command(check_file, 
join_paths(meson.current_build_dir(), 'config2-1.h')) + +# Configure in subdir with files() for input and relative for output +configure_file(input : '../dummy.dat', + output : 'config2-2.h', + command : [genprog, scriptfile, files('../dummy.dat'), 'config2-2.h'], + install_dir : 'share/appdirok') +run_command(check_file, join_paths(meson.current_build_dir(), 'config2-2.h')) + +# Configure in subdir with string templates for input and output +configure_file(input : '../dummy.dat', + output : 'config2-3.h', + command : [found_script, '@INPUT@', '@OUTPUT@']) +run_command(check_file, join_paths(meson.current_build_dir(), 'config2-3.h')) + +# Test that overwriting an existing file creates a warning. +configure_file( + input: '../test.py.in', + output: 'double_output2.txt', + configuration: conf +) +configure_file( + input: '../test.py.in', + output: 'double_output2.txt', + configuration: conf +) + +# Test that the same file name in a different subdir will not create a warning +configure_file( + input: '../test.py.in', + output: 'no_write_conflict.txt', + configuration: conf +) diff --git a/meson/test cases/common/14 configure file/test.json b/meson/test cases/common/14 configure file/test.json new file mode 100644 index 000000000..92f7b1896 --- /dev/null +++ b/meson/test cases/common/14 configure file/test.json @@ -0,0 +1,8 @@ +{ + "installed": [ + {"type": "file", "file": "usr/share/appdir/config2.h"}, + {"type": "file", "file": "usr/share/appdir/config2b.h"}, + {"type": "file", "file": "usr/share/appdireh/config2-1.h"}, + {"type": "file", "file": "usr/share/appdirok/config2-2.h"} + ] +} diff --git a/meson/test cases/common/14 configure file/test.py.in b/meson/test cases/common/14 configure file/test.py.in new file mode 100644 index 000000000..15a61f578 --- /dev/null +++ b/meson/test cases/common/14 configure file/test.py.in @@ -0,0 +1,4 @@ +#!/usr/bin/env python3 + +import sys +sys.exit(0) diff --git a/meson/test cases/common/14 configure file/touch.py b/meson/test cases/common/14 configure file/touch.py new file mode 100644 index 000000000..b48f48162 --- /dev/null +++ b/meson/test cases/common/14 configure file/touch.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import sys +import argparse +from pathlib import Path + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('files', nargs='*', type=str) + args = parser.parse_args() + + for filepath in args.files: + Path(filepath).touch() + +if __name__ == '__main__': + sys.exit(main()) diff --git a/meson/test cases/common/140 custom target multiple outputs/generator.py b/meson/test cases/common/140 custom target multiple outputs/generator.py new file mode 100755 index 000000000..39dbd11c4 --- /dev/null +++ b/meson/test cases/common/140 custom target multiple outputs/generator.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import sys, os + +if len(sys.argv) != 3: + print(sys.argv[0], '', '') + +name = sys.argv[1] +odir = sys.argv[2] + +with open(os.path.join(odir, name + '.h'), 'w') as f: + f.write('int func();\n') +with open(os.path.join(odir, name + '.sh'), 'w') as f: + f.write('#!/bin/bash') diff --git a/meson/test cases/common/140 custom target multiple outputs/meson.build b/meson/test cases/common/140 custom target multiple outputs/meson.build new file mode 100644 index 000000000..30305056b --- /dev/null +++ b/meson/test cases/common/140 custom target multiple outputs/meson.build @@ -0,0 +1,44 @@ +project('multiple outputs install', 'c') + +gen = find_program('generator.py') + +custom_target('different-install-dirs', + 
output : ['diff.h', 'diff.sh'], + command : [gen, 'diff', '@OUTDIR@'], + install : true, + install_dir : [join_paths(get_option('prefix'), get_option('includedir')), + join_paths(get_option('prefix'), get_option('bindir'))]) + +custom_target('same-install-dir', + output : ['same.h', 'same.sh'], + command : [gen, 'same', '@OUTDIR@'], + install : true, + install_dir : '/opt') + +custom_target('only-install-first', + output : ['first.h', 'first.sh'], + command : [gen, 'first', '@OUTDIR@'], + install : true, + install_dir : [join_paths(get_option('prefix'), get_option('includedir')), false]) + +targets = custom_target('only-install-second', + output : ['second.h', 'second.sh'], + command : [gen, 'second', '@OUTDIR@'], + install : true, + install_dir : [false, join_paths(get_option('prefix'), get_option('bindir'))]) + +paths = [] +foreach i : targets.to_list() + paths += i.full_path() +endforeach + +# The Xcode backend has a different output naming scheme. +if meson.backend() == 'xcode' + assert(paths == [meson.project_build_root() / get_option('buildtype') / 'second.h', + meson.project_build_root() / get_option('buildtype') / 'second.sh']) + +# Skip on Windows because paths are not identical, '/' VS '\'. +elif host_machine.system() != 'windows' + assert(paths == [meson.current_build_dir() / 'second.h', + meson.current_build_dir() / 'second.sh']) +endif diff --git a/meson/test cases/common/140 custom target multiple outputs/test.json b/meson/test cases/common/140 custom target multiple outputs/test.json new file mode 100644 index 000000000..e59cb9f83 --- /dev/null +++ b/meson/test cases/common/140 custom target multiple outputs/test.json @@ -0,0 +1,10 @@ +{ + "installed": [ + {"type": "file", "file": "usr/include/diff.h"}, + {"type": "file", "file": "usr/include/first.h"}, + {"type": "file", "file": "usr/bin/diff.sh"}, + {"type": "file", "file": "usr/bin/second.sh"}, + {"type": "file", "file": "opt/same.h"}, + {"type": "file", "file": "opt/same.sh"} + ] +} diff --git a/meson/test cases/common/141 special characters/arg-char-test.c b/meson/test cases/common/141 special characters/arg-char-test.c new file mode 100644 index 000000000..04e02f8e8 --- /dev/null +++ b/meson/test cases/common/141 special characters/arg-char-test.c @@ -0,0 +1,10 @@ +#include +#include + +int main(int argc, char **argv) { + char c = CHAR; + assert(argc == 2); + if (c != argv[1][0]) + fprintf(stderr, "Expected %x, got %x\n", (unsigned int) c, (unsigned int) argv[1][0]); + assert(c == argv[1][0]); +} diff --git a/meson/test cases/common/141 special characters/arg-string-test.c b/meson/test cases/common/141 special characters/arg-string-test.c new file mode 100644 index 000000000..199fd7917 --- /dev/null +++ b/meson/test cases/common/141 special characters/arg-string-test.c @@ -0,0 +1,12 @@ +#include +#include +#include + +int main(int argc, char **argv) { + const char *s = CHAR; + assert(argc == 2); + assert(strlen(s) == 1); + if (s[0] != argv[1][0]) + fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]); + assert(s[0] == argv[1][0]); +} diff --git a/meson/test cases/common/141 special characters/arg-unquoted-test.c b/meson/test cases/common/141 special characters/arg-unquoted-test.c new file mode 100644 index 000000000..7f679ca68 --- /dev/null +++ b/meson/test cases/common/141 special characters/arg-unquoted-test.c @@ -0,0 +1,17 @@ +#include +#include +#include + +#define Q(x) #x +#define QUOTE(x) Q(x) + +int main(int argc, char **argv) { + const char *s = QUOTE(CHAR); + assert(argc == 
2); + assert(strlen(s) == 1); + if (s[0] != argv[1][0]) + fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]); + assert(s[0] == argv[1][0]); + // There is no way to convert a macro argument into a character constant. + // Otherwise we'd test that as well +} diff --git a/meson/test cases/common/141 special characters/check_quoting.py b/meson/test cases/common/141 special characters/check_quoting.py new file mode 100644 index 000000000..d6e50ea69 --- /dev/null +++ b/meson/test cases/common/141 special characters/check_quoting.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 + +import sys + +expected = { + 'newline': '\n', + 'dollar': '$', + 'colon': ':', + 'space': ' ', + 'multi1': ' ::$$ ::$$', + 'multi2': ' ::$$\n\n \n\n::$$', +} + +output = None + +for arg in sys.argv[1:]: + try: + name, value = arg.split('=', 1) + except ValueError: + output = arg + continue + + if expected[name] != value: + raise RuntimeError('{!r} is {!r} but should be {!r}'.format(name, value, expected[name])) + +if output is not None: + with open(output, 'w') as f: + f.write('Success!') diff --git a/meson/test cases/common/141 special characters/meson.build b/meson/test cases/common/141 special characters/meson.build new file mode 100644 index 000000000..579601e80 --- /dev/null +++ b/meson/test cases/common/141 special characters/meson.build @@ -0,0 +1,75 @@ +project('ninja special characters' ,'c') + +python = import('python3').find_python() + +# Without newlines, this should appear directly in build.ninja. +gen = custom_target('gen', + command : [ + python, + files('check_quoting.py'), + 'dollar=$', + 'colon=:', + 'space= ', + '''multi1= ::$$ ::$$''', + '@OUTPUT@'], + output : 'result', + install : true, + install_dir : get_option('datadir')) + +# With newlines, this should go through the exe wrapper. 
+gen2 = custom_target('gen2', + command : [ + python, + files('check_quoting.py'), + '''newline= +''', + 'dollar=$', + 'colon=:', + 'space= ', + '''multi2= ::$$ + + + +::$$''', + '@OUTPUT@'], + output : 'result2', + install : true, + install_dir : get_option('datadir')) + +# Test that we can pass these special characters in compiler arguments +# +# (this part of the test is crafted so we don't try to use these special +# characters in filenames or target names) +# +# TODO: similar tests needed for languages other than C +# TODO: add similar test for quote, doublequote, and hash, carefully +# Re hash, see +# https://docs.microsoft.com/en-us/cpp/build/reference/d-preprocessor-definitions + +special = [ + ['amp', '&'], + ['at', '@'], + ['backslash', '\\'], + ['dollar', '$'], + ['gt', '>'], + ['lt', '<'], + ['slash', '/'], +] + +cc = meson.get_compiler('c') + +foreach s : special + args = '-DCHAR="@0@"'.format(s[1]) + e = executable('arg-string-' + s[0], 'arg-string-test.c', c_args: args) + test('arg-string-' + s[0], e, args: s[1]) + + args = '-DCHAR=@0@'.format(s[1]) + e = executable('arg-unquoted-' + s[0], 'arg-unquoted-test.c', c_args: args) + test('arg-unquoted-' + s[0], e, args: s[1]) +endforeach + +foreach s : special + args = '-DCHAR=\'@0@\''.format(s[1]) + e = executable('arg-char-' + s[0], 'arg-char-test.c', c_args: args) + test('arg-char-' + s[0], e, args: s[1]) +endforeach diff --git a/meson/test cases/common/141 special characters/test.json b/meson/test cases/common/141 special characters/test.json new file mode 100644 index 000000000..9709e5bc6 --- /dev/null +++ b/meson/test cases/common/141 special characters/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "file", "file": "usr/share/result"}, + {"type": "file", "file": "usr/share/result2"} + ] +} diff --git a/meson/test cases/common/142 nested links/meson.build b/meson/test cases/common/142 nested links/meson.build new file mode 100644 index 000000000..0821b038d --- /dev/null +++ b/meson/test cases/common/142 nested links/meson.build @@ -0,0 +1,8 @@ +project('test', 'c') + +libxserver_dri3 = [] +libxserver = [ libxserver_dri3 ] + +executable('Xephyr', 'xephyr.c', link_with: [ libxserver ]) + +executable('Zephyr', 'xephyr.c', link_args: [[], []]) diff --git a/meson/test cases/common/142 nested links/xephyr.c b/meson/test cases/common/142 nested links/xephyr.c new file mode 100644 index 000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/common/142 nested links/xephyr.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/143 list of file sources/foo b/meson/test cases/common/143 list of file sources/foo new file mode 100644 index 000000000..7b57bd29e --- /dev/null +++ b/meson/test cases/common/143 list of file sources/foo @@ -0,0 +1 @@ +some text diff --git a/meson/test cases/common/143 list of file sources/gen.py b/meson/test cases/common/143 list of file sources/gen.py new file mode 100644 index 000000000..2337d3d6b --- /dev/null +++ b/meson/test cases/common/143 list of file sources/gen.py @@ -0,0 +1,7 @@ +import shutil +import sys + +if __name__ == '__main__': + if len(sys.argv) != 3: + raise Exception('Requires exactly 2 args') + shutil.copy2(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/143 list of file sources/meson.build b/meson/test cases/common/143 list of file sources/meson.build new file mode 100644 index 000000000..819509dd6 --- /dev/null +++ b/meson/test cases/common/143 list of file sources/meson.build @@ -0,0 +1,12 @@ +project('test', 'c') + +mod_py = 
import('python3') +python = mod_py.find_python() + +test_target = custom_target( + 'test_target', + input : [files('gen.py'), files('foo')], + output : 'bar', + command : [python, '@INPUT0@', '@INPUT1@', '@OUTPUT@'], + build_by_default : true, +) diff --git a/meson/test cases/common/144 link depends custom target/foo.c b/meson/test cases/common/144 link depends custom target/foo.c new file mode 100644 index 000000000..58c86a62b --- /dev/null +++ b/meson/test cases/common/144 link depends custom target/foo.c @@ -0,0 +1,15 @@ +#include + +int main(void) { + const char *fn = DEPFILE; + FILE *f = fopen(fn, "r"); + if (!f) { + printf("could not open %s", fn); + return 1; + } + else { + printf("successfully opened %s", fn); + } + + return 0; +} diff --git a/meson/test cases/common/144 link depends custom target/make_file.py b/meson/test cases/common/144 link depends custom target/make_file.py new file mode 100755 index 000000000..ceb6e19aa --- /dev/null +++ b/meson/test cases/common/144 link depends custom target/make_file.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 +import sys + +with open(sys.argv[1], 'w') as f: + print('# this file does nothing', file=f) diff --git a/meson/test cases/common/144 link depends custom target/meson.build b/meson/test cases/common/144 link depends custom target/meson.build new file mode 100644 index 000000000..ee7a865d9 --- /dev/null +++ b/meson/test cases/common/144 link depends custom target/meson.build @@ -0,0 +1,19 @@ +project('link_depends_custom_target', 'c') + +if meson.backend().startswith('vs') + # FIXME: Broken on the VS backends + error('MESON_SKIP_TEST see https://github.com/mesonbuild/meson/issues/1799') +endif + +cmd = find_program('make_file.py') + +dep_file = custom_target('gen_dep', + command: [cmd, '@OUTPUT@'], + output: 'dep_file') + +exe = executable('foo', 'foo.c', + link_depends: dep_file, + c_args: ['-DDEPFILE="' + dep_file.full_path()+ '"']) + +# check that dep_file exists, which means that link_depends target ran +test('runtest', exe) diff --git a/meson/test cases/common/145 recursive linking/3rdorderdeps/lib.c.in b/meson/test cases/common/145 recursive linking/3rdorderdeps/lib.c.in new file mode 100644 index 000000000..461f85995 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/3rdorderdeps/lib.c.in @@ -0,0 +1,8 @@ +#include "../lib.h" + +int get_@DEPENDENCY@dep_value (void); + +SYMBOL_EXPORT +int get_@LIBTYPE@@DEPENDENCY@dep_value (void) { + return get_@DEPENDENCY@dep_value (); +} diff --git a/meson/test cases/common/145 recursive linking/3rdorderdeps/main.c.in b/meson/test cases/common/145 recursive linking/3rdorderdeps/main.c.in new file mode 100644 index 000000000..643c24694 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/3rdorderdeps/main.c.in @@ -0,0 +1,16 @@ +#include + +#include "../lib.h" + +SYMBOL_IMPORT int get_@LIBTYPE@@DEPENDENCY@dep_value (void); + +int main(void) { + int val; + + val = get_@LIBTYPE@@DEPENDENCY@dep_value (); + if (val != @VALUE@) { + printf("@LIBTYPE@@DEPENDENCY@ was %i instead of @VALUE@\n", val); + return -1; + } + return 0; +} diff --git a/meson/test cases/common/145 recursive linking/3rdorderdeps/meson.build b/meson/test cases/common/145 recursive linking/3rdorderdeps/meson.build new file mode 100644 index 000000000..4c5ac7307 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/3rdorderdeps/meson.build @@ -0,0 +1,49 @@ +dep3_libs = [] + +# Permutate all combinations of shared and static libraries up to three levels +# executable -> shared -> static -> shared 
(etc) +foreach dep2 : ['sh', 'st'] + foreach dep1 : ['sh', 'st'] + foreach libtype : ['sh', 'st'] + name = libtype + dep1 + dep2 + if dep2 == 'sh' + libret = 1 + elif dep2 == 'st' + libret = 2 + else + error('Unknown dep2 "@0@"'.format(dep2)) + endif + + if libtype == 'sh' + target = 'shared_library' + build_args = [] + elif libtype == 'st' + target = 'static_library' + build_args = ['-DMESON_STATIC_BUILD'] + else + error('Unknown libtype "@0@"'.format(libtype)) + endif + + cdata = configuration_data() + cdata.set('DEPENDENCY', dep1 + dep2) + cdata.set('LIBTYPE', libtype) + cdata.set('VALUE', libret) + + lib_c = configure_file(input : 'lib.c.in', + output : name + '-lib.c', + configuration : cdata) + dep = get_variable(dep1 + dep2 + 'dep') + dep3_lib = build_target(name, lib_c, link_with : dep, + target_type : target, + c_args : build_args) + dep3_libs += [dep3_lib] + + main_c = configure_file(input : 'main.c.in', + output : name + '-main.c', + configuration : cdata) + dep3_bin = executable(name + '_test', main_c, link_with : dep3_lib, + c_args : build_args) + test(name + 'test', dep3_bin) + endforeach + endforeach +endforeach diff --git a/meson/test cases/common/145 recursive linking/circular/lib1.c b/meson/test cases/common/145 recursive linking/circular/lib1.c new file mode 100644 index 000000000..38889cfa0 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/lib1.c @@ -0,0 +1,6 @@ +int get_st2_prop (void); +int get_st3_prop (void); + +int get_st1_value (void) { + return get_st2_prop () + get_st3_prop (); +} diff --git a/meson/test cases/common/145 recursive linking/circular/lib2.c b/meson/test cases/common/145 recursive linking/circular/lib2.c new file mode 100644 index 000000000..31cd37cc1 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/lib2.c @@ -0,0 +1,6 @@ +int get_st1_prop (void); +int get_st3_prop (void); + +int get_st2_value (void) { + return get_st1_prop () + get_st3_prop (); +} diff --git a/meson/test cases/common/145 recursive linking/circular/lib3.c b/meson/test cases/common/145 recursive linking/circular/lib3.c new file mode 100644 index 000000000..67d473aac --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/lib3.c @@ -0,0 +1,6 @@ +int get_st1_prop (void); +int get_st2_prop (void); + +int get_st3_value (void) { + return get_st1_prop () + get_st2_prop (); +} diff --git a/meson/test cases/common/145 recursive linking/circular/main.c b/meson/test cases/common/145 recursive linking/circular/main.c new file mode 100644 index 000000000..164abdf85 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/main.c @@ -0,0 +1,28 @@ +#include + +#include "../lib.h" + +int get_st1_value (void); +int get_st2_value (void); +int get_st3_value (void); + +int main(void) { + int val; + + val = get_st1_value (); + if (val != 5) { + printf("st1 value was %i instead of 5\n", val); + return -1; + } + val = get_st2_value (); + if (val != 4) { + printf("st2 value was %i instead of 4\n", val); + return -2; + } + val = get_st3_value (); + if (val != 3) { + printf("st3 value was %i instead of 3\n", val); + return -3; + } + return 0; +} diff --git a/meson/test cases/common/145 recursive linking/circular/meson.build b/meson/test cases/common/145 recursive linking/circular/meson.build new file mode 100644 index 000000000..b7a70a86b --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/meson.build @@ -0,0 +1,5 @@ +st1 = static_library('st1', 'lib1.c', 'prop1.c') +st2 = static_library('st2', 
'lib2.c', 'prop2.c') +st3 = static_library('st3', 'lib3.c', 'prop3.c') + +test('circular', executable('circular', 'main.c', link_with : [st1, st2, st3])) diff --git a/meson/test cases/common/145 recursive linking/circular/prop1.c b/meson/test cases/common/145 recursive linking/circular/prop1.c new file mode 100644 index 000000000..4e571f5ee --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/prop1.c @@ -0,0 +1,3 @@ +int get_st1_prop (void) { + return 1; +} diff --git a/meson/test cases/common/145 recursive linking/circular/prop2.c b/meson/test cases/common/145 recursive linking/circular/prop2.c new file mode 100644 index 000000000..ceabba055 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/prop2.c @@ -0,0 +1,3 @@ +int get_st2_prop (void) { + return 2; +} diff --git a/meson/test cases/common/145 recursive linking/circular/prop3.c b/meson/test cases/common/145 recursive linking/circular/prop3.c new file mode 100644 index 000000000..246206c2c --- /dev/null +++ b/meson/test cases/common/145 recursive linking/circular/prop3.c @@ -0,0 +1,3 @@ +int get_st3_prop (void) { + return 3; +} diff --git a/meson/test cases/common/145 recursive linking/edge-cases/libsto.c b/meson/test cases/common/145 recursive linking/edge-cases/libsto.c new file mode 100644 index 000000000..93f46a88c --- /dev/null +++ b/meson/test cases/common/145 recursive linking/edge-cases/libsto.c @@ -0,0 +1,8 @@ +#include "../lib.h" + +int get_builto_value (void); + +SYMBOL_EXPORT +int get_stodep_value (void) { + return get_builto_value (); +} diff --git a/meson/test cases/common/145 recursive linking/edge-cases/meson.build b/meson/test cases/common/145 recursive linking/edge-cases/meson.build new file mode 100644 index 000000000..6a46266b4 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/edge-cases/meson.build @@ -0,0 +1,9 @@ +# Test https://github.com/mesonbuild/meson/issues/2096 +# Note that removing 'shnodep' from link_with: makes the error go away because +# then it is added after the static library is added to the link command. +test('shared-static', executable('shstexe', 'shstmain.c', link_with : [shnodep, stshdep])) + +# Static library that needs a symbol defined in an object file. This already +# works, but good to add a test case early. 
+stodep = static_library('stodep', 'libsto.c') +test('stodep', executable('stodep', 'stomain.c', 'stobuilt.c', link_with : stodep)) diff --git a/meson/test cases/common/145 recursive linking/edge-cases/shstmain.c b/meson/test cases/common/145 recursive linking/edge-cases/shstmain.c new file mode 100644 index 000000000..334f86788 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/edge-cases/shstmain.c @@ -0,0 +1,16 @@ +#include + +#include "../lib.h" + +int get_stshdep_value (void); + +int main(void) { + int val; + + val = get_stshdep_value (); + if (val != 1) { + printf("st1 value was %i instead of 1\n", val); + return -1; + } + return 0; +} diff --git a/meson/test cases/common/145 recursive linking/edge-cases/stobuilt.c b/meson/test cases/common/145 recursive linking/edge-cases/stobuilt.c new file mode 100644 index 000000000..9cc15bcae --- /dev/null +++ b/meson/test cases/common/145 recursive linking/edge-cases/stobuilt.c @@ -0,0 +1,7 @@ +#include "../lib.h" + + +SYMBOL_EXPORT +int get_builto_value (void) { + return 1; +} diff --git a/meson/test cases/common/145 recursive linking/edge-cases/stomain.c b/meson/test cases/common/145 recursive linking/edge-cases/stomain.c new file mode 100644 index 000000000..a16a89b31 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/edge-cases/stomain.c @@ -0,0 +1,16 @@ +#include + +#include "../lib.h" + +int get_stodep_value (void); + +int main(void) { + int val; + + val = get_stodep_value (); + if (val != 1) { + printf("st1 value was %i instead of 1\n", val); + return -1; + } + return 0; +} diff --git a/meson/test cases/common/145 recursive linking/lib.h b/meson/test cases/common/145 recursive linking/lib.h new file mode 100644 index 000000000..b54bf3673 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/lib.h @@ -0,0 +1,17 @@ +#if defined _WIN32 + #ifdef MESON_STATIC_BUILD + #define SYMBOL_EXPORT + #define SYMBOL_IMPORT + #else + #define SYMBOL_IMPORT __declspec(dllimport) + #define SYMBOL_EXPORT __declspec(dllexport) + #endif +#else + #define SYMBOL_IMPORT + #if defined __GNUC__ + #define SYMBOL_EXPORT __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define SYMBOL_EXPORT + #endif +#endif diff --git a/meson/test cases/common/145 recursive linking/main.c b/meson/test cases/common/145 recursive linking/main.c new file mode 100644 index 000000000..cf091d089 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/main.c @@ -0,0 +1,46 @@ +#include + +#include "lib.h" + +int get_stnodep_value (void); +int get_stshdep_value (void); +int get_ststdep_value (void); +SYMBOL_IMPORT int get_shnodep_value (void); +SYMBOL_IMPORT int get_shshdep_value (void); +SYMBOL_IMPORT int get_shstdep_value (void); + +int main(void) { + int val; + + val = get_shnodep_value (); + if (val != 1) { + printf("shnodep was %i instead of 1\n", val); + return -1; + } + val = get_stnodep_value (); + if (val != 2) { + printf("stnodep was %i instead of 2\n", val); + return -2; + } + val = get_shshdep_value (); + if (val != 1) { + printf("shshdep was %i instead of 1\n", val); + return -3; + } + val = get_shstdep_value (); + if (val != 2) { + printf("shstdep was %i instead of 2\n", val); + return -4; + } + val = get_stshdep_value (); + if (val != 1) { + printf("shstdep was %i instead of 1\n", val); + return -5; + } + val = get_ststdep_value (); + if (val != 2) { + printf("ststdep was %i instead of 2\n", val); + return -6; + } + return 0; +} diff --git a/meson/test 
cases/common/145 recursive linking/meson.build b/meson/test cases/common/145 recursive linking/meson.build new file mode 100644 index 000000000..2ca715157 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/meson.build @@ -0,0 +1,29 @@ +project('recursive dependencies', 'c') + +# Test that you can link a shared executable to: +# - A shared library with no other deps +subdir('shnodep') +# - A static library with no other deps +subdir('stnodep') +# - A shared library with a shared library dep +subdir('shshdep') +# - A shared library with a static library dep +subdir('shstdep') +# - A static library with a shared library dep +subdir('stshdep') +# - A static library with a static library dep +subdir('ststdep') + +test('alldeps', + executable('alldeps', 'main.c', + link_with : [shshdep, shstdep, ststdep, stshdep])) + +# More combinations of static and shared libraries +subdir('3rdorderdeps') + +# Circular dependencies between static libraries +# This requires the use of --start/end-group with GNU ld +subdir('circular') + +# Various edge cases that have been reported +subdir('edge-cases') diff --git a/meson/test cases/common/145 recursive linking/shnodep/lib.c b/meson/test cases/common/145 recursive linking/shnodep/lib.c new file mode 100644 index 000000000..a3b7993c2 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/shnodep/lib.c @@ -0,0 +1,6 @@ +#include "../lib.h" + +SYMBOL_EXPORT +int get_shnodep_value (void) { + return 1; +} diff --git a/meson/test cases/common/145 recursive linking/shnodep/meson.build b/meson/test cases/common/145 recursive linking/shnodep/meson.build new file mode 100644 index 000000000..66cfd9bc6 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/shnodep/meson.build @@ -0,0 +1 @@ +shnodep = shared_library('shnodep', 'lib.c', version: '0.0.0') diff --git a/meson/test cases/common/145 recursive linking/shshdep/lib.c b/meson/test cases/common/145 recursive linking/shshdep/lib.c new file mode 100644 index 000000000..715d12092 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/shshdep/lib.c @@ -0,0 +1,8 @@ +#include "../lib.h" + +int get_shnodep_value (void); + +SYMBOL_EXPORT +int get_shshdep_value (void) { + return get_shnodep_value (); +} diff --git a/meson/test cases/common/145 recursive linking/shshdep/meson.build b/meson/test cases/common/145 recursive linking/shshdep/meson.build new file mode 100644 index 000000000..020b481cf --- /dev/null +++ b/meson/test cases/common/145 recursive linking/shshdep/meson.build @@ -0,0 +1 @@ +shshdep = shared_library('shshdep', 'lib.c', link_with : shnodep) diff --git a/meson/test cases/common/145 recursive linking/shstdep/lib.c b/meson/test cases/common/145 recursive linking/shstdep/lib.c new file mode 100644 index 000000000..5da8d0b2e --- /dev/null +++ b/meson/test cases/common/145 recursive linking/shstdep/lib.c @@ -0,0 +1,8 @@ +#include "../lib.h" + +int get_stnodep_value (void); + +SYMBOL_EXPORT +int get_shstdep_value (void) { + return get_stnodep_value (); +} diff --git a/meson/test cases/common/145 recursive linking/shstdep/meson.build b/meson/test cases/common/145 recursive linking/shstdep/meson.build new file mode 100644 index 000000000..008f9f893 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/shstdep/meson.build @@ -0,0 +1 @@ +shstdep = shared_library('shstdep', 'lib.c', link_with : stnodep) diff --git a/meson/test cases/common/145 recursive linking/stnodep/lib.c b/meson/test cases/common/145 recursive linking/stnodep/lib.c new file mode 100644 index 
000000000..4bc50bea2 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/stnodep/lib.c @@ -0,0 +1,6 @@ +#include "../lib.h" + +SYMBOL_EXPORT +int get_stnodep_value (void) { + return 2; +} diff --git a/meson/test cases/common/145 recursive linking/stnodep/meson.build b/meson/test cases/common/145 recursive linking/stnodep/meson.build new file mode 100644 index 000000000..77f7129b6 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/stnodep/meson.build @@ -0,0 +1,2 @@ +stnodep = static_library('stnodep', 'lib.c', + c_args : '-DMESON_STATIC_BUILD') diff --git a/meson/test cases/common/145 recursive linking/stshdep/lib.c b/meson/test cases/common/145 recursive linking/stshdep/lib.c new file mode 100644 index 000000000..3cfa12bcb --- /dev/null +++ b/meson/test cases/common/145 recursive linking/stshdep/lib.c @@ -0,0 +1,8 @@ +#include "../lib.h" + +int get_shnodep_value (void); + +SYMBOL_EXPORT +int get_stshdep_value (void) { + return get_shnodep_value (); +} diff --git a/meson/test cases/common/145 recursive linking/stshdep/meson.build b/meson/test cases/common/145 recursive linking/stshdep/meson.build new file mode 100644 index 000000000..0967c1ce4 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/stshdep/meson.build @@ -0,0 +1,2 @@ +stshdep = static_library('stshdep', 'lib.c', link_with : shnodep, + c_args : '-DMESON_STATIC_BUILD') diff --git a/meson/test cases/common/145 recursive linking/ststdep/lib.c b/meson/test cases/common/145 recursive linking/ststdep/lib.c new file mode 100644 index 000000000..fca870669 --- /dev/null +++ b/meson/test cases/common/145 recursive linking/ststdep/lib.c @@ -0,0 +1,8 @@ +#include "../lib.h" + +int get_stnodep_value (void); + +SYMBOL_EXPORT +int get_ststdep_value (void) { + return get_stnodep_value (); +} diff --git a/meson/test cases/common/145 recursive linking/ststdep/meson.build b/meson/test cases/common/145 recursive linking/ststdep/meson.build new file mode 100644 index 000000000..3602442aa --- /dev/null +++ b/meson/test cases/common/145 recursive linking/ststdep/meson.build @@ -0,0 +1,2 @@ +ststdep = static_library('ststdep', 'lib.c', link_with : stnodep, + c_args : '-DMESON_STATIC_BUILD') diff --git a/meson/test cases/common/146 library at root/lib.c b/meson/test cases/common/146 library at root/lib.c new file mode 100644 index 000000000..a5b3dc3b8 --- /dev/null +++ b/meson/test cases/common/146 library at root/lib.c @@ -0,0 +1,6 @@ +#if defined _WIN32 || defined __CYGWIN__ +__declspec(dllexport) +#endif +int fn(void) { + return -1; +} diff --git a/meson/test cases/common/146 library at root/main/main.c b/meson/test cases/common/146 library at root/main/main.c new file mode 100644 index 000000000..eadf7e8ce --- /dev/null +++ b/meson/test cases/common/146 library at root/main/main.c @@ -0,0 +1,5 @@ +extern int fn(void); + +int main(void) { + return 1 + fn(); +} diff --git a/meson/test cases/common/146 library at root/main/meson.build b/meson/test cases/common/146 library at root/main/meson.build new file mode 100644 index 000000000..557378ae5 --- /dev/null +++ b/meson/test cases/common/146 library at root/main/meson.build @@ -0,0 +1,2 @@ +exe = executable('main', 'main.c', link_with : lib) +test('stuff works', exe) diff --git a/meson/test cases/common/146 library at root/meson.build b/meson/test cases/common/146 library at root/meson.build new file mode 100644 index 000000000..e6526711a --- /dev/null +++ b/meson/test cases/common/146 library at root/meson.build @@ -0,0 +1,3 @@ +project('lib@root', 'c') +lib 
= library('lib', 'lib.c') +subdir('main') diff --git a/meson/test cases/common/147 simd/fallback.c b/meson/test cases/common/147 simd/fallback.c new file mode 100644 index 000000000..ab435f433 --- /dev/null +++ b/meson/test cases/common/147 simd/fallback.c @@ -0,0 +1,8 @@ +#include + +void increment_fallback(float arr[4]) { + int i; + for(i=0; i<4; i++) { + arr[i]++; + } +} diff --git a/meson/test cases/common/147 simd/include/simdheader.h b/meson/test cases/common/147 simd/include/simdheader.h new file mode 100644 index 000000000..6515e413e --- /dev/null +++ b/meson/test cases/common/147 simd/include/simdheader.h @@ -0,0 +1,3 @@ +#pragma once + +#define I_CAN_HAZ_SIMD diff --git a/meson/test cases/common/147 simd/meson.build b/meson/test cases/common/147 simd/meson.build new file mode 100644 index 000000000..2628a1234 --- /dev/null +++ b/meson/test cases/common/147 simd/meson.build @@ -0,0 +1,44 @@ +project('simd', 'c') + +simd = import('unstable-simd') + +cc = meson.get_compiler('c') + +cdata = configuration_data() + +if not meson.is_cross_build() and host_machine.cpu_family() == 'arm' and cc.get_id() == 'clang' + message('Adding -march=armv7 because assuming that this build happens on Raspbian.') + message('Its Clang seems to be misconfigured and does not support NEON by default.') + add_project_arguments('-march=armv7', language : 'c') +endif + +if cc.get_id() == 'msvc' and cc.version().version_compare('<17') + error('MESON_SKIP_TEST VS2010 produces broken binaries on x86.') +endif + +# FIXME add [a, b] = function() +rval = simd.check('mysimds', + mmx : 'simd_mmx.c', + sse : 'simd_sse.c', + sse2 : 'simd_sse2.c', + sse3 : 'simd_sse3.c', + ssse3 : 'simd_ssse3.c', + sse41 : 'simd_sse41.c', + sse42 : 'simd_sse42.c', + avx : 'simd_avx.c', + avx2 : 'simd_avx2.c', + neon : 'simd_neon.c', + compiler : cc, + include_directories : include_directories('include')) + +simdlibs = rval[0] +cdata.merge_from(rval[1]) + +configure_file(output : 'simdconfig.h', + configuration : cdata) + +p = executable('simdtest', 'simdchecker.c', 'fallback.c', + link_with : simdlibs) + +test('simdtest', p) + diff --git a/meson/test cases/common/147 simd/simd_avx.c b/meson/test cases/common/147 simd/simd_avx.c new file mode 100644 index 000000000..5f45a4e2f --- /dev/null +++ b/meson/test cases/common/147 simd/simd_avx.c @@ -0,0 +1,49 @@ +#include + +#ifndef I_CAN_HAZ_SIMD +#error The correct internal header was not used +#endif + +#include +#include +#include + +#ifdef _MSC_VER +#include +int avx_available(void) { + return 1; +} +#else +#include +#include + +#ifdef __APPLE__ +/* + * Apple ships a broken __builtin_cpu_supports and + * some machines in the CI farm seem to be too + * old to have AVX so just always return 0 here. 
+ */ +int avx_available(void) { return 0; } +#else + +int avx_available(void) { + return __builtin_cpu_supports("avx"); +} +#endif +#endif + +void increment_avx(float arr[4]) { + double darr[4]; + darr[0] = arr[0]; + darr[1] = arr[1]; + darr[2] = arr[2]; + darr[3] = arr[3]; + __m256d val = _mm256_loadu_pd(darr); + __m256d one = _mm256_set1_pd(1.0); + __m256d result = _mm256_add_pd(val, one); + _mm256_storeu_pd(darr, result); + arr[0] = (float)darr[0]; + arr[1] = (float)darr[1]; + arr[2] = (float)darr[2]; + arr[3] = (float)darr[3]; +} diff --git a/meson/test cases/common/147 simd/simd_avx2.c b/meson/test cases/common/147 simd/simd_avx2.c new file mode 100644 index 000000000..c79819b75 --- /dev/null +++ b/meson/test cases/common/147 simd/simd_avx2.c @@ -0,0 +1,42 @@ +#include +#include +#include + +/* + * FIXME add proper runtime detection for VS. + */ + +#ifdef _MSC_VER +#include +int avx2_available(void) { + return 0; +} +#else +#include +#include + +#if defined(__APPLE__) +int avx2_available(void) { return 0; } +#else +int avx2_available(void) { + return __builtin_cpu_supports("avx2"); +} +#endif +#endif + +void increment_avx2(float arr[4]) { + double darr[4]; + darr[0] = arr[0]; + darr[1] = arr[1]; + darr[2] = arr[2]; + darr[3] = arr[3]; + __m256d val = _mm256_loadu_pd(darr); + __m256d one = _mm256_set1_pd(1.0); + __m256d result = _mm256_add_pd(val, one); + _mm256_storeu_pd(darr, result); + one = _mm256_permute4x64_pd(one, 66); /* A no-op, just here to use AVX2. */ + arr[0] = (float)darr[0]; + arr[1] = (float)darr[1]; + arr[2] = (float)darr[2]; + arr[3] = (float)darr[3]; +} diff --git a/meson/test cases/common/147 simd/simd_mmx.c b/meson/test cases/common/147 simd/simd_mmx.c new file mode 100644 index 000000000..76054420b --- /dev/null +++ b/meson/test cases/common/147 simd/simd_mmx.c @@ -0,0 +1,67 @@ +#include +#include + +#include + +#ifdef _MSC_VER +#include +int mmx_available(void) { + return 1; +} +/* Contrary to MSDN documentation, MMX intrinsics + * just plain don't work. + */ +void increment_mmx(float arr[4]) { + arr[0]++; + arr[1]++; + arr[2]++; + arr[3]++; +} +#elif defined(__MINGW32__) +int mmx_available(void) { + return 1; +} +/* MinGW does not seem to ship with MMX or it is broken. + */ +void increment_mmx(float arr[4]) { + arr[0]++; + arr[1]++; + arr[2]++; + arr[3]++; +} +#else +#include +#include + +#if defined(__APPLE__) +int mmx_available(void) { return 1; } +#else +int mmx_available(void) { + return __builtin_cpu_supports("mmx"); +} +#endif +void increment_mmx(float arr[4]) { + /* Super ugly but we know that values in arr are always small + * enough to fit in int16; + */ + int i; + __m64 packed = _mm_set_pi16(arr[3], arr[2], arr[1], arr[0]); + __m64 incr = _mm_set1_pi16(1); + __m64 result = _mm_add_pi16(packed, incr); + /* Should be + * int64_t unpacker = _m_to_int64(result); + * but it does not exist on 32 bit platforms for some reason. + */ + int64_t unpacker = (int64_t)(result); + _mm_empty(); + for(i=0; i<4; i++) { + /* This fails on GCC 8 when optimizations are enabled. + * Disable it. Patches welcome to fix this. 
+ arr[i] = (float)(unpacker & ((1<<16)-1)); + unpacker >>= 16; + */ + arr[i] += 1.0f; + } +} + +#endif diff --git a/meson/test cases/common/147 simd/simd_neon.c b/meson/test cases/common/147 simd/simd_neon.c new file mode 100644 index 000000000..2834b3096 --- /dev/null +++ b/meson/test cases/common/147 simd/simd_neon.c @@ -0,0 +1,20 @@ +#include +#include + +#include +#include + +int neon_available(void) { + return 1; /* Incorrect, but I don't know how to check this properly. */ +} + +void increment_neon(float arr[4]) { + float32x2_t a1, a2, one; + a1 = vld1_f32(arr); + a2 = vld1_f32(&arr[2]); + one = vdup_n_f32(1.0); + a1 = vadd_f32(a1, one); + a2 = vadd_f32(a2, one); + vst1_f32(arr, a1); + vst1_f32(&arr[2], a2); +} diff --git a/meson/test cases/common/147 simd/simd_sse.c b/meson/test cases/common/147 simd/simd_sse.c new file mode 100644 index 000000000..6014e0cc9 --- /dev/null +++ b/meson/test cases/common/147 simd/simd_sse.c @@ -0,0 +1,29 @@ +#include +#include + +#ifdef _MSC_VER +#include +int sse_available(void) { + return 1; +} +#else + +#include +#include +#include + +#if defined(__APPLE__) +int sse_available(void) { return 1; } +#else +int sse_available(void) { + return __builtin_cpu_supports("sse"); +} +#endif +#endif + +void increment_sse(float arr[4]) { + __m128 val = _mm_load_ps(arr); + __m128 one = _mm_set_ps1(1.0); + __m128 result = _mm_add_ps(val, one); + _mm_storeu_ps(arr, result); +} diff --git a/meson/test cases/common/147 simd/simd_sse2.c b/meson/test cases/common/147 simd/simd_sse2.c new file mode 100644 index 000000000..445afb631 --- /dev/null +++ b/meson/test cases/common/147 simd/simd_sse2.c @@ -0,0 +1,37 @@ +#include +#include +#include + +#ifdef _MSC_VER +int sse2_available(void) { + return 1; +} + +#else +#include +#include + +#if defined(__APPLE__) +int sse2_available(void) { return 1; } +#else +int sse2_available(void) { + return __builtin_cpu_supports("sse2"); +} +#endif +#endif + +void increment_sse2(float arr[4]) { + ALIGN_16 double darr[4]; + __m128d val1 = _mm_set_pd(arr[0], arr[1]); + __m128d val2 = _mm_set_pd(arr[2], arr[3]); + __m128d one = _mm_set_pd(1.0, 1.0); + __m128d result = _mm_add_pd(val1, one); + _mm_store_pd(darr, result); + result = _mm_add_pd(val2, one); + _mm_store_pd(&darr[2], result); + arr[0] = (float)darr[1]; + arr[1] = (float)darr[0]; + arr[2] = (float)darr[3]; + arr[3] = (float)darr[2]; +} + diff --git a/meson/test cases/common/147 simd/simd_sse3.c b/meson/test cases/common/147 simd/simd_sse3.c new file mode 100644 index 000000000..29a35e60f --- /dev/null +++ b/meson/test cases/common/147 simd/simd_sse3.c @@ -0,0 +1,38 @@ +#include +#include + +#ifdef _MSC_VER +#include +int sse3_available(void) { + return 1; +} +#else + +#include +#include +#include + +#if defined(__APPLE__) +int sse3_available(void) { return 1; } +#else +int sse3_available(void) { + return __builtin_cpu_supports("sse3"); +} +#endif +#endif + +void increment_sse3(float arr[4]) { + ALIGN_16 double darr[4]; + __m128d val1 = _mm_set_pd(arr[0], arr[1]); + __m128d val2 = _mm_set_pd(arr[2], arr[3]); + __m128d one = _mm_set_pd(1.0, 1.0); + __m128d result = _mm_add_pd(val1, one); + _mm_store_pd(darr, result); + result = _mm_add_pd(val2, one); + _mm_store_pd(&darr[2], result); + result = _mm_hadd_pd(val1, val2); /* This does nothing. Only here so we use an SSE3 instruction. 
*/ + arr[0] = (float)darr[1]; + arr[1] = (float)darr[0]; + arr[2] = (float)darr[3]; + arr[3] = (float)darr[2]; +} diff --git a/meson/test cases/common/147 simd/simd_sse41.c b/meson/test cases/common/147 simd/simd_sse41.c new file mode 100644 index 000000000..29f25554a --- /dev/null +++ b/meson/test cases/common/147 simd/simd_sse41.c @@ -0,0 +1,40 @@ +#include +#include + +#include + +#ifdef _MSC_VER +#include + +int sse41_available(void) { + return 1; +} + +#else +#include +#include + +#if defined(__APPLE__) +int sse41_available(void) { return 1; } +#else +int sse41_available(void) { + return __builtin_cpu_supports("sse4.1"); +} +#endif +#endif + +void increment_sse41(float arr[4]) { + ALIGN_16 double darr[4]; + __m128d val1 = _mm_set_pd(arr[0], arr[1]); + __m128d val2 = _mm_set_pd(arr[2], arr[3]); + __m128d one = _mm_set_pd(1.0, 1.0); + __m128d result = _mm_add_pd(val1, one); + result = _mm_ceil_pd(result); /* A no-op, only here to use a SSE4.1 intrinsic. */ + _mm_store_pd(darr, result); + result = _mm_add_pd(val2, one); + _mm_store_pd(&darr[2], result); + arr[0] = (float)darr[1]; + arr[1] = (float)darr[0]; + arr[2] = (float)darr[3]; + arr[3] = (float)darr[2]; +} diff --git a/meson/test cases/common/147 simd/simd_sse42.c b/meson/test cases/common/147 simd/simd_sse42.c new file mode 100644 index 000000000..f1564e2b9 --- /dev/null +++ b/meson/test cases/common/147 simd/simd_sse42.c @@ -0,0 +1,43 @@ +#include +#include +#include + +#ifdef _MSC_VER +#include + +int sse42_available(void) { + return 1; +} + +#else + +#include +#include + +#ifdef __APPLE__ +int sse42_available(void) { + return 1; +} +#else +int sse42_available(void) { + return __builtin_cpu_supports("sse4.2"); +} +#endif + +#endif + +void increment_sse42(float arr[4]) { + ALIGN_16 double darr[4]; + __m128d val1 = _mm_set_pd(arr[0], arr[1]); + __m128d val2 = _mm_set_pd(arr[2], arr[3]); + __m128d one = _mm_set_pd(1.0, 1.0); + __m128d result = _mm_add_pd(val1, one); + _mm_store_pd(darr, result); + result = _mm_add_pd(val2, one); + _mm_store_pd(&darr[2], result); + _mm_crc32_u32(42, 99); /* A no-op, only here to use an SSE4.2 instruction. */ + arr[0] = (float)darr[1]; + arr[1] = (float)darr[0]; + arr[2] = (float)darr[3]; + arr[3] = (float)darr[2]; +} diff --git a/meson/test cases/common/147 simd/simd_ssse3.c b/meson/test cases/common/147 simd/simd_ssse3.c new file mode 100644 index 000000000..fa557f4c1 --- /dev/null +++ b/meson/test cases/common/147 simd/simd_ssse3.c @@ -0,0 +1,48 @@ +#include +#include + +#include +#include + +#ifdef _MSC_VER +#include + +int ssse3_available(void) { + return 1; +} + +#else + +#include +#include + +int ssse3_available(void) { +#ifdef __APPLE__ + return 1; +#elif defined(__clang__) + /* https://github.com/numpy/numpy/issues/8130 */ + return __builtin_cpu_supports("sse4.1"); +#else + return __builtin_cpu_supports("ssse3"); +#endif +} + +#endif + +void increment_ssse3(float arr[4]) { + ALIGN_16 double darr[4]; + __m128d val1 = _mm_set_pd(arr[0], arr[1]); + __m128d val2 = _mm_set_pd(arr[2], arr[3]); + __m128d one = _mm_set_pd(1.0, 1.0); + __m128d result = _mm_add_pd(val1, one); + __m128i tmp1, tmp2; + tmp1 = tmp2 = _mm_set1_epi16(0); + _mm_store_pd(darr, result); + result = _mm_add_pd(val2, one); + _mm_store_pd(&darr[2], result); + tmp1 = _mm_hadd_epi32(tmp1, tmp2); /* This does nothing. Only here so we use an SSSE3 instruction. 
*/ + arr[0] = (float)darr[1]; + arr[1] = (float)darr[0]; + arr[2] = (float)darr[3]; + arr[3] = (float)darr[2]; +} diff --git a/meson/test cases/common/147 simd/simdchecker.c b/meson/test cases/common/147 simd/simdchecker.c new file mode 100644 index 000000000..c7a0a978a --- /dev/null +++ b/meson/test cases/common/147 simd/simdchecker.c @@ -0,0 +1,143 @@ +#include +#include +#include + +typedef void (*simd_func)(float*); + +int check_simd_implementation(float *four, + const float *four_initial, + const char *simd_type, + const float *expected, + simd_func fptr, + const int blocksize) { + int rv = 0; + memcpy(four, four_initial, blocksize*sizeof(float)); + printf("Using %s.\n", simd_type); + fptr(four); + for(int i=0; i + +#ifdef _MSC_VER +#define ALIGN_16 __declspec(align(16)) +#else +#include +#define ALIGN_16 alignas(16) +#endif + + +/* Yes, I do know that arr[4] decays into a pointer + * as a function argument. Don't do this in real code + * but for this test it is ok. + */ + +void increment_fallback(float arr[4]); + +#if HAVE_MMX +int mmx_available(void); +void increment_mmx(float arr[4]); +#endif + +#if HAVE_SSE +int sse_available(void); +void increment_sse(float arr[4]); +#endif + +#if HAVE_SSE2 +int sse2_available(void); +void increment_sse2(float arr[4]); +#endif + +#if HAVE_SSE3 +int sse3_available(void); +void increment_sse3(float arr[4]); +#endif + +#if HAVE_SSSE3 +int ssse3_available(void); +void increment_ssse3(float arr[4]); +#endif + +#if HAVE_SSE41 +int sse41_available(void); +void increment_sse41(float arr[4]); +#endif + +#if HAVE_SSE42 +int sse42_available(void); +void increment_sse42(float arr[4]); +#endif + +#if HAVE_AVX +int avx_available(void); +void increment_avx(float arr[4]); +#endif + +#if HAVE_AVX2 +int avx2_available(void); +void increment_avx2(float arr[4]); +#endif + +#if HAVE_NEON +int neon_available(void); +void increment_neon(float arr[4]); +#endif + +#if HAVE_ALTIVEC +int altivec_available(void); +void increment_altivec(float arr[4]); +#endif + +/* And so on. */ diff --git a/meson/test cases/common/148 shared module resolving symbol in executable/meson.build b/meson/test cases/common/148 shared module resolving symbol in executable/meson.build new file mode 100644 index 000000000..4e5188f12 --- /dev/null +++ b/meson/test cases/common/148 shared module resolving symbol in executable/meson.build @@ -0,0 +1,20 @@ +project('shared module resolving symbol in executable', 'c') + +# The shared module contains a reference to the symbol 'func_from_executable', +# which is always provided by the executable which loads it. This symbol can be +# resolved at run-time by an ELF loader. But when building PE/COFF objects, all +# symbols must be resolved at link-time, so an implib is generated for the +# executable, and the shared module linked with it. +# +# See testcase 125 for an example of the more complex portability gymnastics +# required if we do not know (at link-time) what provides the symbol. 
+ +cc = meson.get_compiler('c') +if cc.get_id() == 'pgi' + error('MESON_SKIP_TEST PGI has its own unique set of macros that would need to be handled') +endif + +dl = meson.get_compiler('c').find_library('dl', required: false) +e = executable('prog', 'prog.c', dependencies: dl, export_dynamic: true) +m = shared_module('module', 'module.c', link_with: e) +test('test', e, args: m.full_path()) diff --git a/meson/test cases/common/148 shared module resolving symbol in executable/module.c b/meson/test cases/common/148 shared module resolving symbol in executable/module.c new file mode 100644 index 000000000..64374d590 --- /dev/null +++ b/meson/test cases/common/148 shared module resolving symbol in executable/module.c @@ -0,0 +1,16 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +extern int func_from_executable(void); + +int DLL_PUBLIC func(void) { + return func_from_executable(); +} diff --git a/meson/test cases/common/148 shared module resolving symbol in executable/prog.c b/meson/test cases/common/148 shared module resolving symbol in executable/prog.c new file mode 100644 index 000000000..b2abcdb18 --- /dev/null +++ b/meson/test cases/common/148 shared module resolving symbol in executable/prog.c @@ -0,0 +1,61 @@ +#include +#include +#ifdef _WIN32 +#include +#else +#include +#endif + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +typedef int (*fptr) (void); + +int DLL_PUBLIC +func_from_executable(void) +{ + return 42; +} + +int main(int argc, char **argv) +{ + int expected, actual; + fptr importedfunc; + + if (argc=0) {}; // noop + +#ifdef _WIN32 + HMODULE h = LoadLibraryA(argv[1]); +#else + void *h = dlopen(argv[1], RTLD_NOW); +#endif + assert(h != NULL); + +#ifdef _WIN32 + importedfunc = (fptr) GetProcAddress (h, "func"); +#else + importedfunc = (fptr) dlsym(h, "func"); +#endif + assert(importedfunc != NULL); + assert(importedfunc != func_from_executable); + + actual = (*importedfunc)(); + expected = func_from_executable(); + assert(actual == expected); + +#ifdef _WIN32 + FreeLibrary(h); +#else + dlclose(h); +#endif + + return 0; +} diff --git a/meson/test cases/common/149 dotinclude/dotproc.c b/meson/test cases/common/149 dotinclude/dotproc.c new file mode 100644 index 000000000..f48c330bd --- /dev/null +++ b/meson/test cases/common/149 dotinclude/dotproc.c @@ -0,0 +1,10 @@ +#include"stdio.h" + +#ifndef WRAPPER_INCLUDED +#error The wrapper stdio.h was not included. 
+#endif + +int main(void) { + printf("Eventually I got printed.\n"); + return 0; +} diff --git a/meson/test cases/common/149 dotinclude/meson.build b/meson/test cases/common/149 dotinclude/meson.build new file mode 100644 index 000000000..e0c2cd7a0 --- /dev/null +++ b/meson/test cases/common/149 dotinclude/meson.build @@ -0,0 +1,5 @@ +project('dotinclude', 'c') + +executable('dotproc', 'dotproc.c', + implicit_include_directories : false) + diff --git a/meson/test cases/common/149 dotinclude/stdio.h b/meson/test cases/common/149 dotinclude/stdio.h new file mode 100644 index 000000000..b6bd09fc4 --- /dev/null +++ b/meson/test cases/common/149 dotinclude/stdio.h @@ -0,0 +1,6 @@ +// There is no #pragma once because we _want_ to cause an eternal loop +// if this wrapper invokes itself. + +#define WRAPPER_INCLUDED + +#include diff --git a/meson/test cases/common/15 if/meson.build b/meson/test cases/common/15 if/meson.build new file mode 100644 index 000000000..f8d829547 --- /dev/null +++ b/meson/test cases/common/15 if/meson.build @@ -0,0 +1,72 @@ +project('if test', 'c') + +var1 = true +set_variable('var2', false) + +if var1 + exe = executable('prog', 'prog.c') +endif + +if var2 + exe = executable('breakbreakbreak', 'crashing.c') +endif + +test('iftest', exe) + +if not is_variable('var1') + error('Is_variable fail.') +endif + +if is_variable('nonexisting') + error('Is_variable fail 2.') +endif + +if not get_variable('var1', false) + error('Get_variable fail.') +endif + +if get_variable('nonexisting', false) + error('Get_variable fail.') +endif + + +# Now test elseif + +t = true +f = false + +if true + message('Ok.') +elif true + error('Error') +else + error('Error') +endif + +if f + error('Error.') +elif t + message('Ok') +else + error('Error') +endif + +if f + error('Error.') +elif false + error('Error') +else + message('Ok') +endif + +# Test plain else + +var = false + +if var + exe = executable('break', 'break.c') +else + exe = executable('eprog', 'prog.c') +endif + +test('elsetest', exe) diff --git a/meson/test cases/common/15 if/prog.c b/meson/test cases/common/15 if/prog.c new file mode 100644 index 000000000..78f2de106 --- /dev/null +++ b/meson/test cases/common/15 if/prog.c @@ -0,0 +1 @@ +int main(void) { return 0; } diff --git a/meson/test cases/common/150 reserved targets/all/meson.build b/meson/test cases/common/150 reserved targets/all/meson.build new file mode 100644 index 000000000..adee88214 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/all/meson.build @@ -0,0 +1 @@ +executable('test-all', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/benchmark/meson.build b/meson/test cases/common/150 reserved targets/benchmark/meson.build new file mode 100644 index 000000000..242cc235d --- /dev/null +++ b/meson/test cases/common/150 reserved targets/benchmark/meson.build @@ -0,0 +1 @@ +executable('test-benchmark', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/clean-ctlist/meson.build b/meson/test cases/common/150 reserved targets/clean-ctlist/meson.build new file mode 100644 index 000000000..75eb207db --- /dev/null +++ b/meson/test cases/common/150 reserved targets/clean-ctlist/meson.build @@ -0,0 +1 @@ +executable('test-clean-ctlist', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/clean-gcda/meson.build b/meson/test cases/common/150 reserved targets/clean-gcda/meson.build new file mode 100644 index 000000000..488a527fc --- /dev/null +++ b/meson/test cases/common/150 reserved 
targets/clean-gcda/meson.build @@ -0,0 +1 @@ +executable('test-clean-gcda', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/clean-gcno/meson.build b/meson/test cases/common/150 reserved targets/clean-gcno/meson.build new file mode 100644 index 000000000..eec789ac2 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/clean-gcno/meson.build @@ -0,0 +1 @@ +executable('test-clean-gcno', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/clean/meson.build b/meson/test cases/common/150 reserved targets/clean/meson.build new file mode 100644 index 000000000..4e27b6c27 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/clean/meson.build @@ -0,0 +1 @@ +executable('test-clean', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/coverage-html/meson.build b/meson/test cases/common/150 reserved targets/coverage-html/meson.build new file mode 100644 index 000000000..10a4cc89b --- /dev/null +++ b/meson/test cases/common/150 reserved targets/coverage-html/meson.build @@ -0,0 +1 @@ +executable('test-coverage-html', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/coverage-text/meson.build b/meson/test cases/common/150 reserved targets/coverage-text/meson.build new file mode 100644 index 000000000..21dcae5fe --- /dev/null +++ b/meson/test cases/common/150 reserved targets/coverage-text/meson.build @@ -0,0 +1 @@ +executable('test-coverage-text', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/coverage-xml/meson.build b/meson/test cases/common/150 reserved targets/coverage-xml/meson.build new file mode 100644 index 000000000..44d7bfbe3 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/coverage-xml/meson.build @@ -0,0 +1 @@ +executable('test-coverage-xml', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/coverage/meson.build b/meson/test cases/common/150 reserved targets/coverage/meson.build new file mode 100644 index 000000000..b40105550 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/coverage/meson.build @@ -0,0 +1 @@ +executable('test-coverage', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/dist/meson.build b/meson/test cases/common/150 reserved targets/dist/meson.build new file mode 100644 index 000000000..951bbb4da --- /dev/null +++ b/meson/test cases/common/150 reserved targets/dist/meson.build @@ -0,0 +1 @@ +executable('test-dist', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/distcheck/meson.build b/meson/test cases/common/150 reserved targets/distcheck/meson.build new file mode 100644 index 000000000..12b9328ad --- /dev/null +++ b/meson/test cases/common/150 reserved targets/distcheck/meson.build @@ -0,0 +1 @@ +executable('test-distcheck', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/install/meson.build b/meson/test cases/common/150 reserved targets/install/meson.build new file mode 100644 index 000000000..483990186 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/install/meson.build @@ -0,0 +1 @@ +executable('test-install', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/meson.build b/meson/test cases/common/150 reserved targets/meson.build new file mode 100644 index 000000000..24fd937bc --- /dev/null +++ b/meson/test cases/common/150 reserved targets/meson.build @@ -0,0 +1,34 @@ +project('reserved target names', 'c') + # FIXME: Setting this causes it to leak to all other tests + #default_options : 
['b_coverage=true'] + +subdir('all') +subdir('benchmark') +subdir('clean') +subdir('clean-ctlist') +subdir('clean-gcda') +subdir('clean-gcno') +subdir('coverage') +subdir('coverage-html') +subdir('coverage-text') +subdir('coverage-xml') +subdir('dist') +subdir('distcheck') +subdir('install') +# We don't have a 'PHONY' directory because Windows and OSX +# choke horribly when there are two entries with the same +# name but different case. +subdir('phony') +subdir('reconfigure') +subdir('scan-build') +subdir('test') +subdir('uninstall') + +subdir('runtarget') + +py3 = import('python3').find_python() + +custom_target('ctlist-test', output : 'out.txt', + command : [py3, '-c', 'print("")'], + capture : true, + build_by_default : true) diff --git a/meson/test cases/common/150 reserved targets/phony/meson.build b/meson/test cases/common/150 reserved targets/phony/meson.build new file mode 100644 index 000000000..6710fc124 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/phony/meson.build @@ -0,0 +1 @@ +executable('test-phony', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/reconfigure/meson.build b/meson/test cases/common/150 reserved targets/reconfigure/meson.build new file mode 100644 index 000000000..c3ea3da8a --- /dev/null +++ b/meson/test cases/common/150 reserved targets/reconfigure/meson.build @@ -0,0 +1 @@ +executable('test-reconfigure', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/runtarget/echo.py b/meson/test cases/common/150 reserved targets/runtarget/echo.py new file mode 100644 index 000000000..7f9f17992 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/runtarget/echo.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys + +if len(sys.argv) > 1: + print(sys.argv[1]) diff --git a/meson/test cases/common/150 reserved targets/runtarget/meson.build b/meson/test cases/common/150 reserved targets/runtarget/meson.build new file mode 100644 index 000000000..7ba0b8c02 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/runtarget/meson.build @@ -0,0 +1,2 @@ +configure_file(output : 'config.h', configuration: configuration_data()) +run_target('runtarget', command : [find_program('echo.py')]) diff --git a/meson/test cases/common/150 reserved targets/scan-build/meson.build b/meson/test cases/common/150 reserved targets/scan-build/meson.build new file mode 100644 index 000000000..100205346 --- /dev/null +++ b/meson/test cases/common/150 reserved targets/scan-build/meson.build @@ -0,0 +1 @@ +executable('test-scan-build', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/test.c b/meson/test cases/common/150 reserved targets/test.c new file mode 100644 index 000000000..03b2213bb --- /dev/null +++ b/meson/test cases/common/150 reserved targets/test.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/150 reserved targets/test/meson.build b/meson/test cases/common/150 reserved targets/test/meson.build new file mode 100644 index 000000000..4ab123cdd --- /dev/null +++ b/meson/test cases/common/150 reserved targets/test/meson.build @@ -0,0 +1 @@ +executable('test-test', '../test.c') diff --git a/meson/test cases/common/150 reserved targets/uninstall/meson.build b/meson/test cases/common/150 reserved targets/uninstall/meson.build new file mode 100644 index 000000000..21c6ca60d --- /dev/null +++ b/meson/test cases/common/150 reserved targets/uninstall/meson.build @@ -0,0 +1 @@ +executable('test-uninstall', '../test.c') diff --git a/meson/test 
cases/common/151 duplicate source names/dir1/file.c b/meson/test cases/common/151 duplicate source names/dir1/file.c new file mode 100644 index 000000000..1c4753eb9 --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir1/file.c @@ -0,0 +1,16 @@ +extern int dir2; +extern int dir2_dir1; +extern int dir3; +extern int dir3_dir1; + +int main(void) { + if (dir2 != 20) + return 1; + if (dir2_dir1 != 21) + return 1; + if (dir3 != 30) + return 1; + if (dir3_dir1 != 31) + return 1; + return 0; +} diff --git a/meson/test cases/common/151 duplicate source names/dir1/meson.build b/meson/test cases/common/151 duplicate source names/dir1/meson.build new file mode 100644 index 000000000..00bc85daf --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir1/meson.build @@ -0,0 +1 @@ +sources += files('file.c') diff --git a/meson/test cases/common/151 duplicate source names/dir2/dir1/file.c b/meson/test cases/common/151 duplicate source names/dir2/dir1/file.c new file mode 100644 index 000000000..5aac8e53f --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir2/dir1/file.c @@ -0,0 +1 @@ +int dir2_dir1 = 21; diff --git a/meson/test cases/common/151 duplicate source names/dir2/file.c b/meson/test cases/common/151 duplicate source names/dir2/file.c new file mode 100644 index 000000000..6cf8d6602 --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir2/file.c @@ -0,0 +1 @@ +int dir2 = 20; diff --git a/meson/test cases/common/151 duplicate source names/dir2/meson.build b/meson/test cases/common/151 duplicate source names/dir2/meson.build new file mode 100644 index 000000000..f116a0200 --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir2/meson.build @@ -0,0 +1 @@ +sources += files('file.c', 'dir1/file.c') diff --git a/meson/test cases/common/151 duplicate source names/dir3/dir1/file.c b/meson/test cases/common/151 duplicate source names/dir3/dir1/file.c new file mode 100644 index 000000000..04667c2ff --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir3/dir1/file.c @@ -0,0 +1 @@ +int dir3_dir1 = 31; diff --git a/meson/test cases/common/151 duplicate source names/dir3/file.c b/meson/test cases/common/151 duplicate source names/dir3/file.c new file mode 100644 index 000000000..d16d0a8cf --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir3/file.c @@ -0,0 +1 @@ +int dir3 = 30; diff --git a/meson/test cases/common/151 duplicate source names/dir3/meson.build b/meson/test cases/common/151 duplicate source names/dir3/meson.build new file mode 100644 index 000000000..70ddbf2d0 --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/dir3/meson.build @@ -0,0 +1 @@ +lib = static_library('lib', 'file.c', 'dir1/file.c') diff --git a/meson/test cases/common/151 duplicate source names/meson.build b/meson/test cases/common/151 duplicate source names/meson.build new file mode 100644 index 000000000..635aa8cf7 --- /dev/null +++ b/meson/test cases/common/151 duplicate source names/meson.build @@ -0,0 +1,19 @@ +project('proj', 'c') + +if meson.backend() == 'xcode' + # Xcode gives object files unique names but only if they would clash. For example + # two files named lib.o instead get the following names: + # + # lib-4fbe522d8ba4cb1f1b89cc2df640a2336b92e1a5565f0a4c5a79b5b5e2969eb9.o + # lib-4fbe522d8ba4cb1f1b89cc2df640a2336deeff2bc2297affaadbe20f5cbfee56.o + # + # No-one has reverse engineered the naming scheme so we would access them. 
+ # If you feel up to the challenge, patches welcome. + error('MESON_SKIP_TEST, Xcode can not extract objs when they would have the same filename.') +endif + +sources = [] +subdir('dir1') +subdir('dir2') +subdir('dir3') +executable('a.out', sources : sources, objects : lib.extract_all_objects()) diff --git a/meson/test cases/common/152 index customtarget/check_args.py b/meson/test cases/common/152 index customtarget/check_args.py new file mode 100644 index 000000000..8663a6fe2 --- /dev/null +++ b/meson/test cases/common/152 index customtarget/check_args.py @@ -0,0 +1,18 @@ +#!python3 + +import sys +from pathlib import Path + +def main(): + if len(sys.argv) != 2: + print(sys.argv) + return 1 + if sys.argv[1] != 'gen.c': + print(sys.argv) + return 2 + Path('foo').touch() + + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/meson/test cases/common/152 index customtarget/gen_sources.py b/meson/test cases/common/152 index customtarget/gen_sources.py new file mode 100644 index 000000000..0bdb529cd --- /dev/null +++ b/meson/test cases/common/152 index customtarget/gen_sources.py @@ -0,0 +1,49 @@ +# Copyright © 2017 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import textwrap + +HEADER = textwrap.dedent('''\ + void stringify(int foo, char * buffer); + ''') + +CODE = textwrap.dedent('''\ + #include <stdio.h> + + #ifndef WORKS + # error "This shouldn't have been included" + #endif + + void stringify(int foo, char * buffer) { + sprintf(buffer, "%i", foo); + } + ''') + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--header') + parser.add_argument('--code') + args = parser.parse_args() + + with open(args.header, 'w') as f: + f.write(HEADER) + + with open(args.code, 'w') as f: + f.write(CODE) + + +if __name__ == '__main__': + main() diff --git a/meson/test cases/common/152 index customtarget/lib.c b/meson/test cases/common/152 index customtarget/lib.c new file mode 100644 index 000000000..17117d54c --- /dev/null +++ b/meson/test cases/common/152 index customtarget/lib.c @@ -0,0 +1,20 @@ +/* Copyright © 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "gen.h" + +void func(char * buffer) { + stringify(1, buffer); +} diff --git a/meson/test cases/common/152 index customtarget/meson.build b/meson/test cases/common/152 index customtarget/meson.build new file mode 100644 index 000000000..efddfaca3 --- /dev/null +++ b/meson/test cases/common/152 index customtarget/meson.build @@ -0,0 +1,80 @@ +# Copyright © 2017 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +project('custom_target_index', 'c', default_options : 'c_std=c89') + +py_mod = import('python3') +prog_python = py_mod.find_python() + +gen = custom_target( + 'gen.[ch]', + input : 'gen_sources.py', + output : ['gen.c', 'gen.h'], + command : [prog_python, '@INPUT@', '--header', '@OUTPUT1@', '--code', '@OUTPUT0@'], +) + +has_not_changed = false +if is_disabler(gen) + has_not_changed = true +else + has_not_changed = true +endif + +assert(has_not_changed, 'Custom target has changed.') + +assert(not is_disabler(gen), 'Custom target is a disabler.') + +lib = static_library( + 'libfoo', + ['lib.c', gen[1]], +) + +has_not_changed = false +if is_disabler(lib) + has_not_changed = true +else + has_not_changed = true +endif + +assert(has_not_changed, 'Static library has changed.') + +assert(not is_disabler(lib), 'Static library is a disabler.') + +custom_target( + 'foo', + input: gen[0], + output: 'foo', + command: [find_program('check_args.py'), '@INPUT@'], +) + +subdir('subdir') + +gen = disabler() + +assert(is_disabler(gen), 'Generator is not a disabler.') + +lib = static_library( + 'libfoo', + ['lib.c', gen[1]], +) + +assert(is_disabler(lib), 'Static library is not a disabler.') + +if lib.found() + lib_disabled = false +else + lib_disabled = true +endif + +assert(lib_disabled, 'Static library was not disabled.') diff --git a/meson/test cases/common/152 index customtarget/subdir/foo.c b/meson/test cases/common/152 index customtarget/subdir/foo.c new file mode 100644 index 000000000..c620a1183 --- /dev/null +++ b/meson/test cases/common/152 index customtarget/subdir/foo.c @@ -0,0 +1,22 @@ +/* Copyright © 2017 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "gen.h" + +int main(void) { + char buf[50]; + stringify(10, buf); + return 0; +} diff --git a/meson/test cases/common/152 index customtarget/subdir/meson.build b/meson/test cases/common/152 index customtarget/subdir/meson.build new file mode 100644 index 000000000..47bcd322a --- /dev/null +++ b/meson/test cases/common/152 index customtarget/subdir/meson.build @@ -0,0 +1,19 @@ +# Copyright © 2017 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +foo = executable( + 'foo', + ['foo.c', gen[0], gen[1]], + c_args : '-DWORKS', +) diff --git a/meson/test cases/common/153 wrap file should not failed/meson.build b/meson/test cases/common/153 wrap file should not failed/meson.build new file mode 100644 index 000000000..48d10688a --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/meson.build @@ -0,0 +1,16 @@ +project('mainproj', 'c', + default_options : ['wrap_mode=nodownload'], +) + +subproject('zlib') +foo = subproject('foo') +bar = subproject('bar') + +libfoo = foo.get_variable('libfoo') +libbar = bar.get_variable('libbar') + +executable('grabprog', files('src/subprojects/prog.c')) +executable('grabprog2', files('src/subprojects/foo/prog2.c')) +subdir('src') + +subproject('patchdir') diff --git a/meson/test cases/common/153 wrap file should not failed/src/meson.build b/meson/test cases/common/153 wrap file should not failed/src/meson.build new file mode 100644 index 000000000..0c82165d1 --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/src/meson.build @@ -0,0 +1,6 @@ +executable('grabprog3', files('subprojects/prog.c')) +executable('grabprog4', files('subprojects/foo/prog2.c')) + +texe = executable('testexe', files('test.c'), link_with: [libfoo, libbar]) + +test('t1', texe) diff --git a/meson/test cases/common/153 wrap file should not failed/src/subprojects/foo/prog2.c b/meson/test cases/common/153 wrap file should not failed/src/subprojects/foo/prog2.c new file mode 100644 index 000000000..849b40004 --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/src/subprojects/foo/prog2.c @@ -0,0 +1,7 @@ +#include <stdio.h> + +int main(void) { + printf("Do not have a file layout like this in your own projects.\n"); + printf("This is only to test that this works.\n"); + return 0; +} diff --git a/meson/test cases/common/153 wrap file should not failed/src/subprojects/prog.c b/meson/test cases/common/153 wrap file should not failed/src/subprojects/prog.c new file mode 100644 index 000000000..849b40004 --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/src/subprojects/prog.c @@ -0,0 +1,7 @@ +#include <stdio.h> + +int main(void) { + printf("Do not have a file layout like this in your own projects.\n"); + printf("This is only to test that this works.\n"); + return 0; +} diff --git a/meson/test cases/common/153 wrap file should not failed/src/test.c b/meson/test cases/common/153 wrap file should not failed/src/test.c new file mode 100644 index 000000000..34cf99183 --- /dev/null +++ b/meson/test 
cases/common/153 wrap file should not failed/src/test.c @@ -0,0 +1,9 @@ +#include + +int bar_dummy_func(void); +int dummy_func(void); + +int main(void) { + printf("Hello world %d\n", bar_dummy_func() + dummy_func()); + return 0; +} diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/.gitignore b/meson/test cases/common/153 wrap file should not failed/subprojects/.gitignore new file mode 100644 index 000000000..aabded6b2 --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/.gitignore @@ -0,0 +1,3 @@ +/foo-1.0 +/bar-1.0 +/foo-1.0-patchdir diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/bar.wrap b/meson/test cases/common/153 wrap file should not failed/subprojects/bar.wrap new file mode 100644 index 000000000..4e8f7e392 --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/bar.wrap @@ -0,0 +1,8 @@ +[wrap-file] +directory = bar-1.0 +lead_directory_missing = true + +source_filename = bar-1.0.tar.xz +source_hash = f0f61948530dc0d33e3028cd71a9f8ee869f6b3665960d8f41d715cf4aed6467 + +patch_filename = bar-1.0-patch.tar.xz diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/foo.wrap b/meson/test cases/common/153 wrap file should not failed/subprojects/foo.wrap new file mode 100644 index 000000000..c67c5e5db --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/foo.wrap @@ -0,0 +1,11 @@ +[wrap-file] +directory = foo-1.0 + +source_url = http://something.invalid +source_filename = foo-1.0.tar.xz +source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1 +lead_directory_missing = true + +patch_url = https://something.invalid/patch +patch_filename = foo-1.0-patch.tar.xz +patch_hash = d0ddc5e60fdb27d808552f5ac8d0bb603ea2cba306538b4427b985535b26c9c5 diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz b/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz new file mode 100644 index 000000000..f257a1965 Binary files /dev/null and b/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz differ diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz b/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz new file mode 100644 index 000000000..d90a9e8b0 Binary files /dev/null and b/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz differ diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build b/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build new file mode 100644 index 000000000..dbaf91f2b --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build @@ -0,0 +1,2 @@ +project('static lib patchdir', 'c') +libfoo = static_library('foo', 'foo.c') diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/patchdir.wrap b/meson/test cases/common/153 wrap file should not failed/subprojects/patchdir.wrap new file mode 100644 index 000000000..1a2134c3d --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/patchdir.wrap @@ -0,0 +1,9 @@ +[wrap-file] +directory = foo-1.0-patchdir + +source_url = 
http://something.invalid +source_filename = foo-1.0.tar.xz +source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1 +lead_directory_missing = true + +patch_directory = foo-1.0 diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/foo.c b/meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/foo.c new file mode 100644 index 000000000..267b43aa5 --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/foo.c @@ -0,0 +1,3 @@ +int dummy_func(void) { + return 42; +} diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/meson.build b/meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/meson.build new file mode 100644 index 000000000..70d493fcc --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/zlib-1.2.8/meson.build @@ -0,0 +1,2 @@ +project('shared lib', 'c') +library('foo', 'foo.c') diff --git a/meson/test cases/common/153 wrap file should not failed/subprojects/zlib.wrap b/meson/test cases/common/153 wrap file should not failed/subprojects/zlib.wrap new file mode 100644 index 000000000..6d5896f79 --- /dev/null +++ b/meson/test cases/common/153 wrap file should not failed/subprojects/zlib.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = zlib-1.2.8 + +source_url = http://zlib.net/fossils/zlib-1.2.8.tar.gz +source_filename = zlib-1.2.8.tar.gz +source_hash = 36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d + +patch_url = https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.8/8/get_zip +patch_filename = zlib-1.2.8-8-wrap.zip +patch_hash = 17c52a0e0c59ce926d3959005d5cd8178c6c7e2c9a4a1304279a8320c955ac60 diff --git a/meson/test cases/common/154 includedir subproj/meson.build b/meson/test cases/common/154 includedir subproj/meson.build new file mode 100644 index 000000000..b3de5af6c --- /dev/null +++ b/meson/test cases/common/154 includedir subproj/meson.build @@ -0,0 +1,9 @@ +project('include dir in subproj test', 'c') + + +subproject('inctest') + + +exe = executable('prog', 'prog.c') + +test('dummy', exe) diff --git a/meson/test cases/common/154 includedir subproj/prog.c b/meson/test cases/common/154 includedir subproj/prog.c new file mode 100644 index 000000000..03b2213bb --- /dev/null +++ b/meson/test cases/common/154 includedir subproj/prog.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/154 includedir subproj/subprojects/inctest/include/incfile.h b/meson/test cases/common/154 includedir subproj/subprojects/inctest/include/incfile.h new file mode 100644 index 000000000..ec740dab0 --- /dev/null +++ b/meson/test cases/common/154 includedir subproj/subprojects/inctest/include/incfile.h @@ -0,0 +1,2 @@ + +/* file which is used in the subproject */ diff --git a/meson/test cases/common/154 includedir subproj/subprojects/inctest/meson.build b/meson/test cases/common/154 includedir subproj/subprojects/inctest/meson.build new file mode 100644 index 000000000..74aabcb6d --- /dev/null +++ b/meson/test cases/common/154 includedir subproj/subprojects/inctest/meson.build @@ -0,0 +1,13 @@ + +project('subproj with includedir', 'c') + + + +compile_check = ''' +#include "incfile.h" +''' + +if not meson.get_compiler('c').compiles(compile_check, name : 'include in subproj', + include_directories: include_directories('include')) + error('failed') +endif diff --git a/meson/test cases/common/155 subproject dir name collision/a.c b/meson/test 
cases/common/155 subproject dir name collision/a.c new file mode 100644 index 000000000..7510a1b55 --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/a.c @@ -0,0 +1,13 @@ +#include +char func_b(void); +char func_c(void); + +int main(void) { + if(func_b() != 'b') { + return 1; + } + if(func_c() != 'c') { + return 2; + } + return 0; +} diff --git a/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/b.c b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/b.c new file mode 100644 index 000000000..4d71c0f9d --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/b.c @@ -0,0 +1,20 @@ +#include +char func_c(void); + +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_b(void) { + if(func_c() != 'c') { + exit(3); + } + return 'b'; +} diff --git a/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/meson.build b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/meson.build new file mode 100644 index 000000000..8f4cb023e --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/B/meson.build @@ -0,0 +1,4 @@ +project('B', 'c') +C = subproject('C') +c = C.get_variable('c') +b = library('b', 'b.c', link_with : c) diff --git a/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/c.c b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/c.c new file mode 100644 index 000000000..facd19943 --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/c.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_c(void) { + return 'c'; +} diff --git a/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/meson.build b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/meson.build new file mode 100644 index 000000000..5d890977e --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/custom_subproject_dir/C/meson.build @@ -0,0 +1,2 @@ +project('C', 'c') +c = library('c', 'c.c') diff --git a/meson/test cases/common/155 subproject dir name collision/meson.build b/meson/test cases/common/155 subproject dir name collision/meson.build new file mode 100644 index 000000000..55312175e --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/meson.build @@ -0,0 +1,12 @@ +project('A', 'c', subproject_dir:'custom_subproject_dir') + +B = subproject('B') +b = B.get_variable('b') + +C = subproject('C') +c = C.get_variable('c') + +subdir('other_subdir') + +a = executable('a', 'a.c', link_with : [b, c]) +test('a test', a) diff --git a/meson/test cases/common/155 subproject dir name collision/other_subdir/custom_subproject_dir/other.c b/meson/test cases/common/155 subproject dir name collision/other_subdir/custom_subproject_dir/other.c new file mode 100644 index 
000000000..fa1953e6b --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/other_subdir/custom_subproject_dir/other.c @@ -0,0 +1,19 @@ +#include + +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_b(void) { + if('c' != 'c') { + exit(3); + } + return 'b'; +} diff --git a/meson/test cases/common/155 subproject dir name collision/other_subdir/meson.build b/meson/test cases/common/155 subproject dir name collision/other_subdir/meson.build new file mode 100644 index 000000000..37cb623ca --- /dev/null +++ b/meson/test cases/common/155 subproject dir name collision/other_subdir/meson.build @@ -0,0 +1 @@ +other = library('other', 'custom_subproject_dir/other.c') diff --git a/meson/test cases/common/156 config tool variable/meson.build b/meson/test cases/common/156 config tool variable/meson.build new file mode 100644 index 000000000..95841179c --- /dev/null +++ b/meson/test cases/common/156 config tool variable/meson.build @@ -0,0 +1,31 @@ +# Copyright © 2017 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +project('config tool variable', 'cpp') + + +dep_llvm = dependency('llvm', method : 'config-tool', required : false) +if not dep_llvm.found() + error('MESON_SKIP_TEST LLVM not installed.') +endif + +includedir = dep_llvm.get_configtool_variable('includedir') +includedir = join_paths(includedir, 'llvm') +if host_machine.system() == 'windows' + cmd = run_command(['dir', includedir]) +else + cmd = run_command(['ls', includedir]) +endif + +assert(cmd.returncode() == 0, 'did not run successfully') diff --git a/meson/test cases/common/157 custom target subdir depend files/copyfile.py b/meson/test cases/common/157 custom target subdir depend files/copyfile.py new file mode 100644 index 000000000..ff42ac359 --- /dev/null +++ b/meson/test cases/common/157 custom target subdir depend files/copyfile.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys +import shutil + +shutil.copyfile(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/157 custom target subdir depend files/meson.build b/meson/test cases/common/157 custom target subdir depend files/meson.build new file mode 100644 index 000000000..44f5c7157 --- /dev/null +++ b/meson/test cases/common/157 custom target subdir depend files/meson.build @@ -0,0 +1,7 @@ +project('custom target subdir depend files', 'c') + +copy = find_program('copyfile.py') + +subdir('subdir') + +executable('foo', foo_src) diff --git a/meson/test cases/common/157 custom target subdir depend files/subdir/dep.dat b/meson/test cases/common/157 custom target subdir depend files/subdir/dep.dat new file mode 100644 index 000000000..5daee496d --- /dev/null +++ b/meson/test cases/common/157 custom target subdir depend files/subdir/dep.dat @@ -0,0 +1 @@ +You can depend on this file. \ No newline at end of file diff --git a/meson/test cases/common/157 custom target subdir depend files/subdir/foo.c.in b/meson/test cases/common/157 custom target subdir depend files/subdir/foo.c.in new file mode 100644 index 000000000..a867b3237 --- /dev/null +++ b/meson/test cases/common/157 custom target subdir depend files/subdir/foo.c.in @@ -0,0 +1,6 @@ +#include + +int main(void) { + printf("foo is working.\n"); + return 0; +} diff --git a/meson/test cases/common/157 custom target subdir depend files/subdir/meson.build b/meson/test cases/common/157 custom target subdir depend files/subdir/meson.build new file mode 100644 index 000000000..f9d31c4c7 --- /dev/null +++ b/meson/test cases/common/157 custom target subdir depend files/subdir/meson.build @@ -0,0 +1,6 @@ +foo_src = custom_target('foo_src', + depend_files : 'dep.dat', + input : 'foo.c.in', + output : 'foo.c', + command : [copy, '@INPUT@', '@OUTPUT@'] +) diff --git a/meson/test cases/common/158 disabler/meson.build b/meson/test cases/common/158 disabler/meson.build new file mode 100644 index 000000000..d132e2b52 --- /dev/null +++ b/meson/test cases/common/158 disabler/meson.build @@ -0,0 +1,153 @@ +project('dolphin option', 'c') + +d = disabler() + +full_path = d.full_path() +assert(is_disabler(full_path), 'Method call is not a disabler') + +d2 = dependency(d) +d3 = (d == d2) +d4 = d + 0 +d5 = d2 or true +set_variable('d6', disabler()) + +has_not_changed = false +if is_disabler(d) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'Disabler has changed.') + +assert(is_disabler(d), 'Disabler was not identified correctly.') +assert(is_disabler(d2), 'Function laundered disabler was not identified correctly.') +assert(is_disabler(d3), 'Disabler comparison should yield disabler.') 
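For context on the feature this disabler test keeps exercising in the assertions above and below: a disabler object makes every expression it participates in collapse into another disabler. A minimal sketch of the usual real-world pattern, with a dependency and source names invented for illustration and not taken from this patch:

    # becomes a disabler instead of a plain not-found object when zlib is missing
    zlib_dep = dependency('zlib', required : false, disabler : true)

    # with a disabler among its arguments, this call yields a disabler too,
    # so the target is silently skipped instead of causing an error
    executable('zlib-tool', 'zlib_tool.c', dependencies : zlib_dep)

The propagation rules asserted in this file are what make that pattern work without any explicit if/else around the target.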
+assert(is_disabler(d4), 'Disabler addition should yield disabler.') +assert(is_disabler(d5), 'Disabler logic op should yield disabler.') +assert(is_disabler(d6), 'set_variable with a disabler should set variable to disabler.') + +assert(d, 'Disabler did not cause this to be skipped.') +assert(d2, 'Function laundered disabler did not cause this to be skipped.') +assert(d3, 'Disabler comparison should yield disabler and thus this would not be called.') +assert(d4, 'Disabler addition should yield disabler and thus this would not be called.') +assert(d5, 'Disabler logic op should yield disabler and thus this would not be called.') +assert(d6, 'set_variable with a disabler did not cause this to be skipped.') + +number = 0 + +if d + number = 1 +else + number = 2 +endif + +has_not_changed = false +if is_disabler(number) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'Number has changed.') + +assert(not is_disabler(number), 'Number should not be a disabler.') +assert(number == 0, 'Plain if handled incorrectly, value should be 0 but is @0@'.format(number)) + +if d.found() + number = 1 +else + number = 2 +endif + +assert(number == 2, 'If found handled incorrectly, value should be 2 but is @0@'.format(number)) + +dep = dependency('notfounddep', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) +assert(is_disabler(app), 'App is not a disabler.') +app = executable('myapp', 'notfound.c', dependencies : [[dep]]) +assert(is_disabler(app), 'App is not a disabler.') + +cc = meson.get_compiler('c') +dep = cc.find_library('notfounddep', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) +assert(is_disabler(app), 'App is not a disabler.') + +dep = find_program('donotfindme', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) +assert(is_disabler(app), 'App is not a disabler.') + +has_not_changed = false +if is_disabler(app) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'App has changed.') + +assert(not is_disabler(is_variable('d6')), 'is_variable should not return a disabler') +assert(is_variable('d6'), 'is_variable for a disabler should return true') + +if_is_not_disabled = false +if is_variable('d6') + if_is_not_disabled = true +else + if_is_not_disabled = true +endif +assert(if_is_not_disabled, 'Disabler in is_variable should not skip blocks') + +get_d = get_variable('d6') +assert(is_disabler(get_d), 'get_variable should yield a disabler') + +get_fallback_d = get_variable('nonexistant', disabler()) +assert(is_disabler(get_fallback_d), 'get_variable fallback should yield a disabler') + +var_true = true +get_no_fallback_d = get_variable('var_true', disabler()) +assert(not is_disabler(get_no_fallback_d), 'get_variable should not fallback to disabler') +assert(get_no_fallback_d, 'get_variable should yield true') + +assert(is_disabler(get_variable(disabler())), 'get_variable should yield a disabler') +assert(is_disabler(get_variable(disabler(), var_true)), 'get_variable should yield a disabler') + +if_is_disabled = true +if disabler() + if_is_disabled = false +else + if_is_disabled = false +endif +assert(if_is_disabled, 'Disabler in "if condition" must skip both blocks') + +if not disabler() + if_is_disabled = false +else + if_is_disabled = false +endif +assert(if_is_disabled, 'Disabler in "if not condition" must skip both blocks') + +if disabler() == 1 + if_is_disabled = false +else + 
if_is_disabled = false +endif +assert(if_is_disabled, 'Disabler in "if a==b" must skip both blocks') + +loops = 0 +disablers = 0 +foreach i : [true, disabler(), true] + loops += 1 + if is_disabler(i) + disablers += 1 + endif +endforeach +assert(loops == 3, 'Disabler in foreach array') +assert(disablers == 1, 'Disabler in foreach array') + +loops = 0 +disablers = 0 +foreach k, i : {'a': true, 'b': disabler(), 'c': true} + loops += 1 + if is_disabler(i) + disablers += 1 + endif +endforeach +assert(loops == 3, 'Disabler in foreach dict') +assert(disablers == 1, 'Disabler in foreach dict') diff --git a/meson/test cases/common/159 array option/meson.build b/meson/test cases/common/159 array option/meson.build new file mode 100644 index 000000000..034b9a545 --- /dev/null +++ b/meson/test cases/common/159 array option/meson.build @@ -0,0 +1,17 @@ +# Copyright © 2017 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +project('array default options') + +assert(get_option('array') == ['foo', 'bar'], 'Default value for array is not equal to choices') diff --git a/meson/test cases/common/159 array option/meson_options.txt b/meson/test cases/common/159 array option/meson_options.txt new file mode 100644 index 000000000..7ed0ac14d --- /dev/null +++ b/meson/test cases/common/159 array option/meson_options.txt @@ -0,0 +1,19 @@ +# Copyright © 2017 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +option( + 'array', + type : 'array', + choices : ['foo', 'bar'], +) diff --git a/meson/test cases/common/16 comparison/meson.build b/meson/test cases/common/16 comparison/meson.build new file mode 100644 index 000000000..bba01684a --- /dev/null +++ b/meson/test cases/common/16 comparison/meson.build @@ -0,0 +1,154 @@ +project('comparison', 'c') + +# Compare equality of strings + +var1 = 'foo' +var2 = 'bar' + +if var1 == var2 + exe1 = executable('broken', 'broken.c') +else + exe1 = executable('prog1', 'prog.c') +endif + +if var1 == var1 + exe2 = executable('prog2', 'prog.c') +else + exe2 = executable('broken', 'broken.c') +endif + +if var1 != var2 + exe3 = executable('prog3', 'prog.c') +else + exe3 = executable('broken', 'broken.c') +endif + +if var1 != var1 + exe4 = executable('broken', 'broken.c') +else + exe4 = executable('prog4', 'prog.c') +endif + +test('equalfalse', exe1) +test('equaltrue', exe2) +test('nequaltrue', exe3) +test('nequalfalse', exe4) + +# Non-equality comparisons + +var3 = 3 +var4 = 4 + +if var3 < var4 + exe5 = executable('prog5', 'prog.c') +else + exe5 = executable('broken', 'broken.c') +endif + +if var3 < var3 + exe6 = executable('broken', 'broken.c') +else + exe6 = executable('prog6', 'prog.c') +endif + +if var4 > var3 + exe7 = executable('prog7', 'prog.c') +else + exe7 = executable('broken', 'broken.c') +endif + +if var3 > var3 + exe8 = executable('broken', 'broken.c') +else + exe8 = executable('prog8', 'prog.c') +endif + +if var4 <= var3 + exe9 = executable('broken', 'broken.c') +else + exe9 = executable('prog9', 'prog.c') +endif + +if var3 <= var3 + exe10 = executable('prog10', 'prog.c') +else + exe10 = executable('broken', 'broken.c') +endif + +if var3 >= var4 + exe11 = executable('broken', 'broken.c') +else + exe11 = executable('prog11', 'prog.c') +endif + +if var3 >= var3 + exe12 = executable('prog12', 'prog.c') +else + exe12 = executable('broken', 'broken.c') +endif + +test('lttrue', exe5) +test('ltfalse', exe6) +test('gttrue', exe7) +test('gtfalse', exe8) +test('lefalse', exe9) +test('letrue', exe10) +test('gefalse', exe11) +test('getrue', exe12) + +# Non-elementary type comparisons + +if exe1 == exe2 + exe13 = executable('broken', 'broken.c') +else + exe13 = executable('prog13', 'prog.c') +endif + +if exe1 == exe1 + exe14 = executable('prog14', 'prog.c') +else + exe14 = executable('broken', 'broken.c') +endif + +if exe1 != exe2 + exe15 = executable('prog15', 'prog.c') +else + exe15 = executable('broken', 'broken.c') +endif + +if exe1 != exe1 + exe16 = executable('broken', 'broken.c') +else + exe16 = executable('prog16', 'prog.c') +endif + +test('equalfalse', exe13) +test('equaltrue', exe14) +test('nequaltrue', exe15) +test('nequalfalse', exe16) + +# Equality comparisons of different elementary types +# (these all cause warnings currently, will become an error in future) + +assert([] != 'st', 'not equal') +assert([] != 1, 'not equal') +assert(2 != 'st', 'not equal') + +assert(not ([] == 'st'), 'not equal') +assert(not ([] == 1), 'not equal') +assert(not (2 == 'st'), 'not equal') + +# "in" and "not in" operators + +assert(1 in [1, 2], '''1 should be in [1, 2]''') +assert(3 not in [1, 2], '''3 shouldn't be in [1, 2]''') +assert(not (3 in [1, 2]), '''3 shouldn't be in [1, 2]''') + +assert('b' in ['a', 'b'], ''''b' should be in ['a', 'b']''') +assert('c' not in ['a', 'b'], ''''c' shouldn't be in ['a', 'b']''') + +assert(exe1 in [exe1, exe2], ''''exe1 should be in [exe1, exe2]''') +assert(exe3 not in [exe1, exe2], ''''exe3 shouldn't be in [exe1, exe2]''') + 
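For context on the dictionary checks that follow: the in and not in operators applied to a dictionary test its keys only, never its values. A minimal standalone sketch of the same semantics, using an invented dictionary rather than anything from the test files:

    d = {'a': 'b'}
    assert('a' in d, 'keys are matched by the in operator')
    assert(not ('b' in d), 'values are not considered by the in operator')
    assert(d.has_key('a'), 'has_key() is the method form of the same check')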
+assert('a' in {'a': 'b'}, '''1 should be in {'a': 'b'}''') +assert('b' not in {'a': 'b'}, '''1 should be in {'a': 'b'}''') diff --git a/meson/test cases/common/16 comparison/prog.c b/meson/test cases/common/16 comparison/prog.c new file mode 100644 index 000000000..78f2de106 --- /dev/null +++ b/meson/test cases/common/16 comparison/prog.c @@ -0,0 +1 @@ +int main(void) { return 0; } diff --git a/meson/test cases/common/160 custom target template substitution/checkcopy.py b/meson/test cases/common/160 custom target template substitution/checkcopy.py new file mode 100644 index 000000000..ab9f436ce --- /dev/null +++ b/meson/test cases/common/160 custom target template substitution/checkcopy.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 + +import sys +import shutil + +if '@INPUT1@' in sys.argv[1]: + shutil.copyfile(sys.argv[2], sys.argv[3]) +else: + sys.exit('String @INPUT1@ not found in "{}"'.format(sys.argv[1])) diff --git a/meson/test cases/common/160 custom target template substitution/foo.c.in b/meson/test cases/common/160 custom target template substitution/foo.c.in new file mode 100644 index 000000000..a867b3237 --- /dev/null +++ b/meson/test cases/common/160 custom target template substitution/foo.c.in @@ -0,0 +1,6 @@ +#include + +int main(void) { + printf("foo is working.\n"); + return 0; +} diff --git a/meson/test cases/common/160 custom target template substitution/meson.build b/meson/test cases/common/160 custom target template substitution/meson.build new file mode 100644 index 000000000..737408ebf --- /dev/null +++ b/meson/test cases/common/160 custom target template substitution/meson.build @@ -0,0 +1,17 @@ +project('custom target template substitution', 'c') + +check = find_program('checkcopy.py') + +config = configuration_data() + +config_file = configure_file(configuration : config, output : 'x@IN') + +# Check that substitution does not find @FOO@ and then misses @INPUT0@. +# Check the resulting x@INPUT1@ is not replaced. 
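The custom_target below deliberately abuses the placeholder syntax to verify that only whole, recognised placeholders such as @INPUT0@ are substituted, and that text produced by a substitution is not scanned again. For contrast, a minimal sketch of ordinary placeholder use; the file names and the use of cat are invented for illustration and are not part of this test:

    gen = custom_target('concat',
      input : ['a.txt', 'b.txt'],
      output : 'ab.txt',
      # @INPUT0@ and @INPUT1@ expand to the paths of the two inputs;
      # with capture : true the command's stdout becomes ab.txt.
      command : [find_program('cat'), '@INPUT0@', '@INPUT1@'],
      capture : true)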
+foo = custom_target('runcheck', + input : [config_file, 'foo.c.in'], + output : 'foo.c', + command : [check, '-D@FOO@INPUT0@PUT1@', '@INPUT1@', '@OUTPUT@'] +) + +executable('foo', foo) diff --git a/meson/test cases/common/161 not-found dependency/meson.build b/meson/test cases/common/161 not-found dependency/meson.build new file mode 100644 index 000000000..02072b6a0 --- /dev/null +++ b/meson/test cases/common/161 not-found dependency/meson.build @@ -0,0 +1,14 @@ +project('dep-test', 'c') + +dep = dependency('', required:false) +if dep.found() + error('not-found dependency was found') +endif + +assert(dep.type_name() == 'not-found', 'dependency should be of type "not-found" not ' + dep.type_name()) + +library('testlib', 'testlib.c', dependencies: [dep]) +subdir('sub', if_found: dep) + +subdep = dependency('', fallback: ['trivial', 'trivial_dep']) +missing = dependency('', fallback: ['missing', 'missing_dep'], required: false) diff --git a/meson/test cases/common/161 not-found dependency/sub/meson.build b/meson/test cases/common/161 not-found dependency/sub/meson.build new file mode 100644 index 000000000..2a33cae06 --- /dev/null +++ b/meson/test cases/common/161 not-found dependency/sub/meson.build @@ -0,0 +1 @@ +error('should be disabled by subdir(if_found:)') diff --git a/meson/test cases/common/161 not-found dependency/subprojects/trivial/meson.build b/meson/test cases/common/161 not-found dependency/subprojects/trivial/meson.build new file mode 100644 index 000000000..8769c7008 --- /dev/null +++ b/meson/test cases/common/161 not-found dependency/subprojects/trivial/meson.build @@ -0,0 +1,3 @@ +project('trivial subproject', 'c') +trivial_lib = static_library('trivial', 'trivial.c', install: false) +trivial_dep = declare_dependency(link_with: trivial_lib) diff --git a/meson/test cases/common/161 not-found dependency/subprojects/trivial/trivial.c b/meson/test cases/common/161 not-found dependency/subprojects/trivial/trivial.c new file mode 100644 index 000000000..cb0c02fcc --- /dev/null +++ b/meson/test cases/common/161 not-found dependency/subprojects/trivial/trivial.c @@ -0,0 +1,3 @@ +int subfunc(void) { + return 42; +} diff --git a/meson/test cases/common/161 not-found dependency/testlib.c b/meson/test cases/common/161 not-found dependency/testlib.c new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/162 subdir if_found/meson.build b/meson/test cases/common/162 subdir if_found/meson.build new file mode 100644 index 000000000..2c640cfad --- /dev/null +++ b/meson/test cases/common/162 subdir if_found/meson.build @@ -0,0 +1,11 @@ +project('subdir if found', 'c') + +found_dep = declare_dependency() +not_found_dep = dependency('nonexisting', required : false) + +subdir('nonexisting_dir', if_found : not_found_dep) + +variable = 3 + +subdir('subdir', if_found : found_dep) +assert(variable == 5, 'Subdir was not properly entered.') diff --git a/meson/test cases/common/162 subdir if_found/subdir/meson.build b/meson/test cases/common/162 subdir if_found/subdir/meson.build new file mode 100644 index 000000000..1030e2551 --- /dev/null +++ b/meson/test cases/common/162 subdir if_found/subdir/meson.build @@ -0,0 +1 @@ +variable = 5 diff --git a/meson/test cases/common/163 default options prefix dependent defaults/meson.build b/meson/test cases/common/163 default options prefix dependent defaults/meson.build new file mode 100644 index 000000000..9ca4ec552 --- /dev/null +++ b/meson/test cases/common/163 default options prefix dependent defaults/meson.build @@ -0,0 +1 
@@ +project('default options prefix dependent defaults ', 'c', default_options : ['sharedstatedir=/sharedstate', 'prefix=/usr']) diff --git a/meson/test cases/common/164 dependency factory/meson.build b/meson/test cases/common/164 dependency factory/meson.build new file mode 100644 index 000000000..9488cd41a --- /dev/null +++ b/meson/test cases/common/164 dependency factory/meson.build @@ -0,0 +1,66 @@ +project('dependency factory', 'c', meson_version : '>=0.53') + +dep = dependency('gl', method: 'pkg-config', required: false) +if dep.found() + assert(dep.type_name() == 'pkgconfig') + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('SDL2', method: 'pkg-config', required: false) +if dep.found() + assert(dep.type_name() == 'pkgconfig') + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('SDL2', method: 'config-tool', required: false) +if dep.found() + assert(dep.type_name() == 'config-tool') + dep.get_configtool_variable('prefix') +endif + +dep = dependency('Vulkan', method: 'pkg-config', required: false) +if dep.found() + assert(dep.type_name() == 'pkgconfig') + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('pcap', method: 'pkg-config', required: false) +if dep.found() + assert(dep.type_name() == 'pkgconfig') + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('pcap', method: 'config-tool', required: false) +if dep.found() + assert(dep.type_name() == 'config-tool') + dep.get_configtool_variable('prefix') +endif + +dep = dependency('cups', method: 'pkg-config', required: false) +if dep.found() + assert(dep.type_name() == 'pkgconfig') + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('cups', method: 'config-tool', required: false) +if dep.found() + assert(dep.type_name() == 'config-tool') + dep.get_configtool_variable('prefix') +endif + +dep = dependency('libwmf', method: 'pkg-config', required: false) +if dep.found() + assert(dep.type_name() == 'pkgconfig') + dep.get_pkgconfig_variable('prefix') +endif + +dep = dependency('libwmf', method: 'config-tool', required: false) +if dep.found() + assert(dep.type_name() == 'config-tool') + dep.get_configtool_variable('prefix') +endif + +dep = dependency('boost', method: 'system', required: false) +if dep.found() + assert(dep.type_name() == 'system') +endif diff --git a/meson/test cases/common/165 get project license/bar.c b/meson/test cases/common/165 get project license/bar.c new file mode 100644 index 000000000..f1bd822dc --- /dev/null +++ b/meson/test cases/common/165 get project license/bar.c @@ -0,0 +1,6 @@ +#include + +int main(void) { + printf("I'm a main project bar.\n"); + return 0; +} diff --git a/meson/test cases/common/165 get project license/meson.build b/meson/test cases/common/165 get project license/meson.build new file mode 100644 index 000000000..e7e9deb85 --- /dev/null +++ b/meson/test cases/common/165 get project license/meson.build @@ -0,0 +1,8 @@ +project('bar', 'c', license: 'Apache-2.0') + +executable('bar', 'bar.c') + +license = meson.project_license()[0] +if license != 'Apache-2.0' + error('The license should be Apache-2.0, but it is: ' + license) +endif diff --git a/meson/test cases/common/166 yield/meson.build b/meson/test cases/common/166 yield/meson.build new file mode 100644 index 000000000..9b115696f --- /dev/null +++ b/meson/test cases/common/166 yield/meson.build @@ -0,0 +1,7 @@ +project('yield_options', 'c') + +subproject('sub') + +assert(get_option('unshared_option') == 'one', 'Unshared option has wrong value in superproject.') 
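The assertions that continue below, together with the subproject's own checks, exercise option yielding: an option declared with yield : true in a subproject takes the superproject's value when an option of the same name and type exists there, while yield : false, or a mismatched type, keeps the subproject's local value. A condensed sketch of the effect as seen from inside the subproject, assuming the option declarations shipped with this test:

    message(get_option('shared_option'))    # prints 'two', the superproject value wins
    message(get_option('unshared_option'))  # prints 'three', the local value is kept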
+assert(get_option('shared_option') == 'two', 'Shared option has wrong value in superproject..') +assert(get_option('wrongtype_option') == 'three', 'Wrongtype option has wrong value in superproject..') diff --git a/meson/test cases/common/166 yield/meson_options.txt b/meson/test cases/common/166 yield/meson_options.txt new file mode 100644 index 000000000..9f58fbbc8 --- /dev/null +++ b/meson/test cases/common/166 yield/meson_options.txt @@ -0,0 +1,3 @@ +option('unshared_option', type : 'string', value : 'one') +option('shared_option', type : 'string', value : 'two') +option('wrongtype_option', type : 'string', value : 'three') diff --git a/meson/test cases/common/166 yield/subprojects/sub/meson.build b/meson/test cases/common/166 yield/subprojects/sub/meson.build new file mode 100644 index 000000000..832c13cc2 --- /dev/null +++ b/meson/test cases/common/166 yield/subprojects/sub/meson.build @@ -0,0 +1,5 @@ +project('subbie', 'c') + +assert(get_option('unshared_option') == 'three', 'Unshared option has wrong value in subproject.') +assert(get_option('shared_option') == 'two', 'Shared option has wrong value in subproject.') +assert(get_option('wrongtype_option') == true, 'Wrongtype option has wrong value in subproject.') diff --git a/meson/test cases/common/166 yield/subprojects/sub/meson_options.txt b/meson/test cases/common/166 yield/subprojects/sub/meson_options.txt new file mode 100644 index 000000000..101122eeb --- /dev/null +++ b/meson/test cases/common/166 yield/subprojects/sub/meson_options.txt @@ -0,0 +1,3 @@ +option('unshared_option', type : 'string', value : 'three', yield : false) +option('shared_option', type : 'string', value : 'four', yield : true) +option('wrongtype_option', type : 'boolean', value : true, yield : true) diff --git a/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/a.c b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/a.c new file mode 100644 index 000000000..f9848c136 --- /dev/null +++ b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/a.c @@ -0,0 +1,15 @@ +int func2(void); + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func(void) { return func2(); } + diff --git a/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/meson.build b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/meson.build new file mode 100644 index 000000000..1014db1be --- /dev/null +++ b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/meson.build @@ -0,0 +1,4 @@ +project('alpha project', 'c', subproject_dir: 'var/subprojects') + +b = subproject('beta') +l = library('a', 'a.c', link_with : b.get_variable('lb')) diff --git a/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/var/subprojects/wrap_files_might_be_here b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/var/subprojects/wrap_files_might_be_here new file mode 100644 index 000000000..8d1c8b69c --- /dev/null +++ b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/alpha/var/subprojects/wrap_files_might_be_here @@ -0,0 +1 @@ + diff --git 
a/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/b.c b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/b.c new file mode 100644 index 000000000..8c07177a6 --- /dev/null +++ b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/b.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func2(void) { + return 42; +} diff --git a/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/meson.build b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/meson.build new file mode 100644 index 000000000..1720d3e7c --- /dev/null +++ b/meson/test cases/common/167 subproject nested subproject dirs/contrib/subprojects/beta/meson.build @@ -0,0 +1,4 @@ +project('beta project', 'c') + +lb = shared_library('b', 'b.c') +notfound = dependency('', required : false) diff --git a/meson/test cases/common/167 subproject nested subproject dirs/meson.build b/meson/test cases/common/167 subproject nested subproject dirs/meson.build new file mode 100644 index 000000000..875eed350 --- /dev/null +++ b/meson/test cases/common/167 subproject nested subproject dirs/meson.build @@ -0,0 +1,11 @@ +project('gamma project', 'c', subproject_dir: 'contrib/subprojects') + +a = subproject('alpha') +lib = a.get_variable('l') + +# Ensure that the dependency version is not checked for a not-found dependency +notfound = dependency('', version : '>=1.0', required : false, + fallback : ['beta', 'notfound']) + +exe = executable('prog', 'prog.c', link_with : lib) +test('basic', exe) diff --git a/meson/test cases/common/167 subproject nested subproject dirs/prog.c b/meson/test cases/common/167 subproject nested subproject dirs/prog.c new file mode 100644 index 000000000..27162c5e0 --- /dev/null +++ b/meson/test cases/common/167 subproject nested subproject dirs/prog.c @@ -0,0 +1,5 @@ +int func(void); + +int main(void) { + return func() == 42 ? 
0 : 1; +} diff --git a/meson/test cases/common/168 preserve gendir/base.inp b/meson/test cases/common/168 preserve gendir/base.inp new file mode 100644 index 000000000..df967b96a --- /dev/null +++ b/meson/test cases/common/168 preserve gendir/base.inp @@ -0,0 +1 @@ +base diff --git a/meson/test cases/common/168 preserve gendir/com/mesonbuild/subbie.inp b/meson/test cases/common/168 preserve gendir/com/mesonbuild/subbie.inp new file mode 100644 index 000000000..df0f4e9ae --- /dev/null +++ b/meson/test cases/common/168 preserve gendir/com/mesonbuild/subbie.inp @@ -0,0 +1 @@ +subbie diff --git a/meson/test cases/common/168 preserve gendir/genprog.py b/meson/test cases/common/168 preserve gendir/genprog.py new file mode 100755 index 000000000..681c43a84 --- /dev/null +++ b/meson/test cases/common/168 preserve gendir/genprog.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 + +import os, sys, argparse + +h_templ = '''#pragma once + +int %s(void); +''' + +c_templ = '''#include"%s.h" + +int %s(void) { + return 0; +} +''' + +parser = argparse.ArgumentParser() +parser.add_argument('--searchdir', required=True) +parser.add_argument('--outdir', required=True) +parser.add_argument('ifiles', nargs='+') + +options = parser.parse_args() + +searchdir = options.searchdir +outdir = options.outdir +ifiles = options.ifiles + +rel_ofiles = [] + +for ifile in ifiles: + if not ifile.startswith(options.searchdir): + sys.exit(f'Input file {ifile} does not start with search dir {searchdir}.') + rel_ofile = ifile[len(searchdir):] + if rel_ofile[0] == '/' or rel_ofile[0] == '\\': + rel_ofile = rel_ofile[1:] + rel_ofiles.append(os.path.splitext(rel_ofile)[0]) + +ofile_bases = [os.path.join(outdir, i) for i in rel_ofiles] + +for i, ifile_name in enumerate(ifiles): + proto_name = open(ifile_name).readline().strip() + h_out = ofile_bases[i] + '.h' + c_out = ofile_bases[i] + '.c' + os.makedirs(os.path.split(ofile_bases[i])[0], exist_ok=True) + open(h_out, 'w').write(h_templ % (proto_name)) + open(c_out, 'w').write(c_templ % (proto_name, proto_name)) diff --git a/meson/test cases/common/168 preserve gendir/meson.build b/meson/test cases/common/168 preserve gendir/meson.build new file mode 100644 index 000000000..ce219f0d8 --- /dev/null +++ b/meson/test cases/common/168 preserve gendir/meson.build @@ -0,0 +1,13 @@ +project('preserve subdir', 'c') + +gprog = find_program('genprog.py') + +gen = generator(gprog, \ + output : ['@BASENAME@.c', '@BASENAME@.h'], + arguments : ['--searchdir=@CURRENT_SOURCE_DIR@', '--outdir=@BUILD_DIR@', '@INPUT@']) + +generated = gen.process('base.inp', 'com/mesonbuild/subbie.inp', + preserve_path_from : meson.current_source_dir()) + +e = executable('testprog', 'testprog.c', generated) +test('testprog', e) diff --git a/meson/test cases/common/168 preserve gendir/testprog.c b/meson/test cases/common/168 preserve gendir/testprog.c new file mode 100644 index 000000000..b6a99268a --- /dev/null +++ b/meson/test cases/common/168 preserve gendir/testprog.c @@ -0,0 +1,6 @@ +#include"base.h" +#include"com/mesonbuild/subbie.h" + +int main(void) { + return base() + subbie(); +} diff --git a/meson/test cases/common/169 source in dep/bar.cpp b/meson/test cases/common/169 source in dep/bar.cpp new file mode 100644 index 000000000..2ea623bf8 --- /dev/null +++ b/meson/test cases/common/169 source in dep/bar.cpp @@ -0,0 +1,5 @@ +extern "C" int foo(void); + +int main(void) { + return foo() != 42; +} diff --git a/meson/test cases/common/169 source in dep/foo.c b/meson/test cases/common/169 source in dep/foo.c new file mode 
100644 index 000000000..c1be8d053 --- /dev/null +++ b/meson/test cases/common/169 source in dep/foo.c @@ -0,0 +1,3 @@ +int foo(void) { + return 42; +} diff --git a/meson/test cases/common/169 source in dep/generated/funname b/meson/test cases/common/169 source in dep/generated/funname new file mode 100644 index 000000000..79f3c867f --- /dev/null +++ b/meson/test cases/common/169 source in dep/generated/funname @@ -0,0 +1 @@ +my_wonderful_function diff --git a/meson/test cases/common/169 source in dep/generated/genheader.py b/meson/test cases/common/169 source in dep/generated/genheader.py new file mode 100755 index 000000000..489db23d3 --- /dev/null +++ b/meson/test cases/common/169 source in dep/generated/genheader.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python3 + +import sys + +ifile = sys.argv[1] +ofile = sys.argv[2] + +templ = '''#pragma once + +int %s(void) { + return 42; +} +''' + +funname = open(ifile).readline().strip() + +open(ofile, 'w').write(templ % funname) diff --git a/meson/test cases/common/169 source in dep/generated/main.c b/meson/test cases/common/169 source in dep/generated/main.c new file mode 100644 index 000000000..1ab980ac6 --- /dev/null +++ b/meson/test cases/common/169 source in dep/generated/main.c @@ -0,0 +1,5 @@ +#include"funheader.h" + +int main(void) { + return my_wonderful_function() != 42; +} diff --git a/meson/test cases/common/169 source in dep/generated/meson.build b/meson/test cases/common/169 source in dep/generated/meson.build new file mode 100644 index 000000000..286270901 --- /dev/null +++ b/meson/test cases/common/169 source in dep/generated/meson.build @@ -0,0 +1,12 @@ +fp = find_program('genheader.py') + +genh = custom_target('genh', + input : 'funname', + output : 'funheader.h', + command : [fp, '@INPUT@', '@OUTPUT@']) + +dep = declare_dependency(sources : [genh]) + +e = executable('genuser', 'main.c', + dependencies : dep) +test('genuser', e) diff --git a/meson/test cases/common/169 source in dep/meson.build b/meson/test cases/common/169 source in dep/meson.build new file mode 100644 index 000000000..7111ba11f --- /dev/null +++ b/meson/test cases/common/169 source in dep/meson.build @@ -0,0 +1,8 @@ +project('foo', 'c', 'cpp') + +dep = declare_dependency(sources : 'foo.c') + +executable('bar', 'bar.cpp', + dependencies : dep) + +subdir('generated') diff --git a/meson/test cases/common/17 array/func.c b/meson/test cases/common/17 array/func.c new file mode 100644 index 000000000..a324dca21 --- /dev/null +++ b/meson/test cases/common/17 array/func.c @@ -0,0 +1 @@ +int func(void) { return 0; } diff --git a/meson/test cases/common/17 array/meson.build b/meson/test cases/common/17 array/meson.build new file mode 100644 index 000000000..0d1737488 --- /dev/null +++ b/meson/test cases/common/17 array/meson.build @@ -0,0 +1,8 @@ +project('array test', 'c') + +arr = [ + 'func.c', + 'prog.c'] + +exe = executable('prog', sources : arr) +test('arr test', exe) diff --git a/meson/test cases/common/17 array/prog.c b/meson/test cases/common/17 array/prog.c new file mode 100644 index 000000000..f794e1b1f --- /dev/null +++ b/meson/test cases/common/17 array/prog.c @@ -0,0 +1,3 @@ +extern int func(void); + +int main(void) { return func(); } diff --git a/meson/test cases/common/170 generator link whole/export.h b/meson/test cases/common/170 generator link whole/export.h new file mode 100644 index 000000000..f4f6f451b --- /dev/null +++ b/meson/test cases/common/170 generator link whole/export.h @@ -0,0 +1,18 @@ +#pragma once + +#if defined BUILDING_EMBEDDED + #define 
DLL_PUBLIC +#elif defined _WIN32 || defined __CYGWIN__ + #if defined BUILDING_DLL + #define DLL_PUBLIC __declspec(dllexport) + #else + #define DLL_PUBLIC __declspec(dllimport) + #endif +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif diff --git a/meson/test cases/common/170 generator link whole/generator.py b/meson/test cases/common/170 generator link whole/generator.py new file mode 100755 index 000000000..18a6cc2f5 --- /dev/null +++ b/meson/test cases/common/170 generator link whole/generator.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 + +import os +import os.path +import sys + + +def main(): + name = os.path.splitext(os.path.basename(sys.argv[1]))[0] + out = sys.argv[2] + hname = os.path.join(out, name + '.h') + cname = os.path.join(out, name + '.c') + print(os.getcwd(), hname) + with open(hname, 'w') as hfile: + hfile.write(''' +#pragma once +#include "export.h" +int DLL_PUBLIC {name}(void); +'''.format(name=name)) + with open(cname, 'w') as cfile: + cfile.write(''' +#include "{name}.h" +int {name}(void) {{ + return {size}; +}} +'''.format(name=name, size=len(name))) + + +if __name__ == '__main__': + main() diff --git a/meson/test cases/common/170 generator link whole/main.c b/meson/test cases/common/170 generator link whole/main.c new file mode 100644 index 000000000..7605022d8 --- /dev/null +++ b/meson/test cases/common/170 generator link whole/main.c @@ -0,0 +1,11 @@ +#include "meson_test_function.h" + +#include <stdio.h> + +int main(void) { + if (meson_test_function() != 19) { + printf("Bad meson_test_function()\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/170 generator link whole/meson.build b/meson/test cases/common/170 generator link whole/meson.build new file mode 100644 index 000000000..f5d3e5884 --- /dev/null +++ b/meson/test cases/common/170 generator link whole/meson.build @@ -0,0 +1,62 @@ +project('generator link_whole', 'c') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: whole-archive not supported in Xcode. Patches welcome.') +endif + +# This just generates foo.h and foo.c with int foo() defined. +gen_py = find_program('generator.py') +gen = generator(gen_py, + output: ['@BASENAME@.h', '@BASENAME@.c'], + arguments : ['@INPUT@', '@BUILD_DIR@']) + +# Test 1: link directly into executable +srcs = gen.process('meson_test_function.tmpl') +exe = executable('exe1', [srcs, 'main.c'], c_args : '-DBUILDING_EMBEDDED') +test('test1', exe) + +# Test 2: link into shared library and access from executable +srcs = gen.process('meson_test_function.tmpl') +shlib2 = shared_library('shlib2', [srcs], c_args : '-DBUILDING_DLL') +exe = executable('exe2', 'main.c', + link_with : shlib2, + include_directories : shlib2.private_dir_include(), +) +test('test2', exe) + +# Test 3: link into static library and access from executable +srcs = gen.process('meson_test_function.tmpl') +stlib3 = static_library('stlib3', [srcs], c_args : '-DBUILDING_EMBEDDED') +exe = executable('exe3', 'main.c', + c_args : '-DBUILDING_EMBEDDED', + link_with : stlib3, + include_directories : stlib3.private_dir_include(), +) +test('test3', exe) + +# Test 4: link into static library, link into shared +# and access from executable. To make sure static_library +# is not dropped use pull_meson_test_function helper. 
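+# (Editorial sketch, not part of the upstream test file.) With plain link_with, a
+# static archive only contributes the objects that resolve otherwise-undefined
+# symbols, so generated code that nothing references can be dropped; link_whole
+# forces every object in. Roughly, assuming a hypothetical static library 'inner':
+#
+#   inner = static_library('inner', gen.process('something.tmpl'))
+#   shared_library('outer', link_whole : inner)   # keeps all of inner's objects
+#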
+srcs = gen.process('meson_test_function.tmpl') +stlib4 = static_library('stlib4', [srcs], c_args : '-DBUILDING_DLL') +shlib4 = shared_library('shlib4', 'pull_meson_test_function.c', + c_args : '-DBUILDING_DLL', + link_with : stlib4, + include_directories : stlib4.private_dir_include(), +) +exe = executable('exe4', 'main.c', + link_with : shlib4, + include_directories : stlib4.private_dir_include(), +) +test('test4', exe) + +# Test 5: link into static library, link_whole into shared +# and access from executable +srcs = gen.process('meson_test_function.tmpl') +stlib5 = static_library('stlib5', [srcs], c_args : '-DBUILDING_DLL') +shlib5 = shared_library('shlib5', link_whole : stlib5) +exe = executable('exe5', 'main.c', + link_with : shlib5, + include_directories : stlib5.private_dir_include(), +) +test('test5', exe) diff --git a/meson/test cases/common/170 generator link whole/meson_test_function.tmpl b/meson/test cases/common/170 generator link whole/meson_test_function.tmpl new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/170 generator link whole/pull_meson_test_function.c b/meson/test cases/common/170 generator link whole/pull_meson_test_function.c new file mode 100644 index 000000000..23d24acf4 --- /dev/null +++ b/meson/test cases/common/170 generator link whole/pull_meson_test_function.c @@ -0,0 +1,6 @@ +#include "export.h" +#include "meson_test_function.h" + +int DLL_PUBLIC function_puller(void) { + return meson_test_function(); +} diff --git a/meson/test cases/common/171 initial c_args/meson.build b/meson/test cases/common/171 initial c_args/meson.build new file mode 100644 index 000000000..638f8c277 --- /dev/null +++ b/meson/test cases/common/171 initial c_args/meson.build @@ -0,0 +1,7 @@ +project('options', 'c') + +# Test passing c_args and c_link_args options from the command line. 
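+# (Editorial note, not part of the upstream test file.) The values asserted below
+# are injected by the test runner through the options matrix in test.json; invoked
+# by hand, the equivalent setup would look roughly like ('builddir' is hypothetical):
+#
+#   meson setup builddir -Dc_args=-funroll-loops \
+#     -Dc_link_args=-Dtest_harmless_but_useless_link_arg
+#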
+assert(get_option('c_args') == ['-funroll-loops'], + 'Incorrect value for c_args option.') +assert(get_option('c_link_args') == ['-Dtest_harmless_but_useless_link_arg'], + 'Incorrect value for c_link_args option.') diff --git a/meson/test cases/common/171 initial c_args/test.json b/meson/test cases/common/171 initial c_args/test.json new file mode 100644 index 000000000..f9b73a47d --- /dev/null +++ b/meson/test cases/common/171 initial c_args/test.json @@ -0,0 +1,8 @@ +{ + "matrix": { + "options": { + "c_args": [{ "val": "-funroll-loops" }], + "c_link_args": [{ "val": "-Dtest_harmless_but_useless_link_arg" }] + } + } +} diff --git a/meson/test cases/common/172 identical target name in subproject flat layout/foo.c b/meson/test cases/common/172 identical target name in subproject flat layout/foo.c new file mode 100644 index 000000000..ed427899a --- /dev/null +++ b/meson/test cases/common/172 identical target name in subproject flat layout/foo.c @@ -0,0 +1 @@ +int meson_test_main_foo(void) { return 10; } diff --git a/meson/test cases/common/172 identical target name in subproject flat layout/main.c b/meson/test cases/common/172 identical target name in subproject flat layout/main.c new file mode 100644 index 000000000..6f02aeb82 --- /dev/null +++ b/meson/test cases/common/172 identical target name in subproject flat layout/main.c @@ -0,0 +1,16 @@ +#include <stdio.h> + +int meson_test_main_foo(void); +int meson_test_subproj_foo(void); + +int main(void) { + if (meson_test_main_foo() != 10) { + printf("Failed meson_test_main_foo\n"); + return 1; + } + if (meson_test_subproj_foo() != 20) { + printf("Failed meson_test_subproj_foo\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/172 identical target name in subproject flat layout/meson.build b/meson/test cases/common/172 identical target name in subproject flat layout/meson.build new file mode 100644 index 000000000..ce1d4b8e1 --- /dev/null +++ b/meson/test cases/common/172 identical target name in subproject flat layout/meson.build @@ -0,0 +1,15 @@ +project('subproject targets', 'c') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: many targets with the same name not supported in Xcode. Patches welcome.') +endif + +# Idea behind this test is to create targets with identical name +# but different output files. We can do this by choosing different +# name_prefix of libraries. Target id does not depend on name_prefix. 
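+# (Editorial sketch, not from the upstream file.) name_prefix only replaces the
+# default 'lib' part of the output file name, so two targets named 'foo' can
+# coexist in a flat layout, producing e.g. 'mainfoo.a' and 'subprojfoo.a':
+#
+#   static_library('foo', 'foo.c', name_prefix : 'main')     # -> mainfoo.a
+#   static_library('foo', 'foo.c', name_prefix : 'subproj')  # -> subprojfoo.a
+#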
+ +main_foo = static_library('foo', 'foo.c', name_prefix : 'main') +subproj_foo = subproject('subproj').get_variable('foo') + +exe = executable('prog', 'main.c', link_with : [main_foo, subproj_foo]) +test('main test', exe) diff --git a/meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/foo.c b/meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/foo.c new file mode 100644 index 000000000..f33429229 --- /dev/null +++ b/meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/foo.c @@ -0,0 +1 @@ +int meson_test_subproj_foo(void) { return 20; } diff --git a/meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/meson.build b/meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/meson.build new file mode 100644 index 000000000..c92719469 --- /dev/null +++ b/meson/test cases/common/172 identical target name in subproject flat layout/subprojects/subproj/meson.build @@ -0,0 +1,3 @@ +project('subproj', 'c') + +foo = static_library('foo', 'foo.c', name_prefix : 'subproj') diff --git a/meson/test cases/common/173 as-needed/config.h b/meson/test cases/common/173 as-needed/config.h new file mode 100644 index 000000000..b8fb60fe4 --- /dev/null +++ b/meson/test cases/common/173 as-needed/config.h @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #if defined BUILDING_DLL + #define DLL_PUBLIC __declspec(dllexport) + #else + #define DLL_PUBLIC __declspec(dllimport) + #endif +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif diff --git a/meson/test cases/common/173 as-needed/libA.cpp b/meson/test cases/common/173 as-needed/libA.cpp new file mode 100644 index 000000000..5f45bc077 --- /dev/null +++ b/meson/test cases/common/173 as-needed/libA.cpp @@ -0,0 +1,7 @@ +#define BUILDING_DLL + +#include "libA.h" + +namespace meson_test_as_needed { + DLL_PUBLIC bool linked = false; +} diff --git a/meson/test cases/common/173 as-needed/libA.h b/meson/test cases/common/173 as-needed/libA.h new file mode 100644 index 000000000..8e76d22fa --- /dev/null +++ b/meson/test cases/common/173 as-needed/libA.h @@ -0,0 +1,5 @@ +#include "config.h" + +namespace meson_test_as_needed { + DLL_PUBLIC extern bool linked; +} diff --git a/meson/test cases/common/173 as-needed/libB.cpp b/meson/test cases/common/173 as-needed/libB.cpp new file mode 100644 index 000000000..a8723941b --- /dev/null +++ b/meson/test cases/common/173 as-needed/libB.cpp @@ -0,0 +1,19 @@ +#include "libA.h" + +#undef DLL_PUBLIC +#define BUILDING_DLL +#include "config.h" + +namespace meson_test_as_needed { + namespace { + bool set_linked() { + linked = true; + return true; + } + bool stub = set_linked(); + } + + DLL_PUBLIC int libB_unused_func() { + return 0; + } +} diff --git a/meson/test cases/common/173 as-needed/main.cpp b/meson/test cases/common/173 as-needed/main.cpp new file mode 100644 index 000000000..a893431d4 --- /dev/null +++ b/meson/test cases/common/173 as-needed/main.cpp @@ -0,0 +1,7 @@ +#include <cstdlib> + +#include "libA.h" + +int main(void) { + return !meson_test_as_needed::linked ? 
EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/meson/test cases/common/173 as-needed/meson.build b/meson/test cases/common/173 as-needed/meson.build new file mode 100644 index 000000000..3b54aaa02 --- /dev/null +++ b/meson/test cases/common/173 as-needed/meson.build @@ -0,0 +1,13 @@ +project('as-needed test', 'cpp') + +# Idea behind this test is to have -Wl,--as-needed prune +# away unneeded linkages, which would otherwise cause global +# static initialiser side-effects to set a boolean to true. + +# Credits for portable ISO C++ idea go to sarum9in + +libA = library('A', 'libA.cpp') +libB = library('B', 'libB.cpp', link_with : libA) + +main_exe = executable('C', 'main.cpp', link_with : [libA, libB]) +test('main test', main_exe) diff --git a/meson/test cases/common/174 ndebug if-release enabled/main.c b/meson/test cases/common/174 ndebug if-release enabled/main.c new file mode 100644 index 000000000..984ebcabe --- /dev/null +++ b/meson/test cases/common/174 ndebug if-release enabled/main.c @@ -0,0 +1,15 @@ +#include <assert.h> +#include <stdlib.h> + +int meson_test_side_effect = EXIT_FAILURE; + +int meson_test_set_side_effect(void) { + meson_test_side_effect = EXIT_SUCCESS; + return 1; +} + +int main(void) { + // meson_test_side_effect is set only if assert is executed + assert(meson_test_set_side_effect()); + return meson_test_side_effect; +} diff --git a/meson/test cases/common/174 ndebug if-release enabled/meson.build b/meson/test cases/common/174 ndebug if-release enabled/meson.build new file mode 100644 index 000000000..be263752a --- /dev/null +++ b/meson/test cases/common/174 ndebug if-release enabled/meson.build @@ -0,0 +1,7 @@ +project('ndebug enabled', 'c', + default_options : [ + 'buildtype=debugoptimized', + 'b_ndebug=if-release', + ]) + +test('exe', executable('main', 'main.c')) diff --git a/meson/test cases/common/175 ndebug if-release disabled/main.c b/meson/test cases/common/175 ndebug if-release disabled/main.c new file mode 100644 index 000000000..cb3ec3f65 --- /dev/null +++ b/meson/test cases/common/175 ndebug if-release disabled/main.c @@ -0,0 +1,7 @@ +#include <assert.h> +#include <stdlib.h> + +int main(void) { + assert(0); + return EXIT_SUCCESS; +} diff --git a/meson/test cases/common/175 ndebug if-release disabled/meson.build b/meson/test cases/common/175 ndebug if-release disabled/meson.build new file mode 100644 index 000000000..a9a79ea9b --- /dev/null +++ b/meson/test cases/common/175 ndebug if-release disabled/meson.build @@ -0,0 +1,7 @@ +project('ndebug disabled', 'c', + default_options : [ + 'buildtype=release', + 'b_ndebug=if-release', + ]) + +test('exe', executable('main', 'main.c')) diff --git a/meson/test cases/common/176 subproject version/meson.build b/meson/test cases/common/176 subproject version/meson.build new file mode 100644 index 000000000..bd8fc0392 --- /dev/null +++ b/meson/test cases/common/176 subproject version/meson.build @@ -0,0 +1,10 @@ +project('subproject version', 'c', + version : '2.3.4', + license: 'mylicense') + +subproject('a') + +liba_dep = dependency('a', + fallback: ['a', 'liba_dep'], + version: ['>= 0.30.0', '!= 0.99.0']) + diff --git a/meson/test cases/common/176 subproject version/subprojects/a/meson.build b/meson/test cases/common/176 subproject version/subprojects/a/meson.build new file mode 100644 index 000000000..dae31300f --- /dev/null +++ b/meson/test cases/common/176 subproject version/subprojects/a/meson.build @@ -0,0 +1,5 @@ +project('mysubproject', 'c', + version : '1.0.0', + license : 'sublicense') + +liba_dep = declare_dependency (version : '1.0.0') diff --git 
a/meson/test cases/common/177 subdir_done/meson.build b/meson/test cases/common/177 subdir_done/meson.build new file mode 100644 index 000000000..457e613b1 --- /dev/null +++ b/meson/test cases/common/177 subdir_done/meson.build @@ -0,0 +1,12 @@ +# Should run, even though main.cpp does not exist and we call error in the last line. +# subdir_done jumps to end, so both lines are not executed. + +project('example exit', 'cpp') + +if true + subdir_done() +endif + +executable('main', 'main.cpp') +error('Unreachable') + diff --git a/meson/test cases/common/178 bothlibraries/dummy.py b/meson/test cases/common/178 bothlibraries/dummy.py new file mode 100644 index 000000000..9e838bae9 --- /dev/null +++ b/meson/test cases/common/178 bothlibraries/dummy.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +from pathlib import Path +import sys + +if __name__ == '__main__': + Path(sys.argv[1]).write_text('Hello World\n') + raise SystemExit(0) diff --git a/meson/test cases/common/178 bothlibraries/libfile.c b/meson/test cases/common/178 bothlibraries/libfile.c new file mode 100644 index 000000000..f5e228ec1 --- /dev/null +++ b/meson/test cases/common/178 bothlibraries/libfile.c @@ -0,0 +1,7 @@ +#include "mylib.h" + +DO_EXPORT int retval = 42; + +DO_EXPORT int func(void) { + return retval; +} diff --git a/meson/test cases/common/178 bothlibraries/main.c b/meson/test cases/common/178 bothlibraries/main.c new file mode 100644 index 000000000..8237bae0a --- /dev/null +++ b/meson/test cases/common/178 bothlibraries/main.c @@ -0,0 +1,8 @@ +#include "mylib.h" + +DO_IMPORT int func(void); +DO_IMPORT int retval; + +int main(void) { + return func() == retval ? 0 : 1; +} diff --git a/meson/test cases/common/178 bothlibraries/meson.build b/meson/test cases/common/178 bothlibraries/meson.build new file mode 100644 index 000000000..f3191cc5d --- /dev/null +++ b/meson/test cases/common/178 bothlibraries/meson.build @@ -0,0 +1,50 @@ +project('both libraries linking test', 'c') + +both_libs = both_libraries('mylib', 'libfile.c') +dep = declare_dependency(link_with: both_libs) +exe_shared = executable('prog-shared', 'main.c', link_with : both_libs.get_shared_lib()) +exe_static = executable('prog-static', 'main.c', + c_args : ['-DSTATIC_COMPILATION'], + link_with : both_libs.get_static_lib()) +exe_both = executable('prog-both', 'main.c', link_with : both_libs) +exe_dep = executable('prog-dep', 'main.c', dependencies : [dep]) + +# Try using it in a custom_target +custom_target('tgt_a', + command: [ + find_program('./dummy.py'), + '@OUTPUT@', + both_libs, + ], + output: ['hello1.txt'], + input: [both_libs], +) + +test('runtest-shared', exe_shared) +test('runtest-static', exe_static) +test('runtest-both', exe_both) +test('runtest-dep', exe_dep) + +# Same as above, but using build_target() +both_libs2 = build_target('mylib2', 'libfile.c', target_type: 'both_libraries') +exe_shared2 = executable('prog-shared2', 'main.c', + link_with : both_libs2.get_shared_lib()) +exe_static2 = executable('prog-static2', 'main.c', + c_args : ['-DSTATIC_COMPILATION'], + link_with : both_libs2.get_static_lib()) +exe_both2 = executable('prog-both2', 'main.c', link_with : both_libs2) + +# Test {set,get}_variable +set_variable('both_libs2', both_libs) +both_libs3 = get_variable('both_libs') + +# Ensure that calling the build target methods also works +assert(both_libs.name() == 'mylib') +assert(both_libs2.name() == 'mylib') +assert(both_libs3.name() == 'mylib') +assert(both_libs2.get_shared_lib().name() == 'mylib') +assert(both_libs3.get_static_lib().name() 
== 'mylib') + +test('runtest-shared-2', exe_shared2) +test('runtest-static-2', exe_static2) +test('runtest-both-2', exe_both2) diff --git a/meson/test cases/common/178 bothlibraries/mylib.h b/meson/test cases/common/178 bothlibraries/mylib.h new file mode 100644 index 000000000..1038a0142 --- /dev/null +++ b/meson/test cases/common/178 bothlibraries/mylib.h @@ -0,0 +1,13 @@ +#pragma once + +#ifdef _WIN32 + #ifdef STATIC_COMPILATION + #define DO_IMPORT extern + #else + #define DO_IMPORT __declspec(dllimport) + #endif + #define DO_EXPORT __declspec(dllexport) +#else + #define DO_IMPORT extern + #define DO_EXPORT +#endif diff --git a/meson/test cases/common/179 escape and unicode/file.c.in b/meson/test cases/common/179 escape and unicode/file.c.in new file mode 100644 index 000000000..5dd6e50fa --- /dev/null +++ b/meson/test cases/common/179 escape and unicode/file.c.in @@ -0,0 +1,5 @@ +#include <stdio.h> +const char* does_it_work(void) { + printf("{NAME}\n"); + return "yes it does"; +} diff --git a/meson/test cases/common/179 escape and unicode/file.py b/meson/test cases/common/179 escape and unicode/file.py new file mode 100644 index 000000000..40fa7ca48 --- /dev/null +++ b/meson/test cases/common/179 escape and unicode/file.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +import sys +import os + +with open(sys.argv[1]) as fh: + content = fh.read().replace("{NAME}", sys.argv[2]) + +with open(os.path.join(sys.argv[3]), 'w', errors='replace') as fh: + fh.write(content) diff --git a/meson/test cases/common/179 escape and unicode/find.py b/meson/test cases/common/179 escape and unicode/find.py new file mode 100644 index 000000000..34a3eb835 --- /dev/null +++ b/meson/test cases/common/179 escape and unicode/find.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 + +import os +import sys + +for fh in os.listdir('.'): + if os.path.isfile(fh): + if fh.endswith('.c'): + sys.stdout.write(fh + '\0') diff --git a/meson/test cases/common/179 escape and unicode/fun.c b/meson/test cases/common/179 escape and unicode/fun.c new file mode 100644 index 000000000..c5634d836 --- /dev/null +++ b/meson/test cases/common/179 escape and unicode/fun.c @@ -0,0 +1,3 @@ +int a_fun(void) { + return 1; +} diff --git a/meson/test cases/common/179 escape and unicode/main.c b/meson/test cases/common/179 escape and unicode/main.c new file mode 100644 index 000000000..6137ad79c --- /dev/null +++ b/meson/test cases/common/179 escape and unicode/main.c @@ -0,0 +1,12 @@ +#include <string.h> + +const char* does_it_work(void); + +int a_fun(void); + +int main(void) { + if(strcmp(does_it_work(), "yes it does") != 0) { + return -a_fun(); + } + return 0; +} diff --git a/meson/test cases/common/179 escape and unicode/meson.build b/meson/test cases/common/179 escape and unicode/meson.build new file mode 100644 index 000000000..e4fe628ae --- /dev/null +++ b/meson/test cases/common/179 escape and unicode/meson.build @@ -0,0 +1,38 @@ +project('180 escape', 'c') + +gen = generator(find_program('file.py'), arguments:['@INPUT@', 'erd\u0151', '@OUTPUT@'], output: '@BASENAME@') + +gen_file = gen.process('file.c.in') + +find_file_list = run_command(find_program('find.py')) +assert(find_file_list.returncode() == 0, 'Didn\'t find any files.') + +# Strings should support both octal \ooo and hex \xhh encodings + +found_files_oct = [] +foreach l : find_file_list.stdout().strip('\0').split('\000') + found_files_oct += [files(l)] +endforeach + +test('first', executable('first', found_files_oct + [gen_file])) + +found_files_hex = [] +foreach l : 
find_file_list.stdout().strip('\x00').split('\x00') + found_files_hex += [files(l)] +endforeach + +test('second', executable('second', found_files_hex + [gen_file])) + +# Unrecognized and malformed escape sequences are literal + +malformed = [ + [ '\c', 'c' ], + [ '\Uabcdefghi', 'Uabcdefghi'], + [ '\u123 ', 'u123 '], + [ '\xqr', 'xqr'], +] + +foreach m : malformed + assert(m[0].endswith(m[1]), 'bad escape sequence had unexpected end') + assert(m[0].startswith('\\'), 'bad escape sequence had unexpected start') +endforeach diff --git a/meson/test cases/common/18 includedir/include/func.h b/meson/test cases/common/18 includedir/include/func.h new file mode 100644 index 000000000..647b72fa2 --- /dev/null +++ b/meson/test cases/common/18 includedir/include/func.h @@ -0,0 +1,6 @@ +#ifndef FUNC_H__ +#define FUNC_H__ + +int func(void); + +#endif diff --git a/meson/test cases/common/18 includedir/meson.build b/meson/test cases/common/18 includedir/meson.build new file mode 100644 index 000000000..17eec0e57 --- /dev/null +++ b/meson/test cases/common/18 includedir/meson.build @@ -0,0 +1,4 @@ +project('include dir test', 'c') + +inc = include_directories('include') +subdir('src') diff --git a/meson/test cases/common/18 includedir/src/func.c b/meson/test cases/common/18 includedir/src/func.c new file mode 100644 index 000000000..215beff8d --- /dev/null +++ b/meson/test cases/common/18 includedir/src/func.c @@ -0,0 +1,5 @@ +#include "func.h" + +int func(void) { + return 0; +} diff --git a/meson/test cases/common/18 includedir/src/meson.build b/meson/test cases/common/18 includedir/src/meson.build new file mode 100644 index 000000000..30d2e0c87 --- /dev/null +++ b/meson/test cases/common/18 includedir/src/meson.build @@ -0,0 +1,5 @@ +exe = executable('prog', 'prog.c', 'func.c', include_directories : inc) +test('inc test', exe) + +exe2 = executable('prog2', 'prog.c', 'func.c', include_directories : [['../include']]) +test('inc test 2', exe2) diff --git a/meson/test cases/common/18 includedir/src/prog.c b/meson/test cases/common/18 includedir/src/prog.c new file mode 100644 index 000000000..ce4dd67e9 --- /dev/null +++ b/meson/test cases/common/18 includedir/src/prog.c @@ -0,0 +1,5 @@ +#include "func.h" + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/180 has link arg/meson.build b/meson/test cases/common/180 has link arg/meson.build new file mode 100644 index 000000000..6bfbd59e7 --- /dev/null +++ b/meson/test cases/common/180 has link arg/meson.build @@ -0,0 +1,47 @@ +project('has link arg', 'c', 'cpp') + +cc = meson.get_compiler('c') +cpp = meson.get_compiler('cpp') + +if cc.get_argument_syntax() == 'msvc' + is_arg = '/OPT:REF' + useless = '/DEBUG' + isnt_arg = '/iambroken' +else + is_arg = '-Wl,-L/tmp' + useless = '-Wl,-L/usr' + isnt_arg = '-Wl,-iambroken' +endif + +assert(cc.has_link_argument(is_arg), 'Arg that should have worked does not work.') +assert(cpp.has_link_argument(is_arg), 'Arg that should have worked does not work.') + +if cc.get_id() != 'pgi' +assert(not cc.has_link_argument(isnt_arg), 'Arg that should be broken is not.') +assert(not cpp.has_link_argument(isnt_arg), 'Arg that should be broken is not.') + +assert(cc.get_supported_link_arguments([is_arg, isnt_arg, useless]) == [is_arg, useless], 'Arg filtering returned different result.') +assert(cpp.get_supported_link_arguments([is_arg, isnt_arg, useless]) == [is_arg, useless], 'Arg filtering returned different result.') + +# Have useless at the end to ensure that the search goes from front to back. 
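+# (Editorial note, not from the upstream file.) first_supported_link_argument()
+# scans its arguments in order and returns an array holding at most the first
+# one the linker accepts, e.g. (flags here are illustrative):
+#
+#   cc.first_supported_link_argument(['-Wl,-iambroken', '-Wl,-L/tmp'])  # -> ['-Wl,-L/tmp']
+#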
+l1 = cc.first_supported_link_argument([isnt_arg, is_arg, isnt_arg, useless]) +l2 = cc.first_supported_link_argument(isnt_arg, isnt_arg, isnt_arg) + +assert(l1.length() == 1, 'First supported returned wrong result.') +assert(l1.get(0) == is_arg, 'First supported returned wrong argument.') +assert(l2.length() == 0, 'First supported did not return empty array.') + +l1 = cpp.first_supported_link_argument([isnt_arg, is_arg, isnt_arg, useless]) +l2 = cpp.first_supported_link_argument(isnt_arg, isnt_arg, isnt_arg) + +assert(l1.length() == 1, 'First supported returned wrong result.') +assert(l1.get(0) == is_arg, 'First supported returned wrong argument.') +assert(l2.length() == 0, 'First supported did not return empty array.') + +assert(not cc.has_multi_link_arguments([isnt_arg, is_arg]), 'Arg that should be broken is not.') + +assert(not cc.has_link_argument('-Wl,-z,nodelete42'), 'Did not detect wrong -z linker argument') +endif + +assert(cc.has_multi_link_arguments(is_arg), 'Arg that should have worked does not work.') +assert(cc.has_multi_link_arguments([useless, is_arg]), 'Arg that should have worked does not work.') diff --git a/meson/test cases/common/181 same target name flat layout/foo.c b/meson/test cases/common/181 same target name flat layout/foo.c new file mode 100644 index 000000000..ed427899a --- /dev/null +++ b/meson/test cases/common/181 same target name flat layout/foo.c @@ -0,0 +1 @@ +int meson_test_main_foo(void) { return 10; } diff --git a/meson/test cases/common/181 same target name flat layout/main.c b/meson/test cases/common/181 same target name flat layout/main.c new file mode 100644 index 000000000..6f02aeb82 --- /dev/null +++ b/meson/test cases/common/181 same target name flat layout/main.c @@ -0,0 +1,16 @@ +#include <stdio.h> + +int meson_test_main_foo(void); +int meson_test_subproj_foo(void); + +int main(void) { + if (meson_test_main_foo() != 10) { + printf("Failed meson_test_main_foo\n"); + return 1; + } + if (meson_test_subproj_foo() != 20) { + printf("Failed meson_test_subproj_foo\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/181 same target name flat layout/meson.build b/meson/test cases/common/181 same target name flat layout/meson.build new file mode 100644 index 000000000..cfad2c2a5 --- /dev/null +++ b/meson/test cases/common/181 same target name flat layout/meson.build @@ -0,0 +1,15 @@ +project('subdir targets', 'c') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: many targets with the same name not supported in Xcode. Patches welcome.') +endif + +# Idea behind this test is to create targets with identical name +# but different output files. We can do this by choosing different +# name_prefix of libraries. Target id does not depend on name_prefix. 
+ +main_foo = static_library('foo', 'foo.c', name_prefix : 'main') +subdir('subdir') # defines subdir_foo + +exe = executable('prog', 'main.c', link_with : [main_foo, subdir_foo]) +test('main test', exe) diff --git a/meson/test cases/common/181 same target name flat layout/subdir/foo.c b/meson/test cases/common/181 same target name flat layout/subdir/foo.c new file mode 100644 index 000000000..f33429229 --- /dev/null +++ b/meson/test cases/common/181 same target name flat layout/subdir/foo.c @@ -0,0 +1 @@ +int meson_test_subproj_foo(void) { return 20; } diff --git a/meson/test cases/common/181 same target name flat layout/subdir/meson.build b/meson/test cases/common/181 same target name flat layout/subdir/meson.build new file mode 100644 index 000000000..223a5ef90 --- /dev/null +++ b/meson/test cases/common/181 same target name flat layout/subdir/meson.build @@ -0,0 +1 @@ +subdir_foo = static_library('foo', 'foo.c', name_prefix : 'subdir') diff --git a/meson/test cases/common/182 find override/meson.build b/meson/test cases/common/182 find override/meson.build new file mode 100644 index 000000000..8dcbac76b --- /dev/null +++ b/meson/test cases/common/182 find override/meson.build @@ -0,0 +1,25 @@ +project('find program override', 'c') + +gencodegen = find_program('gencodegen', required : false) +six_prog = find_program('six_meson_exe', required : false) + +assert(not gencodegen.found(), 'gencodegen is an internal program, should not be found') +assert(not six_prog.found(), 'six_meson_exe is an internal program, should not be found') + +# Test the check-if-found-else-override workflow +if not gencodegen.found() + subdir('subdir') +endif + +subdir('otherdir') + +tool = find_program('sometool') +assert(tool.found()) +assert(tool.full_path() != '') +assert(tool.full_path() == tool.path()) + +# six_meson_exe is an overritten project executable +six_prog = find_program('six_meson_exe') +assert(six_prog.found()) +assert(six_prog.full_path() != '') +assert(six_prog.full_path() == six_prog.path()) diff --git a/meson/test cases/common/182 find override/otherdir/main.c b/meson/test cases/common/182 find override/otherdir/main.c new file mode 100644 index 000000000..5fb6371b8 --- /dev/null +++ b/meson/test cases/common/182 find override/otherdir/main.c @@ -0,0 +1,5 @@ +int be_seeing_you(void); + +int main(void) { + return be_seeing_you() == 6 ? 0 : 1; +} diff --git a/meson/test cases/common/182 find override/otherdir/main2.c b/meson/test cases/common/182 find override/otherdir/main2.c new file mode 100644 index 000000000..80a30091e --- /dev/null +++ b/meson/test cases/common/182 find override/otherdir/main2.c @@ -0,0 +1,5 @@ +int number_returner(void); + +int main(void) { + return number_returner() == 100 ? 0 : 1; +} diff --git a/meson/test cases/common/182 find override/otherdir/meson.build b/meson/test cases/common/182 find override/otherdir/meson.build new file mode 100644 index 000000000..7deff4035 --- /dev/null +++ b/meson/test cases/common/182 find override/otherdir/meson.build @@ -0,0 +1,30 @@ +gen = find_program('codegen') # Should use overridden value set in "subdir". + +src = custom_target('arrival', + input : 'source.desc', + output : 'file.c', + command : [gen, '@INPUT@', '@OUTPUT@'] + ) + +e = executable('six', 'main.c', src) + +test('six', e) + +# Override stuff with an executables +meson.override_find_program('six_meson_exe', e) + + +# The same again, but this time with a program that was generated +# with configure_file. 
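+# (Editorial sketch, not from the upstream file.) The find_program() call below
+# resolves to a generated script because the sibling subdir/meson.build registers
+# it, following this pattern:
+#
+#   numprog = configure_file(input : 'gencodegen.py.in', output : 'gencodegen.py',
+#                            configuration : cdata)
+#   meson.override_find_program('gencodegen', numprog)
+#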
+ +gen = find_program('gencodegen') + +src = custom_target('hundred', + input : 'source2.desc', + output : 'file2.c', + command : [gen, '@INPUT@', '@OUTPUT@'] + ) + +e = executable('hundred', 'main2.c', src) + +test('hundred', e) diff --git a/meson/test cases/common/182 find override/otherdir/source.desc b/meson/test cases/common/182 find override/otherdir/source.desc new file mode 100644 index 000000000..8b19c9c2f --- /dev/null +++ b/meson/test cases/common/182 find override/otherdir/source.desc @@ -0,0 +1 @@ +be_seeing_you diff --git a/meson/test cases/common/182 find override/otherdir/source2.desc b/meson/test cases/common/182 find override/otherdir/source2.desc new file mode 100644 index 000000000..965f868df --- /dev/null +++ b/meson/test cases/common/182 find override/otherdir/source2.desc @@ -0,0 +1 @@ +number_returner diff --git a/meson/test cases/common/182 find override/subdir/converter.py b/meson/test cases/common/182 find override/subdir/converter.py new file mode 100755 index 000000000..efe04649d --- /dev/null +++ b/meson/test cases/common/182 find override/subdir/converter.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import sys +import pathlib + +[ifilename, ofilename] = sys.argv[1:3] + +ftempl = '''int %s(void) { + return 6; +} +''' + +d = pathlib.Path(ifilename).read_text().split('\n')[0].strip() + +pathlib.Path(ofilename).write_text(ftempl % d) diff --git a/meson/test cases/common/182 find override/subdir/gencodegen.py.in b/meson/test cases/common/182 find override/subdir/gencodegen.py.in new file mode 100755 index 000000000..78ebb2f4e --- /dev/null +++ b/meson/test cases/common/182 find override/subdir/gencodegen.py.in @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import sys +import pathlib + +[ifilename, ofilename] = sys.argv[1:3] + +ftempl = '''int %s(void) { + return @NUMBER@; +} +''' + +d = pathlib.Path(ifilename).read_text().split('\n')[0].strip() + +pathlib.Path(ofilename).write_text(ftempl % d) diff --git a/meson/test cases/common/182 find override/subdir/meson.build b/meson/test cases/common/182 find override/subdir/meson.build new file mode 100644 index 000000000..e5de34dde --- /dev/null +++ b/meson/test cases/common/182 find override/subdir/meson.build @@ -0,0 +1,14 @@ +x = find_program('converter.py') + +meson.override_find_program('codegen', x) + +# Override a command with a generated script + +cdata = configuration_data() + +cdata.set('NUMBER', 100) +numprog = configure_file(input : 'gencodegen.py.in', + output : 'gencodegen.py', + configuration : cdata) + +meson.override_find_program('gencodegen', numprog) diff --git a/meson/test cases/common/182 find override/subprojects/sub.wrap b/meson/test cases/common/182 find override/subprojects/sub.wrap new file mode 100644 index 000000000..17aa3323f --- /dev/null +++ b/meson/test cases/common/182 find override/subprojects/sub.wrap @@ -0,0 +1,5 @@ +[wrap-file] +directory = sub + +[provide] +program_names = sometool diff --git a/meson/test cases/common/182 find override/subprojects/sub/meson.build b/meson/test cases/common/182 find override/subprojects/sub/meson.build new file mode 100644 index 000000000..640f2704c --- /dev/null +++ b/meson/test cases/common/182 find override/subprojects/sub/meson.build @@ -0,0 +1,4 @@ +project('tools') + +exe = find_program('gencodegen') +meson.override_find_program('sometool', exe) diff --git a/meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.c b/meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.c new file mode 100644 index 
000000000..215112ca5 --- /dev/null +++ b/meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.c @@ -0,0 +1,16 @@ +/* Copyright © 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#error "Included C sources that shouldn't be." diff --git a/meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.h b/meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.h new file mode 100644 index 000000000..28c81c9d3 --- /dev/null +++ b/meson/test cases/common/183 partial dependency/declare_dependency/headers/foo.h @@ -0,0 +1,16 @@ +/* Copyright © 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +int foo(void); diff --git a/meson/test cases/common/183 partial dependency/declare_dependency/main.c b/meson/test cases/common/183 partial dependency/declare_dependency/main.c new file mode 100644 index 000000000..057b71304 --- /dev/null +++ b/meson/test cases/common/183 partial dependency/declare_dependency/main.c @@ -0,0 +1,25 @@ +/* Copyright © 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "foo.h" + +int main(void) { + int a = foo(); + if (a == 1) { + return 0; + } else { + return 1; + } +} diff --git a/meson/test cases/common/183 partial dependency/declare_dependency/meson.build b/meson/test cases/common/183 partial dependency/declare_dependency/meson.build new file mode 100644 index 000000000..3783f6694 --- /dev/null +++ b/meson/test cases/common/183 partial dependency/declare_dependency/meson.build @@ -0,0 +1,32 @@ +# Copyright © 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +dec_sub_dep = declare_dependency( + include_directories : include_directories('headers'), +) + +dec_dep = declare_dependency( + sources : files('headers/foo.c'), + dependencies : dec_sub_dep, +) + +sub_dep = dec_dep.partial_dependency(includes : true) + +dec_exe = executable( + 'declare_dep', + files('main.c', 'other.c'), + dependencies : sub_dep, +) + +test('Declare Dependency', dec_exe) diff --git a/meson/test cases/common/183 partial dependency/declare_dependency/other.c b/meson/test cases/common/183 partial dependency/declare_dependency/other.c new file mode 100644 index 000000000..b1e199e44 --- /dev/null +++ b/meson/test cases/common/183 partial dependency/declare_dependency/other.c @@ -0,0 +1,20 @@ +/* Copyright © 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "foo.h" + +int foo(void) { + return 1; +} diff --git a/meson/test cases/common/183 partial dependency/meson.build b/meson/test cases/common/183 partial dependency/meson.build new file mode 100644 index 000000000..e908487f1 --- /dev/null +++ b/meson/test cases/common/183 partial dependency/meson.build @@ -0,0 +1,17 @@ +# Copyright © 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +project('partial dependency', ['c', 'cpp']) + +subdir('declare_dependency') diff --git a/meson/test cases/common/184 openmp/main.c b/meson/test cases/common/184 openmp/main.c new file mode 100644 index 000000000..cc81f4897 --- /dev/null +++ b/meson/test cases/common/184 openmp/main.c @@ -0,0 +1,16 @@ +#include <stdio.h> +#include <omp.h> + +int main(void) { +#ifdef _OPENMP + if (omp_get_max_threads() == 2) { + return 0; + } else { + printf("Max threads is %d not 2.\n", omp_get_max_threads()); + return 1; + } +#else + printf("_OPENMP is not defined; is OpenMP compilation working?\n"); + return 1; +#endif +} diff --git a/meson/test cases/common/184 openmp/main.cpp b/meson/test cases/common/184 openmp/main.cpp new file mode 100644 index 000000000..b12be3fd8 --- /dev/null +++ b/meson/test cases/common/184 openmp/main.cpp @@ -0,0 +1,16 @@ +#include <iostream> +#include <omp.h> + +int main(void) { +#ifdef _OPENMP + if (omp_get_max_threads() == 2) { + return 0; + } else { + std::cout << "Max threads is " << omp_get_max_threads() << " not 2." << std::endl; + return 1; + } +#else + printf("_OPENMP is not defined; is OpenMP compilation working?\n"); + return 1; +#endif +} diff --git a/meson/test cases/common/184 openmp/main.f90 b/meson/test cases/common/184 openmp/main.f90 new file mode 100644 index 000000000..d80f90fdf --- /dev/null +++ b/meson/test cases/common/184 openmp/main.f90 @@ -0,0 +1,9 @@ +use, intrinsic :: iso_fortran_env, only: stderr=>error_unit +use omp_lib + +if (omp_get_max_threads() /= 2) then + write(stderr, *) 'Max Fortran threads is', omp_get_max_threads(), 'not 2.' + stop 1 +endif + +end program diff --git a/meson/test cases/common/184 openmp/meson.build b/meson/test cases/common/184 openmp/meson.build new file mode 100644 index 000000000..a1154c22c --- /dev/null +++ b/meson/test cases/common/184 openmp/meson.build @@ -0,0 +1,58 @@ +project('openmp', 'c') + +cc = meson.get_compiler('c') +if cc.get_id() == 'gcc' and cc.version().version_compare('<4.2.0') + error('MESON_SKIP_TEST gcc is too old to support OpenMP.') +endif +if cc.get_id() == 'clang' and cc.version().version_compare('<3.7.0') + error('MESON_SKIP_TEST clang is too old to support OpenMP.') +endif +if cc.get_id() == 'msvc' and cc.version().version_compare('<17') + error('MESON_SKIP_TEST msvc is too old to support OpenMP.') +endif +if cc.get_id() == 'clang-cl' + error('MESON_SKIP_TEST clang-cl does not support OpenMP.') +endif +if cc.get_id() == 'clang' and host_machine.system() == 'windows' + error('MESON_SKIP_TEST Windows clang does not support OpenMP.') +endif +if host_machine.system() == 'darwin' + error('MESON_SKIP_TEST macOS does not support OpenMP.') +endif + +openmp = dependency('openmp') +env = environment() +env.set('OMP_NUM_THREADS', '2') + +exec = executable('exec', + 'main.c', + dependencies : [openmp]) +test('OpenMP C', exec, env : env) + +if not(build_machine.system() == 'windows' and cc.get_id() == 'pgi') + if add_languages('cpp', required : false) + execpp = executable('execpp', + 'main.cpp', + dependencies : [openmp]) + test('OpenMP C++', execpp, env : env) + endif +endif + +if add_languages('fortran', required : false) + # Mixing compilers (msvc/clang with gfortran) does not seem to work on Windows. 
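+  # (Editorial note, not from the upstream file.) Requesting the dependency per
+  # language makes Meson pick up the OpenMP flags from that language's compiler
+  # rather than the C one, which is what the Fortran-specific test below relies on:
+  #
+  #   dependency('openmp', language : 'fortran')
+  #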
+ if build_machine.system() != 'windows' or cc.get_id() == 'gnu' + exef = executable('exef', + 'main.f90', + dependencies : [openmp]) + test('OpenMP Fortran', exef, env : env) + + openmp_f = dependency('openmp', language : 'fortran') + exe_f = executable('exe_f', + 'main.f90', + dependencies : [openmp_f]) + test('OpenMP Fortran-specific', exe_f, env : env) + endif +endif + +# Check we can apply a version constraint +dependency('openmp', version: '>=@0@'.format(openmp.version())) diff --git a/meson/test cases/common/185 same target name/file.c b/meson/test cases/common/185 same target name/file.c new file mode 100644 index 000000000..91800303c --- /dev/null +++ b/meson/test cases/common/185 same target name/file.c @@ -0,0 +1,3 @@ +int func(void) { + return 0; +} diff --git a/meson/test cases/common/185 same target name/meson.build b/meson/test cases/common/185 same target name/meson.build new file mode 100644 index 000000000..4e585d56e --- /dev/null +++ b/meson/test cases/common/185 same target name/meson.build @@ -0,0 +1,4 @@ +project('same name', 'c') + +static_library('foo', 'file.c') +subdir('sub') diff --git a/meson/test cases/common/185 same target name/sub/file2.c b/meson/test cases/common/185 same target name/sub/file2.c new file mode 100644 index 000000000..3d1a1c323 --- /dev/null +++ b/meson/test cases/common/185 same target name/sub/file2.c @@ -0,0 +1,3 @@ +int func(void) { + return 5; +} diff --git a/meson/test cases/common/185 same target name/sub/meson.build b/meson/test cases/common/185 same target name/sub/meson.build new file mode 100644 index 000000000..610a4a3c7 --- /dev/null +++ b/meson/test cases/common/185 same target name/sub/meson.build @@ -0,0 +1 @@ +static_library('foo', 'file2.c') diff --git a/meson/test cases/common/186 test depends/gen.py b/meson/test cases/common/186 test depends/gen.py new file mode 100755 index 000000000..ee4ed9818 --- /dev/null +++ b/meson/test cases/common/186 test depends/gen.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +import sys + + +def main(): + with open(sys.argv[1], 'w') as out: + out.write(sys.argv[2]) + out.write('\n') + + +if __name__ == '__main__': + main() diff --git a/meson/test cases/common/186 test depends/main.c b/meson/test cases/common/186 test depends/main.c new file mode 100644 index 000000000..78f2de106 --- /dev/null +++ b/meson/test cases/common/186 test depends/main.c @@ -0,0 +1 @@ +int main(void) { return 0; } diff --git a/meson/test cases/common/186 test depends/meson.build b/meson/test cases/common/186 test depends/meson.build new file mode 100644 index 000000000..888c45118 --- /dev/null +++ b/meson/test cases/common/186 test depends/meson.build @@ -0,0 +1,26 @@ +project('test depends', 'c') + +gen = find_program('gen.py') + +custom_dep = custom_target('custom_dep', + build_by_default : false, + output : 'custom_dep.txt', + command : [gen, '@OUTPUT@', 'custom_dep'], +) + +exe_dep = executable('exe_dep', 'main.c', + build_by_default : false, +) + +test_prog = find_program('test.py') +test('string dependencies', test_prog, + args : [ + # This is declared for convenience, + # real use case might have some obscure method + # to find these dependencies, e.g. automatic plugin loading. 
+ 'custom_dep.txt', + exe_dep.full_path(), + ], + depends : [custom_dep, exe_dep], + workdir : meson.current_build_dir(), +) diff --git a/meson/test cases/common/186 test depends/test.py b/meson/test cases/common/186 test depends/test.py new file mode 100755 index 000000000..5b9f65c86 --- /dev/null +++ b/meson/test cases/common/186 test depends/test.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +import os +import os.path +import sys + + +def main(): + print('Looking in:', os.getcwd()) + not_found = list() + for f in sys.argv[1:]: + if not os.path.exists(f): + not_found.append(f) + if not_found: + print('Not found:', ', '.join(not_found)) + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/meson/test cases/common/187 args flattening/meson.build b/meson/test cases/common/187 args flattening/meson.build new file mode 100644 index 000000000..1dac2f912 --- /dev/null +++ b/meson/test cases/common/187 args flattening/meson.build @@ -0,0 +1,31 @@ +project('args flattening') + +arr = get_variable('does-not-exist', ['bar', 'baz']) +assert(arr == ['bar', 'baz'], 'get_variable with array fallback is broken') + +set_variable('arr', ['bar', 'baz']) +assert(arr == ['bar', 'baz'], 'set_variable(array) is broken') + +conf = configuration_data() +conf.set('foo', ['bar', 'baz']) +assert(conf.get('foo') == ['bar', 'baz'], 'configuration_data.set(array) is broken') + +arr = conf.get('does-not-exist', ['bar', 'baz']) +assert(arr == ['bar', 'baz'], 'configuration_data.get with array fallback is broken') + +arr = meson.get_cross_property('does-not-exist', ['bar', 'baz']) +assert(arr == ['bar', 'baz'], 'meson.get_cross_property with array fallback is broken') + +arr = meson.get_external_property('does-not-exist', ['bar', 'baz']) +assert(arr == ['bar', 'baz'], 'meson.get_external_property with array fallback is broken') + +arr = meson.get_external_property('does-not-exist', ['bar', 'baz'], native: true) +assert(arr == ['bar', 'baz'], 'meson.get_external_property native:true with array fallback is broken') + +arr = meson.get_external_property('does-not-exist', ['bar', 'baz'], native: false) +assert(arr == ['bar', 'baz'], 'meson.get_external_property native:false with array fallback is broken') + +# Test deprecated behaviour + +conf.set(['foo', 'bar']) +message(conf.get('foo')) diff --git a/meson/test cases/common/188 dict/meson.build b/meson/test cases/common/188 dict/meson.build new file mode 100644 index 000000000..dacf01db3 --- /dev/null +++ b/meson/test cases/common/188 dict/meson.build @@ -0,0 +1,71 @@ +project('dict test', 'c') + +dict = {'foo' : 'bar', + 'baz' : 'foo', + 'foo bar': 'baz'} + +exe = executable('prog', sources : ['prog.c']) + +i = 0 + +foreach key, value : dict + test('dict test @0@'.format(key), exe, + args : [dict[key], value]) + i += 1 +endforeach + +assert(i == 3, 'There should be three elements in that dictionary') + +empty_dict = {} + +foreach key, value : empty_dict + assert(false, 'This dict should be empty') +endforeach + +d1 = empty_dict + {'a' : 'b'} +assert(d1 == {'a' : 'b'}, 'dict addition is not working') + +d2 = d1 + {'a' : 'b2', 'c' : 'd'} +assert(d2 == {'a' : 'b2', 'c' : 'd'}, 'dict addition is not working') +assert(d1 == {'a' : 'b'}, 'dict should be immutable') + +d3 = d2 +d3 += {'e' : 'f'} +assert(d3 == {'a' : 'b2', 'c' : 'd', 'e' : 'f'}, 'dict plusassign is not working') +assert(d2 == {'a' : 'b2', 'c' : 'd'}, 'dict should be immutable') + +dict1 = {} + +# A variable to be used as a key +testkey1 = 'myKey1' +testkey2 = 'myKey2' + +# Add new entry using the 
variable +dict1 += {testkey1 : 'myValue'} +dict1 += {testkey2 : 42} + +# Test that the stored values are correct +assert(dict1[testkey1] == 'myValue', + 'Incorrect string value retrieved from dictionary - variable key') +assert(dict1['myKey1'] == 'myValue', + 'Incorrect string value retrieved from dictionary - literal key') +assert(dict1[testkey2] == 42, + 'Incorrect int value retrieved from dictionary - variable key') +assert(dict1['myKey2'] == 42, + 'Incorrect int value retrieved from dictionary - literal key') + +d = {testkey1 : 1} +assert(d[testkey1] == 1, + 'Incorrect int value retrieved from dictionary - variable key') +assert(d['myKey1'] == 1, + 'Incorrect int value retrieved from dictionary - literal key') + +d = {'1' / '2' : 1, join_paths('a', 'b') : 2} +k1 = '1' / '2' +k2 = join_paths('a', 'b') +assert(d[k1] == 1, 'Incorrect expression evaluation in dictionary key') +assert(d[k2] == 2, 'Incorrect expression evaluation in dictionary key') + +d = {'a' + 'b' : 1} +assert(d['a' + 'b'] == 1, 'Incorrect expression evaluation in dictionary key') +assert(d['ab'] == 1, 'Incorrect expression evaluation in dictionary key') diff --git a/meson/test cases/common/188 dict/prog.c b/meson/test cases/common/188 dict/prog.c new file mode 100644 index 000000000..bf0999d4a --- /dev/null +++ b/meson/test cases/common/188 dict/prog.c @@ -0,0 +1,8 @@ +#include + +int main(int argc, char **argv) { + if (argc != 3) + return 1; + + return strcmp(argv[1], argv[2]); +} diff --git a/meson/test cases/common/189 check header/meson.build b/meson/test cases/common/189 check header/meson.build new file mode 100644 index 000000000..98b395de5 --- /dev/null +++ b/meson/test cases/common/189 check header/meson.build @@ -0,0 +1,48 @@ +project('check header', 'c', 'cpp') + +host_system = host_machine.system() + +non_existent_header = 'ouagadougou.h' + +# Copy it into the builddir to ensure that it isn't found even if it's there +configure_file(input : non_existent_header, + output : non_existent_header, + copy: true) + +fallback = '' + +foreach comp : [meson.get_compiler('c'), meson.get_compiler('cpp')] + assert(comp.check_header('stdio.h', prefix : fallback), 'Stdio missing.') + + # stdio.h doesn't actually need stdlib.h, but just test that setting the + # prefix does not result in an error. + assert(comp.check_header('stdio.h', prefix : '#include ' + fallback), + 'Stdio missing.') + + # Test that check_header behaves differently than has_header. The second + # check without windows.h will fail with check_header. + # We only do this check on MSVC because MinGW often defines its own wrappers + # that pre-include windows.h + if comp.get_id() == 'msvc' + assert(comp.check_header('XInput.h', prefix : '#include ' + fallback), + 'XInput.h should not be missing on Windows') + assert(not comp.check_header('XInput.h'), 'XInput.h needs windows.h') + endif + + # Test that the following GCC bug doesn't happen: + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80005 + # https://github.com/mesonbuild/meson/issues/1458 + if host_system == 'linux' + assert(comp.check_header('linux/socket.h', prefix : fallback), + 'Could not find ') + if comp.has_header('intrin.h', prefix : fallback) + assert(not comp.check_header('intrin.h'), + 'intrin.h should not be usable on linux') + endif + endif + + # This header exists in the source and the builddir, but we still must not + # find it since we are looking in the system directories. 
+ assert(not comp.check_header(non_existent_header, prefix : fallback), + 'Found non-existent header.') +endforeach diff --git a/meson/test cases/common/189 check header/ouagadougou.h b/meson/test cases/common/189 check header/ouagadougou.h new file mode 100644 index 000000000..2f76c49cc --- /dev/null +++ b/meson/test cases/common/189 check header/ouagadougou.h @@ -0,0 +1 @@ +#define OMG_THIS_SHOULDNT_BE_FOUND diff --git a/meson/test cases/common/19 header in file list/header.h b/meson/test cases/common/19 header in file list/header.h new file mode 100644 index 000000000..354499acd --- /dev/null +++ b/meson/test cases/common/19 header in file list/header.h @@ -0,0 +1 @@ +#include diff --git a/meson/test cases/common/19 header in file list/meson.build b/meson/test cases/common/19 header in file list/meson.build new file mode 100644 index 000000000..79eec8d68 --- /dev/null +++ b/meson/test cases/common/19 header in file list/meson.build @@ -0,0 +1,14 @@ +project('header in file list', 'c') + +cc_id = meson.get_compiler('c').get_id() +cc_ver = meson.get_compiler('c').version() + +if cc_id == 'intel' or (cc_id == 'lcc' and cc_ver.version_compare('<=1.23.08')) + # ICC and LCC <= 1.23.08 do not escape spaces in paths in the dependency file, so Ninja + # (correctly) thinks that the rule has multiple outputs and errors out: + # 'depfile has multiple output paths' + error('MESON_SKIP_TEST: Skipping test because your compiler is known to generate broken dependency files') +endif + +exe = executable('prog', 'prog.c', 'header.h') +test('basic', exe) diff --git a/meson/test cases/common/19 header in file list/prog.c b/meson/test cases/common/19 header in file list/prog.c new file mode 100644 index 000000000..97acb3fc9 --- /dev/null +++ b/meson/test cases/common/19 header in file list/prog.c @@ -0,0 +1,3 @@ +#include "header.h" + +int main(void) { return 0; } diff --git a/meson/test cases/common/190 install_mode/config.h.in b/meson/test cases/common/190 install_mode/config.h.in new file mode 100644 index 000000000..14a155874 --- /dev/null +++ b/meson/test cases/common/190 install_mode/config.h.in @@ -0,0 +1,5 @@ +#define MESSAGE "@var@" +#define OTHER "@other@" "@second@" "@empty@" + +#mesondefine BE_TRUE +#mesondefine SHOULD_BE_UNDEF diff --git a/meson/test cases/common/190 install_mode/data_source.txt b/meson/test cases/common/190 install_mode/data_source.txt new file mode 100644 index 000000000..0c23cc0c3 --- /dev/null +++ b/meson/test cases/common/190 install_mode/data_source.txt @@ -0,0 +1 @@ +This is a text only input file. 
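
The "189 check header" case above turns on the difference between compiler.check_header(), which requires that the header actually compiles after any given prefix, and compiler.has_header(), which only probes that the header can be found. As a minimal standalone sketch of that distinction (not part of the patch; 'zlib.h' and the prefix are chosen purely for illustration):

    project('header probe sketch', 'c')
    cc = meson.get_compiler('c')

    # has_header() only asks whether the header can be located at all.
    if cc.has_header('zlib.h')
      # check_header() additionally requires that it compiles when included
      # after the given prefix.
      if cc.check_header('zlib.h', prefix : '#include <stdio.h>')
        message('zlib.h is both present and usable')
      endif
    endif

This is also why the test expects XInput.h to pass check_header only when windows.h is supplied in the prefix.
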
diff --git a/meson/test cases/common/190 install_mode/foo.1 b/meson/test cases/common/190 install_mode/foo.1 new file mode 100644 index 000000000..647c0970c --- /dev/null +++ b/meson/test cases/common/190 install_mode/foo.1 @@ -0,0 +1 @@ +this is a man page of foo.1 its contents are irrelevant diff --git a/meson/test cases/common/190 install_mode/meson.build b/meson/test cases/common/190 install_mode/meson.build new file mode 100644 index 000000000..cae1e91ab --- /dev/null +++ b/meson/test cases/common/190 install_mode/meson.build @@ -0,0 +1,59 @@ +project('install_mode test', 'c', + default_options : ['install_umask=027', 'libdir=libtest']) + +if build_machine.system() == 'windows' + error('MESON_SKIP_TEST: install_mode test requires a Unix-like OS') +endif + +# confirm no regressions in install_data +install_data('runscript.sh', + install_dir : get_option('bindir'), + install_mode : ['rwxr-sr-x', 'root', 0]) + +# confirm no regressions in install_subdir +install_subdir('sub1', + install_dir : 'share', + install_mode : ['rwxr-x--t', 'root']) + +install_subdir('sub2', + install_dir : 'share') + +# test install_mode in configure_file +conf = configuration_data() +conf.set('var', 'mystring') +conf.set('other', 'string 2') +conf.set('second', ' bonus') +conf.set('BE_TRUE', true) +configure_file(input : 'config.h.in', + output : 'config.h', + configuration : conf, + install_dir : 'include', + install_mode : 'rw-rwSr--') + +# test install_mode in custom_target +custom_target('bindat', + output : 'data.dat', + input : 'data_source.txt', + command : ['cp', '@INPUT@', '@OUTPUT@'], + install : true, + install_dir : 'subdir', + install_mode : 'rw-rwSr--') + +# test install_mode in install_headers +install_headers('rootdir.h', + install_mode : 'r--r--r-T') + +# test install_mode in install_man +install_man('foo.1', + install_mode : 'r--r--r-T') + +# test install_mode in executable +executable('trivialprog', + sources : 'trivial.c', + install : true, + install_mode : ['rwxr-sr-x', 'root', 'root']) + +# test install_mode in static_library +static_library('stat', 'stat.c', + install : true, + install_mode : ['rw---Sr--']) diff --git a/meson/test cases/common/190 install_mode/rootdir.h b/meson/test cases/common/190 install_mode/rootdir.h new file mode 100644 index 000000000..72fb13220 --- /dev/null +++ b/meson/test cases/common/190 install_mode/rootdir.h @@ -0,0 +1,3 @@ +/* This header goes to include dir root. */ + +int root_func(); diff --git a/meson/test cases/common/190 install_mode/runscript.sh b/meson/test cases/common/190 install_mode/runscript.sh new file mode 100644 index 000000000..8bc5ca6ce --- /dev/null +++ b/meson/test cases/common/190 install_mode/runscript.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +echo "Runscript" diff --git a/meson/test cases/common/190 install_mode/stat.c b/meson/test cases/common/190 install_mode/stat.c new file mode 100644 index 000000000..4825cefd2 --- /dev/null +++ b/meson/test cases/common/190 install_mode/stat.c @@ -0,0 +1 @@ +int func(void) { return 933; } diff --git a/meson/test cases/common/190 install_mode/sub1/second.dat b/meson/test cases/common/190 install_mode/sub1/second.dat new file mode 100644 index 000000000..48857a8b6 --- /dev/null +++ b/meson/test cases/common/190 install_mode/sub1/second.dat @@ -0,0 +1 @@ +Test that multiple install_subdirs meld their results. 
\ No newline at end of file diff --git a/meson/test cases/common/190 install_mode/sub2/stub b/meson/test cases/common/190 install_mode/sub2/stub new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/190 install_mode/test.json b/meson/test cases/common/190 install_mode/test.json new file mode 100644 index 000000000..3614dbcca --- /dev/null +++ b/meson/test cases/common/190 install_mode/test.json @@ -0,0 +1,15 @@ +{ + "installed": [ + {"type": "file", "file": "usr/bin/runscript.sh"}, + {"type": "exe", "file": "usr/bin/trivialprog"}, + {"type": "pdb", "file": "usr/bin/trivialprog"}, + {"type": "file", "file": "usr/include/config.h"}, + {"type": "file", "file": "usr/include/rootdir.h"}, + {"type": "file", "file": "usr/libtest/libstat.a"}, + {"type": "file", "file": "usr/share/man/man1/foo.1"}, + {"type": "file", "file": "usr/share/sub1/second.dat"}, + {"type": "file", "file": "usr/share/sub2/stub"}, + {"type": "file", "file": "usr/subdir/data.dat"} + ], + "do_not_set_opts": ["libdir"] +} diff --git a/meson/test cases/common/190 install_mode/trivial.c b/meson/test cases/common/190 install_mode/trivial.c new file mode 100644 index 000000000..96612d48b --- /dev/null +++ b/meson/test cases/common/190 install_mode/trivial.c @@ -0,0 +1,6 @@ +#include + +int main(void) { + printf("Trivial test is working.\n"); + return 0; +} diff --git a/meson/test cases/common/191 subproject array version/meson.build b/meson/test cases/common/191 subproject array version/meson.build new file mode 100644 index 000000000..0870bc498 --- /dev/null +++ b/meson/test cases/common/191 subproject array version/meson.build @@ -0,0 +1,3 @@ +project('master', 'c') + +x = subproject('foo', version : ['>=1.0.0', '<2.0']) diff --git a/meson/test cases/common/191 subproject array version/subprojects/foo/meson.build b/meson/test cases/common/191 subproject array version/subprojects/foo/meson.build new file mode 100644 index 000000000..f4ff53548 --- /dev/null +++ b/meson/test cases/common/191 subproject array version/subprojects/foo/meson.build @@ -0,0 +1 @@ +project('foo', 'c', version : '1.0.0') diff --git a/meson/test cases/common/192 feature option/meson.build b/meson/test cases/common/192 feature option/meson.build new file mode 100644 index 000000000..b5e26fac3 --- /dev/null +++ b/meson/test cases/common/192 feature option/meson.build @@ -0,0 +1,62 @@ +project('feature user option', 'c') + +feature_opts = get_option('auto_features') +required_opt = get_option('required') +optional_opt = get_option('optional') +disabled_opt = get_option('disabled') + +assert(not feature_opts.enabled(), 'Should be auto option') +assert(not feature_opts.disabled(), 'Should be auto option') +assert(feature_opts.auto(), 'Should be auto option') +assert(feature_opts.allowed(), 'Should be auto option') + +assert(required_opt.enabled(), 'Should be enabled option') +assert(not required_opt.disabled(), 'Should be enabled option') +assert(not required_opt.auto(), 'Should be enabled option') +assert(required_opt.allowed(), 'Should be enabled option') +assert(required_opt.require(true, error_message: 'xyz').enabled(), 'Should be enabled option') +assert(required_opt.disable_auto_if(true).enabled(), 'Should be enabled option') +assert(required_opt.disable_auto_if(false).enabled(), 'Should be enabled option') + +assert(not optional_opt.enabled(), 'Should be auto option') +assert(not optional_opt.disabled(), 'Should be auto option') +assert(optional_opt.auto(), 'Should be auto option') +assert(optional_opt.allowed(), 'Should be 
auto option') +assert(optional_opt.require(true).auto(), 'Should be auto option') +assert(optional_opt.require(false, error_message: 'xyz').disabled(), 'Should be disabled auto option') +assert(optional_opt.disable_auto_if(true).disabled(), 'Should be disabled auto option') +assert(optional_opt.disable_auto_if(false).auto(), 'Should be auto option') + +assert(not disabled_opt.enabled(), 'Should be disabled option') +assert(disabled_opt.disabled(), 'Should be disabled option') +assert(not disabled_opt.auto(), 'Should be disabled option') +assert(not disabled_opt.allowed(), 'Should be disabled option') +assert(disabled_opt.require(true).disabled(), 'Should be disabled option') +assert(disabled_opt.require(false, error_message: 'xyz').disabled(), 'Should be disabled option') +assert(disabled_opt.disable_auto_if(true).disabled(), 'Should be disabled option') +assert(disabled_opt.disable_auto_if(false).disabled(), 'Should be disabled option') + +dep = dependency('threads', required : required_opt) +assert(dep.found(), 'Should find required "threads" dep') + +dep = dependency('threads', required : optional_opt) +assert(dep.found(), 'Should find optional "threads" dep') + +dep = dependency('threads', required : disabled_opt) +assert(not dep.found(), 'Should not find disabled "threads" dep') + +dep = dependency('notfounddep', required : optional_opt) +assert(not dep.found(), 'Should not find optional "notfounddep" dep') + +dep = dependency('notfounddep', required : disabled_opt) +assert(not dep.found(), 'Should not find disabled "notfounddep" dep') + +cc = meson.get_compiler('c') +lib = cc.find_library('m', required : disabled_opt) +assert(not lib.found(), 'Should not find "m" library') + +cp = find_program('cp', required : disabled_opt) +assert(not cp.found(), 'Should not find "cp" program') + +found = add_languages('cpp', required : disabled_opt) +assert(not found, 'Should not find "cpp" language') diff --git a/meson/test cases/common/192 feature option/meson_options.txt b/meson/test cases/common/192 feature option/meson_options.txt new file mode 100644 index 000000000..063a35f39 --- /dev/null +++ b/meson/test cases/common/192 feature option/meson_options.txt @@ -0,0 +1,3 @@ +option('required', type : 'feature', value : 'enabled', description : 'An required feature') +option('optional', type : 'feature', value : 'auto', description : 'An optional feature') +option('disabled', type : 'feature', value : 'disabled', description : 'A disabled feature') diff --git a/meson/test cases/common/193 feature option disabled/meson.build b/meson/test cases/common/193 feature option disabled/meson.build new file mode 100644 index 000000000..1a831872a --- /dev/null +++ b/meson/test cases/common/193 feature option disabled/meson.build @@ -0,0 +1,23 @@ +project('feature user option', 'c', + default_options : ['auto_features=disabled']) + +feature_opts = get_option('auto_features') +required_opt = get_option('required') +optional_opt = get_option('optional') +disabled_opt = get_option('disabled') + +assert(not feature_opts.enabled(), 'Should be disabled option') +assert(feature_opts.disabled(), 'Should be disabled option') +assert(not feature_opts.auto(), 'Should be disabled option') + +assert(required_opt.enabled(), 'Should be enabled option') +assert(not required_opt.disabled(), 'Should be enabled option') +assert(not required_opt.auto(), 'Should be enabled option') + +assert(not optional_opt.enabled(), 'Auto feature should be disabled') +assert(optional_opt.disabled(), 'Auto feature should be disabled') 
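# (Editorial sketch, not part of the original test: in ordinary project code a
# feature option named, say, 'docs' -- the option name is an assumption -- is
# typically consumed as
#   doxygen = find_program('doxygen', required : get_option('docs'))
#   if doxygen.found()
#     # ... generate documentation ...
#   endif
# so that 'auto' degrades silently when the tool is missing, 'enabled' turns a
# missing tool into a hard configure error, and 'disabled' skips the lookup
# entirely, which is the behaviour these assertions pin down.)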
+assert(not optional_opt.auto(), 'Auto feature should be disabled') + +assert(not disabled_opt.enabled(), 'Should be disabled option') +assert(disabled_opt.disabled(), 'Should be disabled option') +assert(not disabled_opt.auto(), 'Should be disabled option') diff --git a/meson/test cases/common/193 feature option disabled/meson_options.txt b/meson/test cases/common/193 feature option disabled/meson_options.txt new file mode 100644 index 000000000..063a35f39 --- /dev/null +++ b/meson/test cases/common/193 feature option disabled/meson_options.txt @@ -0,0 +1,3 @@ +option('required', type : 'feature', value : 'enabled', description : 'An required feature') +option('optional', type : 'feature', value : 'auto', description : 'An optional feature') +option('disabled', type : 'feature', value : 'disabled', description : 'A disabled feature') diff --git a/meson/test cases/common/194 static threads/lib1.c b/meson/test cases/common/194 static threads/lib1.c new file mode 100644 index 000000000..1aa786c66 --- /dev/null +++ b/meson/test cases/common/194 static threads/lib1.c @@ -0,0 +1,13 @@ +#if defined _WIN32 +#include +#else +#include +#endif + +void *f(void) { +#if defined _WIN32 + return CreateThread; +#else + return pthread_create; +#endif +} diff --git a/meson/test cases/common/194 static threads/lib2.c b/meson/test cases/common/194 static threads/lib2.c new file mode 100644 index 000000000..e988814e2 --- /dev/null +++ b/meson/test cases/common/194 static threads/lib2.c @@ -0,0 +1,5 @@ +extern void *f(void); + +void *g(void) { + return f(); +} diff --git a/meson/test cases/common/194 static threads/meson.build b/meson/test cases/common/194 static threads/meson.build new file mode 100644 index 000000000..427920034 --- /dev/null +++ b/meson/test cases/common/194 static threads/meson.build @@ -0,0 +1,13 @@ +project('threads', 'c') + +thread_dep = dependency('threads') + + +lib1 = static_library('lib1', 'lib1.c', + dependencies : thread_dep) + +lib2 = static_library('lib2', 'lib2.c', + link_with : lib1) + +executable('prog', 'prog.c', + link_with : lib2) diff --git a/meson/test cases/common/194 static threads/prog.c b/meson/test cases/common/194 static threads/prog.c new file mode 100644 index 000000000..14a7c760f --- /dev/null +++ b/meson/test cases/common/194 static threads/prog.c @@ -0,0 +1,6 @@ +extern void *g(void); + +int main(void) { + g(); + return 0; +} diff --git a/meson/test cases/common/195 generator in subdir/com/mesonbuild/genprog.py b/meson/test cases/common/195 generator in subdir/com/mesonbuild/genprog.py new file mode 100644 index 000000000..681c43a84 --- /dev/null +++ b/meson/test cases/common/195 generator in subdir/com/mesonbuild/genprog.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 + +import os, sys, argparse + +h_templ = '''#pragma once + +int %s(void); +''' + +c_templ = '''#include"%s.h" + +int %s(void) { + return 0; +} +''' + +parser = argparse.ArgumentParser() +parser.add_argument('--searchdir', required=True) +parser.add_argument('--outdir', required=True) +parser.add_argument('ifiles', nargs='+') + +options = parser.parse_args() + +searchdir = options.searchdir +outdir = options.outdir +ifiles = options.ifiles + +rel_ofiles = [] + +for ifile in ifiles: + if not ifile.startswith(options.searchdir): + sys.exit(f'Input file {ifile} does not start with search dir {searchdir}.') + rel_ofile = ifile[len(searchdir):] + if rel_ofile[0] == '/' or rel_ofile[0] == '\\': + rel_ofile = rel_ofile[1:] + rel_ofiles.append(os.path.splitext(rel_ofile)[0]) + +ofile_bases = 
[os.path.join(outdir, i) for i in rel_ofiles] + +for i, ifile_name in enumerate(ifiles): + proto_name = open(ifile_name).readline().strip() + h_out = ofile_bases[i] + '.h' + c_out = ofile_bases[i] + '.c' + os.makedirs(os.path.split(ofile_bases[i])[0], exist_ok=True) + open(h_out, 'w').write(h_templ % (proto_name)) + open(c_out, 'w').write(c_templ % (proto_name, proto_name)) diff --git a/meson/test cases/common/195 generator in subdir/com/mesonbuild/meson.build b/meson/test cases/common/195 generator in subdir/com/mesonbuild/meson.build new file mode 100644 index 000000000..4808743fe --- /dev/null +++ b/meson/test cases/common/195 generator in subdir/com/mesonbuild/meson.build @@ -0,0 +1,10 @@ +gprog = find_program('genprog.py') + +gen = generator(gprog, \ + output : ['@BASENAME@.c', '@BASENAME@.h'], + arguments : ['--searchdir=@CURRENT_SOURCE_DIR@', '--outdir=@BUILD_DIR@', '@INPUT@']) + +generated = gen.process('subbie.inp') + +e = executable('testprog', 'testprog.c', generated) +test('testprog', e) diff --git a/meson/test cases/common/195 generator in subdir/com/mesonbuild/subbie.inp b/meson/test cases/common/195 generator in subdir/com/mesonbuild/subbie.inp new file mode 100644 index 000000000..df0f4e9ae --- /dev/null +++ b/meson/test cases/common/195 generator in subdir/com/mesonbuild/subbie.inp @@ -0,0 +1 @@ +subbie diff --git a/meson/test cases/common/195 generator in subdir/com/mesonbuild/testprog.c b/meson/test cases/common/195 generator in subdir/com/mesonbuild/testprog.c new file mode 100644 index 000000000..7a7cdf7aa --- /dev/null +++ b/meson/test cases/common/195 generator in subdir/com/mesonbuild/testprog.c @@ -0,0 +1,5 @@ +#include"subbie.h" + +int main(void) { + return subbie(); +} diff --git a/meson/test cases/common/195 generator in subdir/meson.build b/meson/test cases/common/195 generator in subdir/meson.build new file mode 100644 index 000000000..9b8eb7c63 --- /dev/null +++ b/meson/test cases/common/195 generator in subdir/meson.build @@ -0,0 +1,3 @@ +project('generator in subdir', 'c') + +subdir('com/mesonbuild') diff --git a/meson/test cases/common/196 subproject with features/meson.build b/meson/test cases/common/196 subproject with features/meson.build new file mode 100644 index 000000000..5bdfefbc0 --- /dev/null +++ b/meson/test cases/common/196 subproject with features/meson.build @@ -0,0 +1,17 @@ +project('proj', 'c') + +auto_subproj = subproject('sub', required: get_option('use-subproject')) +assert(auto_subproj.found(), 'Subproject should always be buildable and thus found') + +auto_dep = dependency('', fallback: ['sub', 'libSub'], required: true) +assert(auto_dep.found() == true, 'Subproject is required and foundable, dependency should be found.') + +disabled_subproj = subproject('disabled_sub', required: get_option('disabled-subproject')) +assert(disabled_subproj.found() == false, 'Disabled subproject should be NOT found') + +disabled_dep = dependency('', fallback: ['disabled_sub', 'libSub'], required: false) +assert(disabled_dep.found() == false, 'Subprojetc was disabled, it should never be built.') +nothing = executable('nothing', 'nothing.c', dependencies: [disabled_dep]) + +subproj_with_missing_dep = subproject('auto_sub_with_missing_dep', required: get_option('auto-sub-with-missing-dep')) +assert(subproj_with_missing_dep.found() == false, 'Subproject with required=auto and missing dependency should be NOT found') diff --git a/meson/test cases/common/196 subproject with features/meson_options.txt b/meson/test cases/common/196 subproject with 
features/meson_options.txt new file mode 100644 index 000000000..a46e5fbf5 --- /dev/null +++ b/meson/test cases/common/196 subproject with features/meson_options.txt @@ -0,0 +1,3 @@ +option('use-subproject', type : 'feature', value : 'auto') +option('disabled-subproject', type : 'feature', value : 'disabled') +option('auto-sub-with-missing-dep', type : 'feature', value : 'auto') diff --git a/meson/test cases/common/196 subproject with features/nothing.c b/meson/test cases/common/196 subproject with features/nothing.c new file mode 100644 index 000000000..58fe69254 --- /dev/null +++ b/meson/test cases/common/196 subproject with features/nothing.c @@ -0,0 +1,4 @@ +int main(void) +{ + return 0; +} diff --git a/meson/test cases/common/196 subproject with features/subprojects/auto_sub_with_missing_dep/meson.build b/meson/test cases/common/196 subproject with features/subprojects/auto_sub_with_missing_dep/meson.build new file mode 100644 index 000000000..fa6b011ea --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/auto_sub_with_missing_dep/meson.build @@ -0,0 +1,3 @@ +project('sub', 'c') + +dependency('no_way_this_exists', required: true) \ No newline at end of file diff --git a/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/meson.build b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/meson.build new file mode 100644 index 000000000..933001aeb --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/meson.build @@ -0,0 +1,3 @@ +lib = static_library('sub', 'sub.c') + +libSub = declare_dependency(include_directories: include_directories('.'), link_with: lib) \ No newline at end of file diff --git a/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.c b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.c new file mode 100644 index 000000000..e748ac750 --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.c @@ -0,0 +1,5 @@ +#include "sub.h" + +int sub(void) { + return 0; +} diff --git a/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.h b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.h new file mode 100644 index 000000000..f1ab0e19d --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/lib/sub.h @@ -0,0 +1,6 @@ +#ifndef SUB_H +#define SUB_H + +int sub(); + +#endif diff --git a/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/meson.build b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/meson.build new file mode 100644 index 000000000..65fef032b --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/disabled_sub/meson.build @@ -0,0 +1,3 @@ +project('disabled_sub', 'c') + +subdir('lib') \ No newline at end of file diff --git a/meson/test cases/common/196 subproject with features/subprojects/sub/lib/meson.build b/meson/test cases/common/196 subproject with features/subprojects/sub/lib/meson.build new file mode 100644 index 000000000..731d22bfb --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/sub/lib/meson.build @@ -0,0 +1,2 @@ +lib = static_library('sub', 'sub.c') +libSub = declare_dependency(include_directories: include_directories('.'), link_with: lib) diff --git a/meson/test cases/common/196 
subproject with features/subprojects/sub/lib/sub.c b/meson/test cases/common/196 subproject with features/subprojects/sub/lib/sub.c new file mode 100644 index 000000000..768ed36d6 --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/sub/lib/sub.c @@ -0,0 +1,5 @@ +#include "sub.h" + +int sub(void) { + return 0; +} diff --git a/meson/test cases/common/196 subproject with features/subprojects/sub/lib/sub.h b/meson/test cases/common/196 subproject with features/subprojects/sub/lib/sub.h new file mode 100644 index 000000000..2b59a3a32 --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/sub/lib/sub.h @@ -0,0 +1,6 @@ +#ifndef SUB_H +#define SUB_H + +int sub(void); + +#endif diff --git a/meson/test cases/common/196 subproject with features/subprojects/sub/meson.build b/meson/test cases/common/196 subproject with features/subprojects/sub/meson.build new file mode 100644 index 000000000..31882ac4c --- /dev/null +++ b/meson/test cases/common/196 subproject with features/subprojects/sub/meson.build @@ -0,0 +1,3 @@ +project('sub', 'c') + +subdir('lib') \ No newline at end of file diff --git a/meson/test cases/common/197 function attributes/meson.build b/meson/test cases/common/197 function attributes/meson.build new file mode 100644 index 000000000..98173096b --- /dev/null +++ b/meson/test cases/common/197 function attributes/meson.build @@ -0,0 +1,111 @@ +# Copyright © 2017-2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +project('gcc func attributes', ['c', 'cpp']) + +# For msvc these will fail because msvc doesn't support __attribute__, for +# Clang and GCC, they should pass. +c = meson.get_compiler('c') +cpp = meson.get_compiler('cpp') + +if c.get_id() == 'pgi' + error('MESON_SKIP_TEST: PGI supports its own set of features, will need a separate list for PGI to test it.') +endif + +expected_result = not ['msvc', 'clang-cl', 'intel-cl'].contains(c.get_id()) + +# Q: Why is ifunc not in this list or any of the below lists? +# A: It's too damn hard to figure out if you actually support it, since it +# requires both compiler and libc support, and there isn't a good way to +# figure that out except by running the code we're trying to test. 
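# (Editorial sketch, not part of the original test: outside a test suite
# has_function_attribute() is normally used to gate a config macro, e.g.
#   if c.has_function_attribute('noreturn')
#     add_project_arguments('-DHAVE_FUNC_ATTRIBUTE_NORETURN', language : 'c')
#   endif
# where the macro name is an assumption chosen for illustration.)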
+attributes = [ + 'aligned', + 'always_inline', + 'cold', + 'const', + 'constructor', + 'constructor_priority', + 'deprecated', + 'destructor', + 'flatten', + 'format', + 'format_arg', + 'gnu_inline', + 'hot', + 'malloc', + 'noinline', + 'nonnull', + 'noreturn', + 'nothrow', + 'pure', + 'unused', + 'used', + 'warn_unused_result', + 'weak', +] + +if c.get_id() != 'intel' + # not supported by icc as of 19.0.0 + attributes += 'weakref' +endif + +# These are unsupported on darwin with apple clang 9.1.0 +if host_machine.system() != 'darwin' + attributes += 'alias' + attributes += 'visibility' + attributes += 'alloc_size' +endif + +if ['gcc', 'intel'].contains(c.get_id()) + # not supported by clang as of 5.0.0 (at least up to 6.0.1) + attributes += 'artificial' + attributes += 'error' + attributes += 'externally_visible' + attributes += 'leaf' + attributes += 'noclone' + attributes += 'optimize' + attributes += 'warning' + + if c.get_id() == 'gcc' and c.version().version_compare('>= 7.0.0') + attributes += 'fallthrough' + endif +endif + +if get_option('mode') == 'single' + foreach a : attributes + x = c.has_function_attribute(a) + assert(x == expected_result, '@0@: @1@'.format(c.get_id(), a)) + x = cpp.has_function_attribute(a) + assert(x == expected_result, '@0@: @1@'.format(cpp.get_id(), a)) + endforeach + + win_expect = ['windows', 'cygwin'].contains(host_machine.system()) + foreach a : ['dllexport', 'dllimport'] + assert(c.has_function_attribute(a) == win_expect, + '@0@: @1@'.format(c.get_id(), a)) + assert(cpp.has_function_attribute(a) == win_expect, + '@0@: @1@'.format(cpp.get_id(), a)) + endforeach +else + if not ['msvc', 'clang-cl', 'intel-cl'].contains(c.get_id()) + multi_expected = attributes + else + multi_expected = [] + endif + + multi_check = c.get_supported_function_attributes(attributes) + assert(multi_check == multi_expected, 'get_supported_function_arguments works (C)') + multi_check = cpp.get_supported_function_attributes(attributes) + assert(multi_check == multi_expected, 'get_supported_function_arguments works (C++)') +endif diff --git a/meson/test cases/common/197 function attributes/meson_options.txt b/meson/test cases/common/197 function attributes/meson_options.txt new file mode 100644 index 000000000..4a1d87ce7 --- /dev/null +++ b/meson/test cases/common/197 function attributes/meson_options.txt @@ -0,0 +1,7 @@ +option( + 'mode', + type : 'combo', + choices : ['single', 'parallel'], + value : 'single', + description : 'Test the one at a time function or many at a time function.' 
+) diff --git a/meson/test cases/common/197 function attributes/test.json b/meson/test cases/common/197 function attributes/test.json new file mode 100644 index 000000000..d06a24a00 --- /dev/null +++ b/meson/test cases/common/197 function attributes/test.json @@ -0,0 +1,10 @@ +{ + "matrix": { + "options": { + "mode": [ + { "val": "single" }, + { "val": "parallel" } + ] + } + } +} diff --git a/meson/test cases/common/198 broken subproject/meson.build b/meson/test cases/common/198 broken subproject/meson.build new file mode 100644 index 000000000..e3a6cae36 --- /dev/null +++ b/meson/test cases/common/198 broken subproject/meson.build @@ -0,0 +1,2 @@ +project('test broken subproject') +subproject('broken', required : false) diff --git a/meson/test cases/common/198 broken subproject/subprojects/broken/broken.c b/meson/test cases/common/198 broken subproject/subprojects/broken/broken.c new file mode 100644 index 000000000..a9fc4b1e2 --- /dev/null +++ b/meson/test cases/common/198 broken subproject/subprojects/broken/broken.c @@ -0,0 +1 @@ +#error This must not compile diff --git a/meson/test cases/common/198 broken subproject/subprojects/broken/meson.build b/meson/test cases/common/198 broken subproject/subprojects/broken/meson.build new file mode 100644 index 000000000..2d64fdeda --- /dev/null +++ b/meson/test cases/common/198 broken subproject/subprojects/broken/meson.build @@ -0,0 +1,4 @@ +project('broken', 'c') + +executable('app', 'broken.c') +assert(false, 'This subproject must fail') diff --git a/meson/test cases/common/199 argument syntax/meson.build b/meson/test cases/common/199 argument syntax/meson.build new file mode 100644 index 000000000..b97ca7432 --- /dev/null +++ b/meson/test cases/common/199 argument syntax/meson.build @@ -0,0 +1,19 @@ +project( + 'argument syntax', + ['c'], +) + +cc = meson.get_compiler('c') + +if ['gcc', 'lcc', 'clang', 'intel'].contains(cc.get_id()) + expected = 'gcc' +elif ['msvc', 'clang-cl', 'intel-cl'].contains(cc.get_id()) + expected = 'msvc' +else + # It's possible that other compilers end up here that shouldn't + expected = 'other' +endif + +assert(cc.get_argument_syntax() == expected, + 'Wrong output for compiler @0@. expected @1@ but got @2@'.format( + cc.get_id(), expected, cc.get_argument_syntax())) diff --git a/meson/test cases/common/2 cpp/VERSIONFILE b/meson/test cases/common/2 cpp/VERSIONFILE new file mode 100644 index 000000000..3eefcb9dd --- /dev/null +++ b/meson/test cases/common/2 cpp/VERSIONFILE @@ -0,0 +1 @@ +1.0.0 diff --git a/meson/test cases/common/2 cpp/cpp.C b/meson/test cases/common/2 cpp/cpp.C new file mode 100644 index 000000000..d3df47689 --- /dev/null +++ b/meson/test cases/common/2 cpp/cpp.C @@ -0,0 +1,6 @@ +#include + +int main(void) { + std::cout << "C++ seems to be working." 
<< std::endl; + return 0; +} diff --git a/meson/test cases/common/2 cpp/meson.build b/meson/test cases/common/2 cpp/meson.build new file mode 100644 index 000000000..2962681c0 --- /dev/null +++ b/meson/test cases/common/2 cpp/meson.build @@ -0,0 +1,41 @@ +project('c++ test', 'cpp', version: files('VERSIONFILE')) + +cpp = meson.get_compiler('cpp') +if cpp.get_id() == 'intel' + # Error out if the -std=xxx option is incorrect + add_project_arguments('-diag-error', '10159', language : 'cpp') +elif cpp.get_id() == 'intel-cl' + add_project_arguments('/Qdiag-error:10159', language : 'cpp') +endif + +exe = executable('trivialprog', 'trivial.cc', extra_files : 'something.txt') +test('runtest', exe) + +has_not_changed = false +if is_disabler(exe) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'Executable has changed.') + +assert(not is_disabler(exe), 'Executable is a disabler.') + +exe = executable('trivialprog', 'trivial.cc', extra_files : disabler()) + +assert(is_disabler(exe), 'Executable is not a disabler.') + +if exe.found() + exe_disabled = false +else + exe_disabled = true +endif + +assert(exe_disabled, 'Executable was not disabled.') + +if cpp.get_id() == 'msvc' + exe = executable('cppprog', 'cpp.C', cpp_args : '/TP') +else + exe = executable('cppprog', 'cpp.C') +endif +test('cpptest', exe) diff --git a/meson/test cases/common/2 cpp/something.txt b/meson/test cases/common/2 cpp/something.txt new file mode 100644 index 000000000..9f6cc91b1 --- /dev/null +++ b/meson/test cases/common/2 cpp/something.txt @@ -0,0 +1 @@ +This file is only here so it shows up in IDEs as part of this target. diff --git a/meson/test cases/common/2 cpp/trivial.cc b/meson/test cases/common/2 cpp/trivial.cc new file mode 100644 index 000000000..d3df47689 --- /dev/null +++ b/meson/test cases/common/2 cpp/trivial.cc @@ -0,0 +1,6 @@ +#include + +int main(void) { + std::cout << "C++ seems to be working." << std::endl; + return 0; +} diff --git a/meson/test cases/common/20 global arg/meson.build b/meson/test cases/common/20 global arg/meson.build new file mode 100644 index 000000000..2a1c73669 --- /dev/null +++ b/meson/test cases/common/20 global arg/meson.build @@ -0,0 +1,16 @@ +project('global arg test', 'cpp', 'c') + +add_global_arguments('-DMYTHING', language : 'c') +add_global_arguments('-DMYCPPTHING', language : 'cpp') +add_global_arguments('-DGLOBAL_HOST', language : 'c') + +build_c_args = ['-DARG_BUILD'] +c_args = ['-DARG_HOST'] + +add_global_arguments('-DMYCANDCPPTHING', language: ['c', 'cpp']) + +exe2 = executable('prog2', 'prog.c', c_args : c_args) +exe3 = executable('prog3', 'prog.cc') + +test('prog2', exe2) +test('prog3', exe3) diff --git a/meson/test cases/common/20 global arg/prog.c b/meson/test cases/common/20 global arg/prog.c new file mode 100644 index 000000000..2a71236b6 --- /dev/null +++ b/meson/test cases/common/20 global arg/prog.c @@ -0,0 +1,43 @@ +#ifndef MYTHING + #error "Global argument not set" +#endif + +#ifdef MYCPPTHING + #error "Wrong global argument set" +#endif + +#ifndef MYCANDCPPTHING + #error "Global argument not set" +#endif + +#if !defined(GLOBAL_HOST) && !defined(GLOBAL_BUILD) + #error "Neither global_host nor glogal_build is set." +#endif + +#if defined(GLOBAL_HOST) && defined(GLOBAL_BUILD) + #error "Both global build and global host set." +#endif + +#ifdef GLOBAL_BUILD + #ifndef ARG_BUILD + #error "Global is build but arg_build is not set." + #endif + + #ifdef ARG_HOST + #error "Global is build but arg host is set." 
+ #endif +#endif + +#ifdef GLOBAL_HOST + #ifndef ARG_HOST + #error "Global is host but arg_host is not set." + #endif + + #ifdef ARG_BUILD + #error "Global is host but arg_build is set." + #endif +#endif + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/20 global arg/prog.cc b/meson/test cases/common/20 global arg/prog.cc new file mode 100644 index 000000000..5c3220992 --- /dev/null +++ b/meson/test cases/common/20 global arg/prog.cc @@ -0,0 +1,15 @@ +#ifdef MYTHING +#error "Wrong global argument set" +#endif + +#ifndef MYCPPTHING +#error "Global argument not set" +#endif + +#ifndef MYCANDCPPTHING +#error "Global argument not set" +#endif + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/200 install name_prefix name_suffix/libfile.c b/meson/test cases/common/200 install name_prefix name_suffix/libfile.c new file mode 100644 index 000000000..91489b287 --- /dev/null +++ b/meson/test cases/common/200 install name_prefix name_suffix/libfile.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func(void) { + return 0; +} diff --git a/meson/test cases/common/200 install name_prefix name_suffix/meson.build b/meson/test cases/common/200 install name_prefix name_suffix/meson.build new file mode 100644 index 000000000..044f915c0 --- /dev/null +++ b/meson/test cases/common/200 install name_prefix name_suffix/meson.build @@ -0,0 +1,13 @@ +project('library with name_prefix name_suffix test', 'c') + +shared_library('foo', 'libfile.c', name_prefix: '', install : true) +static_library('bar', 'libfile.c', name_prefix: '', install : true) + +shared_library('baz', 'libfile.c', name_suffix: 'cheese', install : true) +static_library('qux', 'libfile.c', name_suffix: 'cheese', install : true) + +shared_library('corge', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true) +static_library('grault', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true) + +# exercise default name_prefix and name_suffix +shared_library('garply', 'libfile.c', name_prefix: [], name_suffix: [], install : true) diff --git a/meson/test cases/common/200 install name_prefix name_suffix/test.json b/meson/test cases/common/200 install name_prefix name_suffix/test.json new file mode 100644 index 000000000..b92a98538 --- /dev/null +++ b/meson/test cases/common/200 install name_prefix name_suffix/test.json @@ -0,0 +1,19 @@ +{ + "installed": [ + {"type": "pdb", "file": "usr/bin/baz"}, + {"type": "pdb", "file": "usr/bin/bowcorge"}, + {"type": "pdb", "file": "usr/bin/foo"}, + {"type": "expr", "file": "usr/?lib/bowcorge.stern"}, + {"type": "expr", "file": "usr/lib/?libbaz.cheese"}, + {"type": "file", "file": "usr/lib/bar.a"}, + {"type": "implib", "file": "usr/lib/bowcorge"}, + {"type": "file", "file": "usr/lib/bowgrault.stern"}, + {"type": "implib", "file": "usr/lib/foo"}, + {"type": "expr", "file": "usr/lib/foo?so"}, + {"type": "implib", "file": "usr/lib/libbaz"}, + {"type": "file", "file": "usr/lib/libqux.cheese"}, + {"type": "expr", "file": "usr/?lib/libgarply?so"}, + {"type": "implib", "file": "usr/lib/libgarply"}, + {"type": "pdb", "file": "usr/bin/garply"} + ] +} diff --git a/meson/test cases/common/201 kwarg entry/inc/prog.h b/meson/test cases/common/201 kwarg entry/inc/prog.h new file mode 100644 
index 000000000..665521dee --- /dev/null +++ b/meson/test cases/common/201 kwarg entry/inc/prog.h @@ -0,0 +1,3 @@ +#pragma once + +#define MESSAGE "Hello there.\n" diff --git a/meson/test cases/common/201 kwarg entry/meson.build b/meson/test cases/common/201 kwarg entry/meson.build new file mode 100644 index 000000000..564ec37fc --- /dev/null +++ b/meson/test cases/common/201 kwarg entry/meson.build @@ -0,0 +1,7 @@ +project('kwarg', 'c') + +default_kwargs = {'install': true, + 'include_directories': include_directories('inc')} + +executable('prog', 'prog.c', + kwargs: default_kwargs) diff --git a/meson/test cases/common/201 kwarg entry/prog.c b/meson/test cases/common/201 kwarg entry/prog.c new file mode 100644 index 000000000..2eec26bbb --- /dev/null +++ b/meson/test cases/common/201 kwarg entry/prog.c @@ -0,0 +1,7 @@ +#include +#include + +int main(void) { + printf(MESSAGE); + return 0; +} diff --git a/meson/test cases/common/201 kwarg entry/test.json b/meson/test cases/common/201 kwarg entry/test.json new file mode 100644 index 000000000..135300de5 --- /dev/null +++ b/meson/test cases/common/201 kwarg entry/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/prog"}, + {"type": "pdb", "file": "usr/bin/prog"} + ] +} diff --git a/meson/test cases/common/202 custom target build by default/docgen.py b/meson/test cases/common/202 custom target build by default/docgen.py new file mode 100644 index 000000000..f343f2178 --- /dev/null +++ b/meson/test cases/common/202 custom target build by default/docgen.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 + +import os +import sys + +out = sys.argv[1] + +os.mkdir(out) + +for name in ('a', 'b', 'c'): + with open(os.path.join(out, name + '.txt'), 'w') as f: + f.write(name) diff --git a/meson/test cases/common/202 custom target build by default/meson.build b/meson/test cases/common/202 custom target build by default/meson.build new file mode 100644 index 000000000..7c81aa254 --- /dev/null +++ b/meson/test cases/common/202 custom target build by default/meson.build @@ -0,0 +1,10 @@ +project('custom-target-dir-install', 'c') + +docgen = find_program('docgen.py') + +custom_target('docgen', + output : 'html', + command : [docgen, '@OUTPUT@'], + install : true, + build_by_default : false, + install_dir : join_paths(get_option('datadir'), 'doc/testpkgname')) diff --git a/meson/test cases/common/202 custom target build by default/test.json b/meson/test cases/common/202 custom target build by default/test.json new file mode 100644 index 000000000..df8bcb959 --- /dev/null +++ b/meson/test cases/common/202 custom target build by default/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + + ] +} diff --git a/meson/test cases/common/203 find_library and headers/foo.h b/meson/test cases/common/203 find_library and headers/foo.h new file mode 100644 index 000000000..014e06e82 --- /dev/null +++ b/meson/test cases/common/203 find_library and headers/foo.h @@ -0,0 +1 @@ +#define VAL 42 diff --git a/meson/test cases/common/203 find_library and headers/meson.build b/meson/test cases/common/203 find_library and headers/meson.build new file mode 100644 index 000000000..bcd71f12a --- /dev/null +++ b/meson/test cases/common/203 find_library and headers/meson.build @@ -0,0 +1,23 @@ +project('find library and headers', 'c') + +cc = meson.get_compiler('c') + +if not cc.find_library('z', required : false).found() + error('MESON_SKIP_TEST: zlib not found.') +endif + +lib = cc.find_library('z', + has_headers : 'foo.h', + required : false) +assert(not 
lib.found(), 'Header should be missing') + +lib = cc.find_library('z', + has_headers : 'foo.h', + header_include_directories : include_directories('.')) +assert(lib.found(), 'Header should be found') + +lib = cc.find_library('z', + has_headers : ['foo.h', 'bar.h'], + header_include_directories : include_directories('.'), + required : false) +assert(not lib.found(), 'One header should be missing') diff --git a/meson/test cases/common/204 line continuation/meson.build b/meson/test cases/common/204 line continuation/meson.build new file mode 100644 index 000000000..16c72f9ae --- /dev/null +++ b/meson/test cases/common/204 line continuation/meson.build @@ -0,0 +1,17 @@ +project('line continuation') + +a = 1 +b = 2 + +c = a \ ++b +assert(c == 3, 'Line continuation is not working') + +d = a + \ + b +assert(d == 3, 'Line continuation is not working') + +if a == 1 and \ + b == 3 + error('Line continuation in "if" condition is not working') +endif diff --git a/meson/test cases/common/205 native file path override/main.cpp b/meson/test cases/common/205 native file path override/main.cpp new file mode 100644 index 000000000..91bc80906 --- /dev/null +++ b/meson/test cases/common/205 native file path override/main.cpp @@ -0,0 +1,5 @@ +#include + +int main(void) { + std::cout << "Hello world!" << std::endl; +} diff --git a/meson/test cases/common/205 native file path override/meson.build b/meson/test cases/common/205 native file path override/meson.build new file mode 100644 index 000000000..142ca1cfc --- /dev/null +++ b/meson/test cases/common/205 native file path override/meson.build @@ -0,0 +1,7 @@ +project('native file install dir override', 'cpp') + +if meson.is_cross_build() + error('MESON_SKIP_TEST cannot test native build rules in cross build') +endif + +executable('main', 'main.cpp', install : true) diff --git a/meson/test cases/common/205 native file path override/nativefile.ini b/meson/test cases/common/205 native file path override/nativefile.ini new file mode 100644 index 000000000..1c295c7d4 --- /dev/null +++ b/meson/test cases/common/205 native file path override/nativefile.ini @@ -0,0 +1,2 @@ +[paths] +bindir = 'custom_bindir' diff --git a/meson/test cases/common/205 native file path override/test.json b/meson/test cases/common/205 native file path override/test.json new file mode 100644 index 000000000..7954c8ea8 --- /dev/null +++ b/meson/test cases/common/205 native file path override/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/custom_bindir/main"}, + {"type": "pdb", "file": "usr/custom_bindir/main"} + ] +} diff --git a/meson/test cases/common/206 tap tests/cat.c b/meson/test cases/common/206 tap tests/cat.c new file mode 100644 index 000000000..4b92010ad --- /dev/null +++ b/meson/test cases/common/206 tap tests/cat.c @@ -0,0 +1,26 @@ +#include +#include + +int main(int argc, char **argv) { + char buf[1024]; + size_t len; + FILE *fh; + + if (argc != 2) { + fprintf(stderr, "Incorrect number of arguments, got %i\n", argc); + return 1; + } + fh = fopen(argv[1], "r"); + if (fh == NULL) { + fprintf(stderr, "Opening %s: errno=%i\n", argv[1], errno); + return 1; + } + do { + len = fread(buf, 1, sizeof(buf), fh); + if (len > 0) { + fwrite(buf, 1, len, stdout); + } + } while (len > 0); + fclose(fh); + return 0; +} diff --git a/meson/test cases/common/206 tap tests/issue7515.txt b/meson/test cases/common/206 tap tests/issue7515.txt new file mode 100644 index 000000000..ca8563778 --- /dev/null +++ b/meson/test cases/common/206 tap tests/issue7515.txt @@ -0,0 +1,27 
@@ +1..26 +ok 1 Gtk overrides UI template sets up internal and public template children +ok 2 Gtk overrides UI template sets up public template children with the correct widgets +ok 3 Gtk overrides UI template sets up internal template children with the correct widgets +ok 4 Gtk overrides UI template connects template callbacks to the correct handler +ok 5 Gtk overrides UI template binds template callbacks to the correct object +ok 6 Gtk overrides UI template from resource sets up internal and public template children +ok 7 Gtk overrides UI template from resource sets up public template children with the correct widgets +ok 8 Gtk overrides UI template from resource sets up internal template children with the correct widgets +ok 9 Gtk overrides UI template from resource connects template callbacks to the correct handler +ok 10 Gtk overrides UI template from resource binds template callbacks to the correct object +ok 11 Gtk overrides UI template from file sets up internal and public template children +ok 12 Gtk overrides UI template from file sets up public template children with the correct widgets +ok 13 Gtk overrides UI template from file sets up internal template children with the correct widgets +ok 14 Gtk overrides UI template from file connects template callbacks to the correct handler +ok 15 Gtk overrides UI template from file binds template callbacks to the correct object +ok 16 Gtk overrides Class inheriting from template class sets up internal and public template children # SKIP pending +ok 17 Gtk overrides Class inheriting from template class sets up public template children with the correct widgets # SKIP pending +ok 18 Gtk overrides Class inheriting from template class sets up internal template children with the correct widgets # SKIP pending +ok 19 Gtk overrides Class inheriting from template class connects template callbacks to the correct handler # SKIP pending +ok 20 Gtk overrides Class inheriting from template class binds template callbacks to the correct object # SKIP pending +ok 21 Gtk overrides sets CSS names on classes +ok 22 Gtk overrides avoid crashing when GTK vfuncs are called in garbage collection +ok 23 Gtk overrides accepts string in place of GdkAtom +ok 24 Gtk overrides accepts null in place of GdkAtom as GDK_NONE +ok 25 Gtk overrides uses the correct GType for null child properties +ok 26 Gtk overrides can create a Gtk.TreeIter with accessible stamp field diff --git a/meson/test cases/common/206 tap tests/meson.build b/meson/test cases/common/206 tap tests/meson.build new file mode 100644 index 000000000..522131965 --- /dev/null +++ b/meson/test cases/common/206 tap tests/meson.build @@ -0,0 +1,14 @@ +project('test features', 'c') + +tester = executable('tester', 'tester.c') +cat = executable('cat', 'cat.c') +test('pass', tester, args : ['ok'], protocol: 'tap') +test('fail', tester, args : ['not ok'], should_fail: true, protocol: 'tap') +test('xfail', tester, args : ['not ok # todo'], protocol: 'tap') +test('xpass', tester, args : ['ok # todo'], should_fail: true, protocol: 'tap') +test('skip', tester, args : ['ok # skip'], protocol: 'tap') +test('partially skipped', tester, args : ['ok 1\nok 2 # skip'], protocol: 'tap') +test('partially skipped (real-world example)', cat, args : [files('issue7515.txt')], protocol: 'tap') +test('skip comment', tester, args : ['ok # Skipped: with a comment'], protocol: 'tap') +test('skip failure', tester, args : ['not ok # skip'], should_fail: true, protocol: 'tap') +test('no tests', tester, args : ['1..0 # skip'], protocol: 
'tap') diff --git a/meson/test cases/common/206 tap tests/tester.c b/meson/test cases/common/206 tap tests/tester.c new file mode 100644 index 000000000..ac582e7c0 --- /dev/null +++ b/meson/test cases/common/206 tap tests/tester.c @@ -0,0 +1,10 @@ +#include + +int main(int argc, char **argv) { + if (argc != 2) { + fprintf(stderr, "Incorrect number of arguments, got %i\n", argc); + return 1; + } + puts(argv[1]); + return 0; +} diff --git a/meson/test cases/common/207 warning level 0/main.cpp b/meson/test cases/common/207 warning level 0/main.cpp new file mode 100644 index 000000000..954d9ce3e --- /dev/null +++ b/meson/test cases/common/207 warning level 0/main.cpp @@ -0,0 +1,12 @@ +#include + +#define PROJECT_NAME "demo" + +int main(int argc, char **argv) { + if(argc != 1) { + std::cout << argv[0] << "takes no arguments.\n"; + return 1; + } + std::cout << "This is project " << PROJECT_NAME << ".\n"; + return 0; +} diff --git a/meson/test cases/common/207 warning level 0/meson.build b/meson/test cases/common/207 warning level 0/meson.build new file mode 100644 index 000000000..f2bd3390b --- /dev/null +++ b/meson/test cases/common/207 warning level 0/meson.build @@ -0,0 +1,3 @@ +project('warning_level', 'cpp', default_options : ['warning_level=0']) + +exe = executable('main', 'main.cpp', install : false) diff --git a/meson/test cases/common/208 link custom/custom_stlib.py b/meson/test cases/common/208 link custom/custom_stlib.py new file mode 100755 index 000000000..6a090f3db --- /dev/null +++ b/meson/test cases/common/208 link custom/custom_stlib.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python3 + +import shutil, sys, subprocess, argparse, pathlib +import platform + +parser = argparse.ArgumentParser() + +parser.add_argument('--private-dir', required=True) +parser.add_argument('-o', required=True) +parser.add_argument('cmparr', nargs='+') + +contents = '''#include + +void flob(void) { + printf("Now flobbing.\\n"); +} +''' + +def get_pic_args(): + platname = platform.system().lower() + if platname in ['windows', 'darwin'] or sys.platform == 'cygwin': + return [] + return ['-fPIC'] + +def generate_lib_gnulike(outfile, c_file, private_dir, compiler_array): + if shutil.which('ar'): + static_linker = 'ar' + elif shutil.which('llvm-ar'): + static_linker = 'llvm-ar' + elif shutil.which('gcc-ar'): + static_linker = 'gcc-ar' + else: + sys.exit('Could not detect a static linker.') + o_file = c_file.with_suffix('.o') + compile_cmd = compiler_array + ['-c', '-g', '-O2', '-o', str(o_file), str(c_file)] + compile_cmd += get_pic_args() + subprocess.check_call(compile_cmd) + out_file = pathlib.Path(outfile) + if out_file.exists(): + out_file.unlink() + link_cmd = [static_linker, 'csr', outfile, str(o_file)] + subprocess.check_call(link_cmd) + return 0 + + +def generate_lib_msvc(outfile, c_file, private_dir, compiler_array): + static_linker = 'lib' + o_file = c_file.with_suffix('.obj') + compile_cmd = compiler_array + ['/MDd', + '/nologo', + '/ZI', + '/Ob0', + '/Od', + '/c', + '/Fo' + str(o_file), + str(c_file)] + subprocess.check_call(compile_cmd) + out_file = pathlib.Path(outfile) + if out_file.exists(): + out_file.unlink() + link_cmd = [static_linker, + '/nologo', + '/OUT:' + str(outfile), + str(o_file)] + subprocess.check_call(link_cmd) + return 0 + +def generate_lib(outfile, private_dir, compiler_array): + private_dir = pathlib.Path(private_dir) + if not private_dir.exists(): + private_dir.mkdir() + c_file = private_dir / 'flob.c' + c_file.write_text(contents) + for i in compiler_array: + if (i.endswith('cl') 
or i.endswith('cl.exe')) and 'clang-cl' not in i: + return generate_lib_msvc(outfile, c_file, private_dir, compiler_array) + return generate_lib_gnulike(outfile, c_file, private_dir, compiler_array) + +if __name__ == '__main__': + options = parser.parse_args() + sys.exit(generate_lib(options.o, options.private_dir, options.cmparr)) diff --git a/meson/test cases/common/208 link custom/custom_target.c b/meson/test cases/common/208 link custom/custom_target.c new file mode 100644 index 000000000..1bbe82c38 --- /dev/null +++ b/meson/test cases/common/208 link custom/custom_target.c @@ -0,0 +1,6 @@ +void outer_lib_func(void); + +int main(void) { + outer_lib_func(); + return 0; +} diff --git a/meson/test cases/common/208 link custom/custom_target.py b/meson/test cases/common/208 link custom/custom_target.py new file mode 100644 index 000000000..c246344da --- /dev/null +++ b/meson/test cases/common/208 link custom/custom_target.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import shutil, sys + +if __name__ == '__main__': + shutil.copyfile(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/208 link custom/dummy.c b/meson/test cases/common/208 link custom/dummy.c new file mode 100644 index 000000000..53a4a402d --- /dev/null +++ b/meson/test cases/common/208 link custom/dummy.c @@ -0,0 +1 @@ +void inner_lib_func(void) {} \ No newline at end of file diff --git a/meson/test cases/common/208 link custom/lib.c b/meson/test cases/common/208 link custom/lib.c new file mode 100644 index 000000000..585b6c905 --- /dev/null +++ b/meson/test cases/common/208 link custom/lib.c @@ -0,0 +1,7 @@ +void flob(void); + +int foo(void) +{ + flob(); + return 0; +} diff --git a/meson/test cases/common/208 link custom/meson.build b/meson/test cases/common/208 link custom/meson.build new file mode 100644 index 000000000..4d4f65591 --- /dev/null +++ b/meson/test cases/common/208 link custom/meson.build @@ -0,0 +1,86 @@ +project('linkcustom', 'c') + +# This would require passing the static linker to the build script or having +# it detect it by itself. I'm too lazy to implement it now and it is not +# really needed for testing that custom targets work. It is the responsibility +# of the custom target to produce things in the correct format. 
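# (Editorial sketch, not part of the original test: one way to hand a static
# archiver to a generator script would be
#   ar = find_program('ar', 'llvm-ar', 'gcc-ar', required : false)
# and to append ar.full_path() to the custom_target() command when ar.found();
# this is an assumed design, not something this test implements.)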
+assert(not meson.is_cross_build(), + 'MESON_SKIP_TEST cross checking not implemented.') + +cc = meson.get_compiler('c') +genprog = find_program('custom_stlib.py') + +clib = custom_target('linkcustom', + output: 'libflob.a', + command: [genprog, + '-o', '@OUTPUT@', + '--private-dir', '@PRIVATE_DIR@'] + cc.cmd_array()) + +# custom_target tests + +exe = executable('prog', 'prog.c', link_with: clib) +test('linkcustom', exe) + +d = declare_dependency(link_with: clib) + +exe2 = executable('prog2', 'prog.c', dependencies: d) +test('linkcustom2', exe2) + +# Link whole tests + +if meson.backend() == 'xcode' + message('Xcode does not support link whole so skipping.') +else + exe3 = executable('prog3', 'prog.c', link_whole: clib) + test('linkwhole', exe) + + d2 = declare_dependency(link_whole: clib) + + exe4 = executable('prog4', 'prog.c', dependencies: d2) + test('linkwhole2', exe2) +endif + +# custom_target[i] tests + +exe_i = executable('prog_i', 'prog.c', link_with: clib[0]) +test('linkcustom', exe_i) + +d_i = declare_dependency(link_with: clib[0]) + +exe2_i = executable('prog2_i', 'prog.c', dependencies: d_i) +test('linkcustom2_i', exe2_i) + +# Link whole tests + +if meson.backend() == 'xcode' + message('Xcode does not support link whole so skipping.') +else + shared_library('lib1', 'lib.c', link_whole: clib) + + exe3_i = executable('prog3_i', 'prog.c', link_whole: clib[0]) + test('linkwhole', exe) + + d2_i = declare_dependency(link_whole: clib[0]) + + exe4_i = executable('prog4_i', 'prog.c', dependencies: d2_i) + test('linkwhole2_i', exe2_i) +endif + +# Link with custom target + +dummy = static_library('dummy', 'dummy.c') + +custom_prog = find_program('custom_target.py') +t = custom_target('custom', input: dummy, output: 'libcustom.a', command: [custom_prog, '@INPUT@', '@OUTPUT@']) + +dep1 = declare_dependency(link_with: t) +dep2 = declare_dependency(link_with: t[0]) + +lib1 = static_library('lib1', 'outerlib.c', dependencies: dep1) +lib2 = static_library('lib2', 'outerlib.c', dependencies: dep2) + +exe1 = executable('exe1', 'custom_target.c', link_with: lib1) +test('custom_target_1', exe1) + +exe1_2 = executable('exe1_2', 'custom_target.c', link_with: lib2) +test('custom_target_2', exe2) \ No newline at end of file diff --git a/meson/test cases/common/208 link custom/outerlib.c b/meson/test cases/common/208 link custom/outerlib.c new file mode 100644 index 000000000..6861f8d45 --- /dev/null +++ b/meson/test cases/common/208 link custom/outerlib.c @@ -0,0 +1,3 @@ +void inner_lib_func(void); + +void outer_lib_func(void) { inner_lib_func(); } \ No newline at end of file diff --git a/meson/test cases/common/208 link custom/prog.c b/meson/test cases/common/208 link custom/prog.c new file mode 100644 index 000000000..efecbef34 --- /dev/null +++ b/meson/test cases/common/208 link custom/prog.c @@ -0,0 +1,6 @@ +void flob(void); + +int main(void) { + flob(); + return 0; +} diff --git a/meson/test cases/common/209 link custom_i single from multiple/generate_conflicting_stlibs.py b/meson/test cases/common/209 link custom_i single from multiple/generate_conflicting_stlibs.py new file mode 100644 index 000000000..42d6631dd --- /dev/null +++ b/meson/test cases/common/209 link custom_i single from multiple/generate_conflicting_stlibs.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 + +import shutil, sys, subprocess, argparse, pathlib + +parser = argparse.ArgumentParser() + +parser.add_argument('--private-dir', required=True) +parser.add_argument('-o', nargs='+', required=True) +parser.add_argument('cmparr', 
nargs='+') + +contents = [''' +int flob() { + return 0; +} +''', ''' +int flob() { + return 1; +} +'''] + +def generate_lib_gnulike(outfile, c_file, private_dir, compiler_array): + if shutil.which('ar'): + static_linker = 'ar' + elif shutil.which('llvm-ar'): + static_linker = 'llvm-ar' + elif shutil.which('gcc-ar'): + static_linker = 'gcc-ar' + else: + sys.exit('Could not detect a static linker.') + o_file = c_file.with_suffix('.o') + compile_cmd = compiler_array + ['-c', '-g', '-O2', '-o', str(o_file), str(c_file)] + subprocess.check_call(compile_cmd) + out_file = pathlib.Path(outfile) + if out_file.exists(): + out_file.unlink() + link_cmd = [static_linker, 'csr', outfile, str(o_file)] + subprocess.check_call(link_cmd) + return 0 + + +def generate_lib_msvc(outfile, c_file, private_dir, compiler_array): + static_linker = 'lib' + o_file = c_file.with_suffix('.obj') + compile_cmd = compiler_array + ['/MDd', + '/nologo', + '/ZI', + '/Ob0', + '/Od', + '/c', + '/Fo' + str(o_file), + str(c_file)] + subprocess.check_call(compile_cmd) + out_file = pathlib.Path(outfile) + if out_file.exists(): + out_file.unlink() + link_cmd = [static_linker, + '/nologo', + '/OUT:' + str(outfile), + str(o_file)] + subprocess.check_call(link_cmd) + return 0 + +def generate_lib(outfiles, private_dir, compiler_array): + private_dir = pathlib.Path(private_dir) + if not private_dir.exists(): + private_dir.mkdir() + + for i, content in enumerate(contents): + c_file = private_dir / ('flob_' + str(i + 1) + '.c') + c_file.write_text(content) + outfile = outfiles[i] + + cl_found = False + for cl_arg in compiler_array: + if (cl_arg.endswith('cl') or cl_arg.endswith('cl.exe')) and 'clang-cl' not in cl_arg: + ret = generate_lib_msvc(outfile, c_file, private_dir, compiler_array) + if ret > 0: + return ret + else: + cl_found = True + break + if not cl_found: + ret = generate_lib_gnulike(outfile, c_file, private_dir, compiler_array) + if ret > 0: + return ret + return 0 + +if __name__ == '__main__': + options = parser.parse_args() + sys.exit(generate_lib(options.o, options.private_dir, options.cmparr)) diff --git a/meson/test cases/common/209 link custom_i single from multiple/meson.build b/meson/test cases/common/209 link custom_i single from multiple/meson.build new file mode 100644 index 000000000..7aadb17d4 --- /dev/null +++ b/meson/test cases/common/209 link custom_i single from multiple/meson.build @@ -0,0 +1,42 @@ +project('linkcustom', 'c') + +# This would require passing the static linker to the build script or having +# it detect it by itself. I'm too lazy to implement it now and it is not +# really needed for testing that custom targets work. It is the responsibility +# of the custom target to produce things in the correct format. 
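+# Note: the two archives generated above deliberately conflict: flob() in
+# libflob_1.a returns 0 while the one in libflob_2.a returns 1. The targets
+# below link only clib[1], which is what prog.c's `flob() == 1` check expects.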
+assert(not meson.is_cross_build(), + 'MESON_SKIP_TEST cross checking not implemented.') + +cc = meson.get_compiler('c') +genprog = find_program('generate_conflicting_stlibs.py') + +clib = custom_target('linkcustom', + output: ['libflob_1.a', 'libflob_2.a'], + command: [genprog, + '-o', '@OUTPUT@', + '--private-dir', '@PRIVATE_DIR@'] + cc.cmd_array()) + +clib_2 = clib[1] + +exe = executable('prog', 'prog.c', link_with: clib_2) +test('linkcustom', exe) + +d = declare_dependency(link_with: clib_2) + +exe2 = executable('prog2', 'prog.c', dependencies: d) +test('linkcustom2', exe2) + +# Link whole tests + +if meson.backend() == 'xcode' + message('Xcode does not support link whole so skipping.') + subdir_done() +endif + +exe3 = executable('prog3', 'prog.c', link_whole: clib_2) +test('linkwhole', exe) + +d2 = declare_dependency(link_whole: clib_2) + +exe4 = executable('prog4', 'prog.c', dependencies: d2) +test('linkwhole2', exe2) diff --git a/meson/test cases/common/209 link custom_i single from multiple/prog.c b/meson/test cases/common/209 link custom_i single from multiple/prog.c new file mode 100644 index 000000000..040672a42 --- /dev/null +++ b/meson/test cases/common/209 link custom_i single from multiple/prog.c @@ -0,0 +1,5 @@ +int flob(void); + +int main(void) { + return (flob() == 1 ? 0 : 1); +} diff --git a/meson/test cases/common/21 target arg/func.c b/meson/test cases/common/21 target arg/func.c new file mode 100644 index 000000000..8c0659e96 --- /dev/null +++ b/meson/test cases/common/21 target arg/func.c @@ -0,0 +1,9 @@ +#ifndef CTHING +#error "Local argument not set" +#endif + +#ifdef CPPTHING +#error "Wrong local argument set" +#endif + +int func(void) { return 0; } diff --git a/meson/test cases/common/21 target arg/func2.c b/meson/test cases/common/21 target arg/func2.c new file mode 100644 index 000000000..1897cf7d2 --- /dev/null +++ b/meson/test cases/common/21 target arg/func2.c @@ -0,0 +1,9 @@ +#ifdef CTHING +#error "Local C argument set in wrong target" +#endif + +#ifdef CPPTHING +#error "Local CPP argument set in wrong target" +#endif + +int func(void) { return 0; } diff --git a/meson/test cases/common/21 target arg/meson.build b/meson/test cases/common/21 target arg/meson.build new file mode 100644 index 000000000..11ac006c3 --- /dev/null +++ b/meson/test cases/common/21 target arg/meson.build @@ -0,0 +1,9 @@ +project('local arg test', 'cpp', 'c') + +exe1 = executable('prog', 'prog.cc', 'func.c', \ +c_args : '-DCTHING', \ +cpp_args : '-DCPPTHING') +exe2 = executable('prog2', 'prog2.cc', 'func2.c') + +test('prog1', exe1) +test('prog2', exe2) diff --git a/meson/test cases/common/21 target arg/prog.cc b/meson/test cases/common/21 target arg/prog.cc new file mode 100644 index 000000000..23028af4d --- /dev/null +++ b/meson/test cases/common/21 target arg/prog.cc @@ -0,0 +1,13 @@ +#ifdef CTHING +#error "Wrong local argument set" +#endif + +#ifndef CPPTHING +#error "Local argument not set" +#endif + +extern "C" int func(); + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/21 target arg/prog2.cc b/meson/test cases/common/21 target arg/prog2.cc new file mode 100644 index 000000000..e2ffe62ad --- /dev/null +++ b/meson/test cases/common/21 target arg/prog2.cc @@ -0,0 +1,13 @@ +#ifdef CTHING +#error "Local C argument set in wrong target" +#endif + +#ifdef CPPTHING +#error "Local CPP argument set in wrong target" +#endif + +extern "C" int func(); + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/210 link custom_i multiple from 
multiple/generate_stlibs.py b/meson/test cases/common/210 link custom_i multiple from multiple/generate_stlibs.py new file mode 100644 index 000000000..5292006c5 --- /dev/null +++ b/meson/test cases/common/210 link custom_i multiple from multiple/generate_stlibs.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 + +import shutil, sys, subprocess, argparse, pathlib + +parser = argparse.ArgumentParser() + +parser.add_argument('--private-dir', required=True) +parser.add_argument('-o', nargs='+', required=True) +parser.add_argument('cmparr', nargs='+') + +contents = ['''#include + +void flob_1() { + printf("Now flobbing #1.\\n"); +} +''', '''#include + +void flob_2() { + printf("Now flobbing #2.\\n"); +} +'''] + +def generate_lib_gnulike(outfile, c_file, private_dir, compiler_array): + if shutil.which('ar'): + static_linker = 'ar' + elif shutil.which('llvm-ar'): + static_linker = 'llvm-ar' + elif shutil.which('gcc-ar'): + static_linker = 'gcc-ar' + else: + sys.exit('Could not detect a static linker.') + o_file = c_file.with_suffix('.o') + compile_cmd = compiler_array + ['-c', '-g', '-O2', '-o', str(o_file), str(c_file)] + subprocess.check_call(compile_cmd) + out_file = pathlib.Path(outfile) + if out_file.exists(): + out_file.unlink() + link_cmd = [static_linker, 'csr', outfile, str(o_file)] + subprocess.check_call(link_cmd) + return 0 + + +def generate_lib_msvc(outfile, c_file, private_dir, compiler_array): + static_linker = 'lib' + o_file = c_file.with_suffix('.obj') + compile_cmd = compiler_array + ['/MDd', + '/nologo', + '/ZI', + '/Ob0', + '/Od', + '/c', + '/Fo' + str(o_file), + str(c_file)] + subprocess.check_call(compile_cmd) + out_file = pathlib.Path(outfile) + if out_file.exists(): + out_file.unlink() + link_cmd = [static_linker, + '/nologo', + '/OUT:' + str(outfile), + str(o_file)] + subprocess.check_call(link_cmd) + return 0 + +def generate_lib(outfiles, private_dir, compiler_array): + private_dir = pathlib.Path(private_dir) + if not private_dir.exists(): + private_dir.mkdir() + + for i, content in enumerate(contents): + c_file = private_dir / ('flob_' + str(i + 1) + '.c') + c_file.write_text(content) + outfile = outfiles[i] + + cl_found = False + for cl_arg in compiler_array: + if (cl_arg.endswith('cl') or cl_arg.endswith('cl.exe')) and 'clang-cl' not in cl_arg: + ret = generate_lib_msvc(outfile, c_file, private_dir, compiler_array) + if ret > 0: + return ret + else: + cl_found = True + break + if not cl_found: + ret = generate_lib_gnulike(outfile, c_file, private_dir, compiler_array) + if ret > 0: + return ret + return 0 + +if __name__ == '__main__': + options = parser.parse_args() + sys.exit(generate_lib(options.o, options.private_dir, options.cmparr)) diff --git a/meson/test cases/common/210 link custom_i multiple from multiple/meson.build b/meson/test cases/common/210 link custom_i multiple from multiple/meson.build new file mode 100644 index 000000000..ede059ec6 --- /dev/null +++ b/meson/test cases/common/210 link custom_i multiple from multiple/meson.build @@ -0,0 +1,42 @@ +project('linkcustom', 'c') + +# This would require passing the static linker to the build script or having +# it detect it by itself. I'm too lazy to implement it now and it is not +# really needed for testing that custom targets work. It is the responsibility +# of the custom target to produce things in the correct format. 
+assert(not meson.is_cross_build(), + 'MESON_SKIP_TEST cross checking not implemented.') + +cc = meson.get_compiler('c') +genprog = find_program('generate_stlibs.py') + +clib = custom_target('linkcustom', + output: ['libflob_1.a', 'libflob_2.a'], + command: [genprog, + '-o', '@OUTPUT@', + '--private-dir', '@PRIVATE_DIR@'] + cc.cmd_array()) + +clibs = [clib[0], clib[1]] + +exe = executable('prog', 'prog.c', link_with: clibs) +test('linkcustom', exe) + +d = declare_dependency(link_with: clibs) + +exe2 = executable('prog2', 'prog.c', dependencies: d) +test('linkcustom2', exe2) + +# Link whole tests + +if meson.backend() == 'xcode' + message('Xcode does not support link whole so skipping.') + subdir_done() +endif + +exe3 = executable('prog3', 'prog.c', link_whole: clibs) +test('linkwhole', exe) + +d2 = declare_dependency(link_whole: clibs) + +exe4 = executable('prog4', 'prog.c', dependencies: d2) +test('linkwhole2', exe2) diff --git a/meson/test cases/common/210 link custom_i multiple from multiple/prog.c b/meson/test cases/common/210 link custom_i multiple from multiple/prog.c new file mode 100644 index 000000000..7b0c5cfc8 --- /dev/null +++ b/meson/test cases/common/210 link custom_i multiple from multiple/prog.c @@ -0,0 +1,8 @@ +void flob_1(void); +void flob_2(void); + +int main(void) { + flob_1(); + flob_2(); + return 0; +} diff --git a/meson/test cases/common/211 dependency get_variable method/meson.build b/meson/test cases/common/211 dependency get_variable method/meson.build new file mode 100644 index 000000000..384b3f3d2 --- /dev/null +++ b/meson/test cases/common/211 dependency get_variable method/meson.build @@ -0,0 +1,66 @@ +project( + 'dependency get_variable', + ['c', 'cpp'], +) + +# Just some string that nothing should return +default = 'asufoiqwjtl;adjfbpiuqwoehtl;ajdfl;ghal;sdjg' + +dep = dependency('zlib', method: 'pkg-config', required : false) +if not dep.found() + warning('Skipping pkg-config tests as zlib is not available or is not pkg-config') +else + # Test for regular pkg-config + # We don't know what the value will be, but we know it should be the same + dep = dependency('zlib', method : 'pkg-config') + assert(dep.get_pkgconfig_variable('prefix') == dep.get_variable(pkgconfig : 'prefix'), + 'Got different values from get_pkgconfig_variable and get_variable(pkgconfig: )') + assert(dep.get_variable(pkgconfig : default, default_value : default) == default, + 'pkg-config didn\'t get default when we should have.') + assert(dep.get_variable(pkgconfig : 'prefix', default_value : default) != default, + 'pkg-config got default when we shouldn\'t have.') +endif + +dep_ct = dependency('llvm', method : 'config-tool', required : false) +if not dep_ct.found() + warning('Skipping config-tool tests as llvm is not available or llvm-config was not found.') +else + assert(dep_ct.get_configtool_variable('has-rtti') == dep_ct.get_variable(configtool : 'has-rtti'), + 'Got different values from get_configtool_variable and get_variable(configtool: )') + assert(dep_ct.get_variable(configtool : default, default_value : default) == default, + 'config-tool didn\'t get default when we should have.') + assert(dep_ct.get_variable(configtool : 'has-rtti', default_value : default) != default, + 'config-tool got default when we shouldn\'t have.') +endif + +dep_cm = dependency('llvm', method : 'cmake', required : false) +if not dep_cm.found() + warning('Skipping cmake tests as llvm is not available via the cmake finder.') +else + if dep_ct.found() + assert((dep_cm.get_variable(cmake : 
'LLVM_ENABLE_RTTI') == 'ON') == (dep_ct.get_variable(configtool : 'has-rtti') == 'YES'), + 'RTTI information for cmake and config tools disagree') + endif + assert(dep_cm.get_variable(cmake : default, default_value : default) == default, + 'cmake didn\'t get default when we should have.') + assert(dep_cm.get_variable(cmake : 'LLVM_ENABLE_RTTI', default_value : default) != default, + 'cmake config-tool got default when we shouldn\'t have.') +endif + +idep = declare_dependency(variables : {'foo' : 'value'}) +assert(idep.get_variable(pkgconfig : 'foo', cmake : 'foo', configtool : 'foo', + internal : 'foo', default_value : default) == 'value', + 'internal got default when it shouldn\'t have.') +assert(idep.get_variable(pkgconfig : 'foo', cmake : 'foo', configtool : 'foo', + internal : 'bar', default_value : default) == default, + 'internal didn\'t default when it should have.') + +idep = declare_dependency() +assert(idep.get_variable(pkgconfig : 'foo', cmake : 'foo', configtool : 'foo', + default_value : default) == default, + 'something went wrong with an InternalDependency with no variables.') + +idep = declare_dependency(variables : ['foo=value']) +assert(idep.get_variable(internal: 'foo') == 'value') +assert(idep.get_variable('foo') == 'value') +assert(idep.get_variable('invalid', internal: 'foo') == 'value') diff --git a/meson/test cases/common/212 source set configuration_data/a.c b/meson/test cases/common/212 source set configuration_data/a.c new file mode 100644 index 000000000..0570dffc4 --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/a.c @@ -0,0 +1,8 @@ +#include +#include "all.h" + +int main(void) +{ + if (p) abort(); + f(); +} diff --git a/meson/test cases/common/212 source set configuration_data/all.h b/meson/test cases/common/212 source set configuration_data/all.h new file mode 100644 index 000000000..e3547dfdc --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/all.h @@ -0,0 +1,9 @@ +extern void f(void); +extern void g(void); +extern void h(void); +extern void undefined(void); + +/* Defined in nope.c and f.c, + * value depends on the source set and configuration used. 
+ */ +extern void (*p)(void); diff --git a/meson/test cases/common/212 source set configuration_data/f.c b/meson/test cases/common/212 source set configuration_data/f.c new file mode 100644 index 000000000..33d2f18f0 --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/f.c @@ -0,0 +1,7 @@ +#include "all.h" + +void (*p)(void) = (void *)0x12AB34CD; + +void f(void) +{ +} diff --git a/meson/test cases/common/212 source set configuration_data/g.c b/meson/test cases/common/212 source set configuration_data/g.c new file mode 100644 index 000000000..4a6f253b0 --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/g.c @@ -0,0 +1,6 @@ +#include "all.h" + +void g(void) +{ + h(); +} diff --git a/meson/test cases/common/212 source set configuration_data/meson.build b/meson/test cases/common/212 source set configuration_data/meson.build new file mode 100644 index 000000000..104f39d21 --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/meson.build @@ -0,0 +1,54 @@ +project('a', 'c') + +good = declare_dependency(link_with: static_library('good', 'g.c')) +bad = declare_dependency(link_args: 'nonexistent.a') +not_found = dependency('invalid', required: false) + +source_set = import('sourceset') + +sources = source_set.source_set() +sources.add(when: 'YES', if_false: ['nope.c']) +sources.add(when: 'YES1', if_true: files('a.c')) +subdir('subdir') +sources.add(when: 'NO', if_true: 'nope.c', if_false: ['f.c']) +sources.add(when: 'NO', if_true: bad, if_false: ['f.c']) + +sources.add(when: 'YES2', if_true: good) + +# dependencies as conditions +sources.add(when: not_found, if_true: 'nope.c') + +# test add_all +sources2 = source_set.source_set() +sources2.add(when: 'YES1', if_true: 'nope.c') +sources.add_all(when: 'NO', if_true: sources2) + +# test duplicate items +sources.add(when: 'YES1', if_true: files('a.c')) + +conf1 = configuration_data() +conf1.set10('YES', true) +conf1.set10('YES1', true) +conf1.set10('YES2', false) +conf1.set10('NO', false) +result1 = sources.apply(conf1) + +conf2 = configuration_data() +conf2.set10('YES', true) +conf2.set10('YES1', false) +conf2.set10('YES2', true) +conf2.set10('NO', false) +result2 = sources.apply(conf2) + +# Each target will recompile the objects +executable('first', sources: result1.sources(), dependencies: result1.dependencies()) +executable('second', sources: result2.sources(), dependencies: result2.dependencies()) + +# All target will use the same object files +if meson.is_unity() + message('Skipping extraction test because this is a Unity build.') +else + all_objs = static_library('all_objs', sources.all_sources()) + executable('first_via_lib', objects: all_objs.extract_objects(result1.sources()), dependencies: result1.dependencies()) + executable('second_via_lib', objects: all_objs.extract_objects(result2.sources()), dependencies: result2.dependencies()) +endif diff --git a/meson/test cases/common/212 source set configuration_data/nope.c b/meson/test cases/common/212 source set configuration_data/nope.c new file mode 100644 index 000000000..0ce1d3b1e --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/nope.c @@ -0,0 +1,3 @@ +#include "all.h" + +void (*p)(void) = undefined; diff --git a/meson/test cases/common/212 source set configuration_data/subdir/b.c b/meson/test cases/common/212 source set configuration_data/subdir/b.c new file mode 100644 index 000000000..31c378934 --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/subdir/b.c @@ -0,0 
+1,13 @@ +#include +#include "all.h" + +void h(void) +{ +} + +int main(void) +{ + if (p) abort(); + f(); + g(); +} diff --git a/meson/test cases/common/212 source set configuration_data/subdir/meson.build b/meson/test cases/common/212 source set configuration_data/subdir/meson.build new file mode 100644 index 000000000..b497de574 --- /dev/null +++ b/meson/test cases/common/212 source set configuration_data/subdir/meson.build @@ -0,0 +1 @@ +sources.add(when: ['YES2', good], if_true: [ files('b.c') ]) diff --git a/meson/test cases/common/213 source set dictionary/a.c b/meson/test cases/common/213 source set dictionary/a.c new file mode 100644 index 000000000..0570dffc4 --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/a.c @@ -0,0 +1,8 @@ +#include +#include "all.h" + +int main(void) +{ + if (p) abort(); + f(); +} diff --git a/meson/test cases/common/213 source set dictionary/all.h b/meson/test cases/common/213 source set dictionary/all.h new file mode 100644 index 000000000..e3547dfdc --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/all.h @@ -0,0 +1,9 @@ +extern void f(void); +extern void g(void); +extern void h(void); +extern void undefined(void); + +/* Defined in nope.c and f.c, + * value depends on the source set and configuration used. + */ +extern void (*p)(void); diff --git a/meson/test cases/common/213 source set dictionary/f.c b/meson/test cases/common/213 source set dictionary/f.c new file mode 100644 index 000000000..9c5bb1cb9 --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/f.c @@ -0,0 +1,7 @@ +#include "all.h" + +void (*p)(void) = (void *)0x1234ABCD; + +void f(void) +{ +} diff --git a/meson/test cases/common/213 source set dictionary/g.c b/meson/test cases/common/213 source set dictionary/g.c new file mode 100644 index 000000000..4a6f253b0 --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/g.c @@ -0,0 +1,6 @@ +#include "all.h" + +void g(void) +{ + h(); +} diff --git a/meson/test cases/common/213 source set dictionary/meson.build b/meson/test cases/common/213 source set dictionary/meson.build new file mode 100644 index 000000000..9a3450707 --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/meson.build @@ -0,0 +1,56 @@ +project('a', 'c') + +good = declare_dependency(link_with: static_library('good', 'g.c')) +bad = declare_dependency(link_args: 'nonexistent.a') +not_found = dependency('invalid', required: false) + +source_set = import('sourceset') + +sources = source_set.source_set() +sources.add(when: 'YES', if_false: ['nope.c']) +sources.add(when: 'YES1', if_true: files('a.c')) +subdir('subdir') +sources.add(when: 'NO', if_true: 'nope.c', if_false: ['f.c']) +sources.add(when: 'NO', if_true: bad, if_false: ['f.c']) + +sources.add(when: 'YES2', if_true: good) + +# dependencies as conditions +sources.add(when: not_found, if_true: 'nope.c') + +# test add_all +sources2 = source_set.source_set() +sources2.add(when: 'YES1', if_true: 'nope.c') +sources.add_all(when: 'NO', if_true: sources2) + +# test duplicate items +sources.add(when: 'YES1', if_true: files('a.c')) + +conf1 = { + 'YES': true, + 'YES1': true, + 'YES2': false, + 'NO': false, +} +result1 = sources.apply(conf1) + +conf2 = { + 'YES': true, + 'YES1': false, + 'YES2': true, + 'NO': false, +} +result2 = sources.apply(conf2) + +# Each target will recompile the objects +executable('first', sources: result1.sources(), dependencies: result1.dependencies()) +executable('second', sources: result2.sources(), dependencies: 
result2.dependencies()) + +# All target will use the same object files +if meson.is_unity() + message('Skipping extraction test because this is a Unity build.') +else + all_objs = static_library('all_objs', sources.all_sources()) + executable('first_via_lib', objects: all_objs.extract_objects(result1.sources()), dependencies: result1.dependencies()) + executable('second_via_lib', objects: all_objs.extract_objects(result2.sources()), dependencies: result2.dependencies()) +endif diff --git a/meson/test cases/common/213 source set dictionary/nope.c b/meson/test cases/common/213 source set dictionary/nope.c new file mode 100644 index 000000000..0ce1d3b1e --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/nope.c @@ -0,0 +1,3 @@ +#include "all.h" + +void (*p)(void) = undefined; diff --git a/meson/test cases/common/213 source set dictionary/subdir/b.c b/meson/test cases/common/213 source set dictionary/subdir/b.c new file mode 100644 index 000000000..31c378934 --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/subdir/b.c @@ -0,0 +1,13 @@ +#include +#include "all.h" + +void h(void) +{ +} + +int main(void) +{ + if (p) abort(); + f(); + g(); +} diff --git a/meson/test cases/common/213 source set dictionary/subdir/meson.build b/meson/test cases/common/213 source set dictionary/subdir/meson.build new file mode 100644 index 000000000..b497de574 --- /dev/null +++ b/meson/test cases/common/213 source set dictionary/subdir/meson.build @@ -0,0 +1 @@ +sources.add(when: ['YES2', good], if_true: [ files('b.c') ]) diff --git a/meson/test cases/common/214 source set custom target/a.c b/meson/test cases/common/214 source set custom target/a.c new file mode 100644 index 000000000..39a3b6b38 --- /dev/null +++ b/meson/test cases/common/214 source set custom target/a.c @@ -0,0 +1,7 @@ +#include "all.h" + +int main(void) +{ + f(); + g(); +} diff --git a/meson/test cases/common/214 source set custom target/all.h b/meson/test cases/common/214 source set custom target/all.h new file mode 100644 index 000000000..5885e3204 --- /dev/null +++ b/meson/test cases/common/214 source set custom target/all.h @@ -0,0 +1,2 @@ +extern void f(void); +extern void g(void); diff --git a/meson/test cases/common/214 source set custom target/cp.py b/meson/test cases/common/214 source set custom target/cp.py new file mode 100644 index 000000000..cb09cf399 --- /dev/null +++ b/meson/test cases/common/214 source set custom target/cp.py @@ -0,0 +1,5 @@ +#! 
/usr/bin/env python3 + +import sys +from shutil import copyfile +copyfile(*sys.argv[1:]) diff --git a/meson/test cases/common/214 source set custom target/f.c b/meson/test cases/common/214 source set custom target/f.c new file mode 100644 index 000000000..a50ecda32 --- /dev/null +++ b/meson/test cases/common/214 source set custom target/f.c @@ -0,0 +1,5 @@ +#include "all.h" + +void f(void) +{ +} diff --git a/meson/test cases/common/214 source set custom target/g.c b/meson/test cases/common/214 source set custom target/g.c new file mode 100644 index 000000000..7098584c2 --- /dev/null +++ b/meson/test cases/common/214 source set custom target/g.c @@ -0,0 +1,5 @@ +#include "all.h" + +void g(void) +{ +} diff --git a/meson/test cases/common/214 source set custom target/meson.build b/meson/test cases/common/214 source set custom target/meson.build new file mode 100644 index 000000000..fe6e6e18b --- /dev/null +++ b/meson/test cases/common/214 source set custom target/meson.build @@ -0,0 +1,28 @@ +# Try using sourceset with various kinds of generated sources + +project('a', 'c') + +cp = find_program('cp.py') + +source_set = import('sourceset') +sources = source_set.source_set() + +a_c = custom_target('gen-custom-target', + input: 'a.c', output: 'out_a.c', + command: [cp, '@INPUT@', '@OUTPUT@']) +sources.add(when: 'YES', if_true: a_c) +sources.add(when: 'YES', if_true: a_c[0]) + +f_c = configure_file(input: 'f.c', output: 'out_f.c', copy: true) +sources.add(when: 'YES', if_true: f_c) +sources.add(when: 'YES', if_true: f_c) + +gen = generator(cp, output: 'out_@PLAINNAME@', arguments: ['@INPUT@', '@OUTPUT@']) +g_c = gen.process(files('g.c')) +sources.add(when: 'YES', if_true: g_c) +sources.add(when: 'YES', if_true: g_c) + +conf1 = { 'YES': true, } +result1 = sources.apply(conf1) + +executable('first', sources: result1.sources(), dependencies: result1.dependencies()) diff --git a/meson/test cases/common/215 source set realistic example/boards/arm/aarch64.cc b/meson/test cases/common/215 source set realistic example/boards/arm/aarch64.cc new file mode 100644 index 000000000..386c771d8 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/arm/aarch64.cc @@ -0,0 +1,8 @@ +#include "common.h" +#include + +void initialize_target() +{ + std::cout << ANSI_START << "some " << THE_TARGET + << " initialization" << ANSI_END << std::endl; +} diff --git a/meson/test cases/common/215 source set realistic example/boards/arm/arm.cc b/meson/test cases/common/215 source set realistic example/boards/arm/arm.cc new file mode 100644 index 000000000..b463ebe7a --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/arm/arm.cc @@ -0,0 +1,10 @@ +#include "arm.h" + +const char *ARMBoard::target() +{ + return THE_TARGET; +} + +void ARMBoard::some_arm_thing() +{ +} diff --git a/meson/test cases/common/215 source set realistic example/boards/arm/arm.h b/meson/test cases/common/215 source set realistic example/boards/arm/arm.h new file mode 100644 index 000000000..4dd6b694f --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/arm/arm.h @@ -0,0 +1,12 @@ +#ifndef ARM_H +#define ARM_H 1 + +#include "common.h" + +struct ARMBoard: Board { + const char *target(); + void some_arm_thing(); +}; + + +#endif diff --git a/meson/test cases/common/215 source set realistic example/boards/arm/arm32.cc b/meson/test cases/common/215 source set realistic example/boards/arm/arm32.cc new file mode 100644 index 000000000..72a24274a --- /dev/null +++ b/meson/test 
cases/common/215 source set realistic example/boards/arm/arm32.cc @@ -0,0 +1,8 @@ +#include "common.h" +#include + +void initialize_target() +{ + std::cout << ANSI_START << "a different " << THE_TARGET + << " initialization" << ANSI_END << std::endl; +} diff --git a/meson/test cases/common/215 source set realistic example/boards/arm/versatilepb.cc b/meson/test cases/common/215 source set realistic example/boards/arm/versatilepb.cc new file mode 100644 index 000000000..3d1a9fe7c --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/arm/versatilepb.cc @@ -0,0 +1,16 @@ +#include +#include "common.h" +#include "arm.h" + +struct VersatilePBBoard: ARMBoard { + void say_hello(); +}; + +void VersatilePBBoard::say_hello() +{ + some_arm_thing(); + std::cout << ANSI_START << "I am the versatilepb board" + << ANSI_END << std::endl; +} + +static VersatilePBBoard versatilepb; diff --git a/meson/test cases/common/215 source set realistic example/boards/arm/virt.cc b/meson/test cases/common/215 source set realistic example/boards/arm/virt.cc new file mode 100644 index 000000000..6f9a1ca94 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/arm/virt.cc @@ -0,0 +1,16 @@ +#include +#include "common.h" +#include "arm.h" + +struct VirtBoard: ARMBoard { + void say_hello(); +}; + +void VirtBoard::say_hello() +{ + some_arm_thing(); + std::cout << ANSI_START << "I am the virt board" + << ANSI_END << std::endl; +} + +static VirtBoard virt; diff --git a/meson/test cases/common/215 source set realistic example/boards/arm/xlnx_zcu102.cc b/meson/test cases/common/215 source set realistic example/boards/arm/xlnx_zcu102.cc new file mode 100644 index 000000000..8921e0074 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/arm/xlnx_zcu102.cc @@ -0,0 +1,16 @@ +#include +#include "common.h" +#include "arm.h" + +struct XlnxZCU102Board: ARMBoard { + void say_hello(); +}; + +void XlnxZCU102Board::say_hello() +{ + some_arm_thing(); + std::cout << ANSI_START << "I am the xlnx_zcu102 board" + << ANSI_END << std::endl; +} + +static XlnxZCU102Board xlnx_zcu102; diff --git a/meson/test cases/common/215 source set realistic example/boards/meson.build b/meson/test cases/common/215 source set realistic example/boards/meson.build new file mode 100644 index 000000000..41ead4c3e --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/meson.build @@ -0,0 +1,7 @@ +specific.add(when: 'TARGET_ARM', if_true: files('arm/arm.cc', 'arm/arm32.cc')) +specific.add(when: 'TARGET_AARCH64', if_true: files('arm/arm.cc', 'arm/aarch64.cc')) +specific.add(when: 'CONFIG_VIRT', if_true: files('arm/virt.cc')) +specific.add(when: 'CONFIG_XLNX_ZCU102', if_true: files('arm/xlnx_zcu102.cc')) +specific.add(when: 'CONFIG_VERSATILEPB', if_true: files('arm/versatilepb.cc')) + +specific.add(when: 'TARGET_X86', if_true: files('x86/pc.cc')) diff --git a/meson/test cases/common/215 source set realistic example/boards/x86/pc.cc b/meson/test cases/common/215 source set realistic example/boards/x86/pc.cc new file mode 100644 index 000000000..04ec39262 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/boards/x86/pc.cc @@ -0,0 +1,26 @@ +#include +#include "common.h" + +struct X86Board: Board { + const char *target(); + void say_hello(); +}; + +const char *X86Board::target() +{ + return THE_TARGET; +} + +void X86Board::say_hello() +{ + std::cout << ANSI_START << "I am a 1996 PC" + << ANSI_END << std::endl; +} + +void 
initialize_target() +{ + std::cout << ANSI_START << "ready, set, go" + << ANSI_END << std::endl; +} + +static X86Board pc; diff --git a/meson/test cases/common/215 source set realistic example/common.h b/meson/test cases/common/215 source set realistic example/common.h new file mode 100644 index 000000000..6e325c7bc --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/common.h @@ -0,0 +1,41 @@ +#ifndef COMMON_H +#define COMMON_H 1 + +/* + * target-specific code will print in yellow, common code will print + * in grey. + */ +#ifdef THE_TARGET +#define ANSI_START "\x1b[33;1m" +#define ANSI_END "\x1b[0m" +#else +#define ANSI_START "" +#define ANSI_END "" +#endif + +void some_random_function(); +void initialize_target(); + +struct Board { + Board *next; + Board(); + virtual ~Board(); + virtual void say_hello() = 0; + virtual const char *target() = 0; +}; + +struct Device { + Device *next; + Device(); + virtual ~Device(); + virtual void say_hello() = 0; +}; + +struct Dependency { + Dependency *next; + Dependency(); + virtual ~Dependency(); + virtual void initialize() = 0; +}; + +#endif diff --git a/meson/test cases/common/215 source set realistic example/config/aarch64 b/meson/test cases/common/215 source set realistic example/config/aarch64 new file mode 100644 index 000000000..55b90ebee --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/config/aarch64 @@ -0,0 +1,5 @@ +TARGET_AARCH64=y +CONFIG_VIRT=y +CONFIG_XLNX_ZCU102=y +CONFIG_VIRTIO=y +CONFIG_VIRTIO_MMIO=y diff --git a/meson/test cases/common/215 source set realistic example/config/arm b/meson/test cases/common/215 source set realistic example/config/arm new file mode 100644 index 000000000..d3f7ac761 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/config/arm @@ -0,0 +1,3 @@ +TARGET_ARM=y +CONFIG_VIRT=y +CONFIG_VERSATILEPB=y diff --git a/meson/test cases/common/215 source set realistic example/config/x86 b/meson/test cases/common/215 source set realistic example/config/x86 new file mode 100644 index 000000000..6caa3e244 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/config/x86 @@ -0,0 +1,4 @@ +TARGET_X86=y +CONFIG_PC=y +CONFIG_VIRTIO=y +CONFIG_VIRTIO_PCI=y diff --git a/meson/test cases/common/215 source set realistic example/devices/meson.build b/meson/test cases/common/215 source set realistic example/devices/meson.build new file mode 100644 index 000000000..68ee68eec --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/devices/meson.build @@ -0,0 +1,3 @@ +specific.add(when: 'CONFIG_VIRTIO', if_true: files('virtio.cc')) +common.add(when: 'CONFIG_VIRTIO_PCI', if_true: files('virtio-pci.cc')) +common.add(when: 'CONFIG_VIRTIO_MMIO', if_true: files('virtio-mmio.cc')) diff --git a/meson/test cases/common/215 source set realistic example/devices/virtio-mmio.cc b/meson/test cases/common/215 source set realistic example/devices/virtio-mmio.cc new file mode 100644 index 000000000..5dab97e5c --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/devices/virtio-mmio.cc @@ -0,0 +1,16 @@ +#include +#include "common.h" +#include "virtio.h" + +struct VirtioMMIODevice: VirtioDevice { + void say_hello(); +}; + +void VirtioMMIODevice::say_hello() +{ + some_virtio_thing(); + std::cout << ANSI_START << "virtio-mmio is available" + << ANSI_END << std::endl; +} + +static VirtioMMIODevice virtio_mmio; diff --git a/meson/test cases/common/215 source set realistic example/devices/virtio-pci.cc b/meson/test 
cases/common/215 source set realistic example/devices/virtio-pci.cc new file mode 100644 index 000000000..7df7a82b7 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/devices/virtio-pci.cc @@ -0,0 +1,16 @@ +#include +#include "common.h" +#include "virtio.h" + +struct VirtioPCIDevice: VirtioDevice { + void say_hello(); +}; + +void VirtioPCIDevice::say_hello() +{ + some_virtio_thing(); + std::cout << ANSI_START << "virtio-pci is available" + << ANSI_END << std::endl; +} + +static VirtioPCIDevice virtio_pci; diff --git a/meson/test cases/common/215 source set realistic example/devices/virtio.cc b/meson/test cases/common/215 source set realistic example/devices/virtio.cc new file mode 100644 index 000000000..fc51275bf --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/devices/virtio.cc @@ -0,0 +1,6 @@ +#include +#include "common.h" +#include "virtio.h" + +void VirtioDevice::some_virtio_thing() { +} diff --git a/meson/test cases/common/215 source set realistic example/devices/virtio.h b/meson/test cases/common/215 source set realistic example/devices/virtio.h new file mode 100644 index 000000000..a157731fe --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/devices/virtio.h @@ -0,0 +1,10 @@ +#ifndef VIRTIO_H +#define VIRTIO_H 1 + +#include "common.h" + +struct VirtioDevice: Device { + void some_virtio_thing(); +}; + +#endif diff --git a/meson/test cases/common/215 source set realistic example/main.cc b/meson/test cases/common/215 source set realistic example/main.cc new file mode 100644 index 000000000..2b552176b --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/main.cc @@ -0,0 +1,32 @@ +#include +#include +#include "common.h" + +Board* boards; +Device* devices; +Dependency* deps; + +Board::Board() { this->next = boards; boards = this; } +Board::~Board() {} + +Device::Device() { this->next = devices; devices = this; } +Device::~Device() {} + +Dependency::Dependency() { this->next = deps; deps = this; } +Dependency::~Dependency() {} + +int main(void) +{ + some_random_function(); + for (auto d = deps; d; d = d->next) + d->initialize(); + + initialize_target(); + for (auto b = boards; b; b = b->next) { + std::cout << ANSI_START << b->target() << " - " << ANSI_END; + b->say_hello(); + } + + for (auto d = devices; d; d = d->next) + d->say_hello(); +} diff --git a/meson/test cases/common/215 source set realistic example/meson.build b/meson/test cases/common/215 source set realistic example/meson.build new file mode 100644 index 000000000..d986b991a --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/meson.build @@ -0,0 +1,52 @@ +# a sort-of realistic example that combines the sourceset and keyval +# modules, inspired by QEMU's build system + +project('sourceset-example', 'cpp', default_options: ['cpp_std=c++11']) + +cppid = meson.get_compiler('cpp').get_id() +if cppid == 'pgi' + error('MESON_SKIP_TEST: Even PGI 19.4 that claims C++17 full support, cannot handle auto x = y syntax used in this test.') +endif + +ss = import('sourceset') +keyval = import('keyval') + +zlib = declare_dependency(compile_args: '-DZLIB=1') +another = declare_dependency(compile_args: '-DANOTHER=1') +not_found = dependency('not-found', required: false) + +common = ss.source_set() +specific = ss.source_set() + +common.add(files('main.cc')) +common.add(when: [zlib, another], if_true: files('zlib.cc')) +common.add(when: not_found, + if_true: files('was-found.cc'), + if_false: files('not-found.cc')) + 
+subdir('boards') +subdir('devices') + +if meson.is_unity() + specific.add_all(common) + common = ss.source_set() +endif + +common_lib = static_library('common', common.all_sources(), + dependencies: common.all_dependencies()) + +targets = [ 'arm', 'aarch64', 'x86' ] +target_dirs = { 'arm' : 'arm', 'aarch64' : 'arm', 'x86': 'x86' } + +foreach x : targets + config = keyval.load('config' / x) + target_specific = specific.apply(config, strict: false) + target_common = common.apply(config, strict: false) + target_deps = target_specific.dependencies() + target_common.dependencies() + executable(x, + objects: common_lib.extract_objects(target_common.sources()), + sources: target_specific.sources(), + dependencies: target_deps, + include_directories: 'boards' / target_dirs[x], + cpp_args: '-DTHE_TARGET="' + x + '"') +endforeach diff --git a/meson/test cases/common/215 source set realistic example/not-found.cc b/meson/test cases/common/215 source set realistic example/not-found.cc new file mode 100644 index 000000000..955a7a2a6 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/not-found.cc @@ -0,0 +1,8 @@ +#include +#include "common.h" + +void some_random_function() +{ + std::cout << ANSI_START << "everything's alright" + << ANSI_END << std::endl; +} diff --git a/meson/test cases/common/215 source set realistic example/was-found.cc b/meson/test cases/common/215 source set realistic example/was-found.cc new file mode 100644 index 000000000..f1eaf1ea5 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/was-found.cc @@ -0,0 +1,7 @@ +#include + +void some_random_function() +{ + std::cout << ANSI_START << "huh?" + << ANSI_END << std::endl; +} diff --git a/meson/test cases/common/215 source set realistic example/zlib.cc b/meson/test cases/common/215 source set realistic example/zlib.cc new file mode 100644 index 000000000..434e0b7a6 --- /dev/null +++ b/meson/test cases/common/215 source set realistic example/zlib.cc @@ -0,0 +1,15 @@ +#include +#include "common.h" + +struct ZLibDependency : Dependency { + void initialize(); +}; + +void ZLibDependency::initialize() { + if (ZLIB && ANOTHER) { + std::cout << ANSI_START << "hello from zlib" + << ANSI_END << std::endl; + } +} + +ZLibDependency zlib; diff --git a/meson/test cases/common/216 custom target input extracted objects/check_object.py b/meson/test cases/common/216 custom target input extracted objects/check_object.py new file mode 100644 index 000000000..bafcf2c17 --- /dev/null +++ b/meson/test cases/common/216 custom target input extracted objects/check_object.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +import sys, os + +if __name__ == '__main__': + if len(sys.argv) != 3: + print(sys.argv[0], 'object', 'output') + sys.exit(1) + elif os.path.exists(sys.argv[1]): + with open(sys.argv[2], 'wb') as out: + pass + else: + sys.exit(1) diff --git a/meson/test cases/common/216 custom target input extracted objects/libdir/meson.build b/meson/test cases/common/216 custom target input extracted objects/libdir/meson.build new file mode 100644 index 000000000..7f833115c --- /dev/null +++ b/meson/test cases/common/216 custom target input extracted objects/libdir/meson.build @@ -0,0 +1 @@ +objlib = static_library('object', 'source.c', override_options : ['unity=off']) diff --git a/meson/test cases/common/216 custom target input extracted objects/libdir/source.c b/meson/test cases/common/216 custom target input extracted objects/libdir/source.c new file mode 100644 index 000000000..1dc08e168 --- /dev/null +++ 
b/meson/test cases/common/216 custom target input extracted objects/libdir/source.c @@ -0,0 +1,3 @@ +int func1_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/216 custom target input extracted objects/meson.build b/meson/test cases/common/216 custom target input extracted objects/meson.build new file mode 100644 index 000000000..8f8b22f23 --- /dev/null +++ b/meson/test cases/common/216 custom target input extracted objects/meson.build @@ -0,0 +1,18 @@ +project('custom target input extracted objects', 'c') + +if meson.backend() == 'xcode' + error('MESON_SKIP_TEST: sometimes Xcode puts object files in weird paths and we can not extract them.') +endif + + +checker = find_program('check_object.py') + +cc = meson.get_compiler('c').cmd_array().get(-1) + +subdir('libdir') + +custom_target('check', + input: objlib.extract_objects('source.c'), + output: 'objcheck', + command: [checker, '@INPUT@', '@OUTPUT@'], + build_by_default: true) diff --git a/meson/test cases/common/217 test priorities/meson.build b/meson/test cases/common/217 test priorities/meson.build new file mode 100644 index 000000000..33c6fa382 --- /dev/null +++ b/meson/test cases/common/217 test priorities/meson.build @@ -0,0 +1,22 @@ +project('test priorities', 'c') + +test_prog = find_program('testprog.py') + +test('priority 0', test_prog, + args : ['0'], +) + +test('priority neg 10', test_prog, + args : ['-10'], + priority : -10 +) + +test('priority 1000', test_prog, + args : ['1000'], + priority : 1000 +) + +test('priority 50', test_prog, + args : ['50'], + priority : 50 +) diff --git a/meson/test cases/common/217 test priorities/testprog.py b/meson/test cases/common/217 test priorities/testprog.py new file mode 100644 index 000000000..470f28c3b --- /dev/null +++ b/meson/test cases/common/217 test priorities/testprog.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 + +import sys + +print(sys.argv[1]) diff --git a/meson/test cases/common/218 include_dir dot/meson.build b/meson/test cases/common/218 include_dir dot/meson.build new file mode 100644 index 000000000..71f7189c5 --- /dev/null +++ b/meson/test cases/common/218 include_dir dot/meson.build @@ -0,0 +1,8 @@ +project('Include Here', 'c') + +# The layout with the .h file in . 
and the .c files in src/ is critical to +# tickle the bug #5847 + +inc = include_directories('.') + +subdir('src') \ No newline at end of file diff --git a/meson/test cases/common/218 include_dir dot/rone.h b/meson/test cases/common/218 include_dir dot/rone.h new file mode 100644 index 000000000..48d7a79e8 --- /dev/null +++ b/meson/test cases/common/218 include_dir dot/rone.h @@ -0,0 +1 @@ +int rOne(void); \ No newline at end of file diff --git a/meson/test cases/common/218 include_dir dot/src/main.c b/meson/test cases/common/218 include_dir dot/src/main.c new file mode 100644 index 000000000..192b783f6 --- /dev/null +++ b/meson/test cases/common/218 include_dir dot/src/main.c @@ -0,0 +1,5 @@ +#include "rone.h" + +int main(void) { + return rOne(); +} diff --git a/meson/test cases/common/218 include_dir dot/src/meson.build b/meson/test cases/common/218 include_dir dot/src/meson.build new file mode 100644 index 000000000..fcbefb0b9 --- /dev/null +++ b/meson/test cases/common/218 include_dir dot/src/meson.build @@ -0,0 +1,6 @@ +t = executable( + 'main', + ['main.c', 'rone.c'], + include_directories : inc, + implicit_include_directories : false, +) \ No newline at end of file diff --git a/meson/test cases/common/218 include_dir dot/src/rone.c b/meson/test cases/common/218 include_dir dot/src/rone.c new file mode 100644 index 000000000..63cb0d319 --- /dev/null +++ b/meson/test cases/common/218 include_dir dot/src/rone.c @@ -0,0 +1,3 @@ +int rOne(void) { + return 1; +} \ No newline at end of file diff --git a/meson/test cases/common/219 include_type dependency/main.cpp b/meson/test cases/common/219 include_type dependency/main.cpp new file mode 100644 index 000000000..bf8c4a423 --- /dev/null +++ b/meson/test cases/common/219 include_type dependency/main.cpp @@ -0,0 +1,8 @@ +#include +#include + +using namespace std; + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/219 include_type dependency/meson.build b/meson/test cases/common/219 include_type dependency/meson.build new file mode 100644 index 000000000..678fb4edf --- /dev/null +++ b/meson/test cases/common/219 include_type dependency/meson.build @@ -0,0 +1,44 @@ +project( + 'dependency include_type', + ['c', 'cpp'], +) + +dep = dependency('zlib', method: 'pkg-config', required : false) +boost_dep = dependency('boost', modules: ['graph'], include_type : 'system', required: false) + +if not dep.found() + error('MESON_SKIP_TEST zlib was not found') +endif + +if not boost_dep.found() + error('MESON_SKIP_TEST boost was not found') +endif + +assert(dep.include_type() == 'preserve', 'include_type must default to "preserve"') + +dep_sys = dep.as_system() +assert(dep_sys.include_type() == 'system', 'as_system must return a system dep') + +dep2 = dependency('zlib', method: 'pkg-config', include_type : 'system') +assert(dep2.include_type() == 'system', 'include_type must be true when set') + +dep2_sys = dep2.as_system('non-system') +assert(dep2_sys.include_type() == 'non-system', 'as_system must set include_type correctly') + +sp = subproject('subDep') +sp_dep = sp.get_variable('subDep_dep') +assert(sp_dep.include_type() == 'preserve', 'default is preserve') + +sp_dep_sys = sp_dep.as_system('system') +assert(sp_dep_sys.include_type() == 'system', 'changing include_type works') +assert(sp_dep.include_type() == 'preserve', 'as_system must not mutate the original object') + +fallback = dependency('sdffgagf_does_not_exist', include_type: 'system', fallback: ['subDep', 'subDep_dep']) +assert(fallback.include_type() == 'system', 
'include_type works with dependency fallback') + +fallback_empty = dependency('', include_type: 'system', fallback: ['subDep', 'subDep_dep']) +assert(fallback_empty.include_type() == 'system', 'include_type works with empty name dependency fallback') + +# Check that PCH works with `include_type : 'system'` See https://github.com/mesonbuild/meson/issues/7167 +main_exe = executable('main_exe', 'main.cpp', cpp_pch: 'pch/test.hpp', dependencies: boost_dep) +test('main_test', main_exe) diff --git a/meson/test cases/common/219 include_type dependency/pch/test.hpp b/meson/test cases/common/219 include_type dependency/pch/test.hpp new file mode 100644 index 000000000..0d40fe1b9 --- /dev/null +++ b/meson/test cases/common/219 include_type dependency/pch/test.hpp @@ -0,0 +1 @@ +#include diff --git a/meson/test cases/common/219 include_type dependency/subprojects/subDep/meson.build b/meson/test cases/common/219 include_type dependency/subprojects/subDep/meson.build new file mode 100644 index 000000000..c3e87c469 --- /dev/null +++ b/meson/test cases/common/219 include_type dependency/subprojects/subDep/meson.build @@ -0,0 +1,3 @@ +project('subDep', ['cpp']) + +subDep_dep = declare_dependency(compile_args : []) diff --git a/meson/test cases/common/22 object extraction/check-obj.py b/meson/test cases/common/22 object extraction/check-obj.py new file mode 100644 index 000000000..99c2cc546 --- /dev/null +++ b/meson/test cases/common/22 object extraction/check-obj.py @@ -0,0 +1,21 @@ +#! /usr/bin/env python3 + +import json +import sys +import os + +cc = None +output = None + +# Only the ninja backend produces compile_commands.json +if sys.argv[1] == 'ninja': + with open('compile_commands.json', 'r') as f: + cc = json.load(f) + output = set((x['output'] for x in cc)) + +for obj in sys.argv[2:]: + if not os.path.exists(obj): + sys.exit(1) + if sys.argv[1] == 'ninja' and obj not in output: + sys.exit(1) + print('Verified', obj) diff --git a/meson/test cases/common/22 object extraction/header.h b/meson/test cases/common/22 object extraction/header.h new file mode 100644 index 000000000..50403ce3c --- /dev/null +++ b/meson/test cases/common/22 object extraction/header.h @@ -0,0 +1 @@ +/* Check that extract_all_objects works with headers. */ diff --git a/meson/test cases/common/22 object extraction/lib.c b/meson/test cases/common/22 object extraction/lib.c new file mode 100644 index 000000000..81805512f --- /dev/null +++ b/meson/test cases/common/22 object extraction/lib.c @@ -0,0 +1,3 @@ +int func(void) { + return 42; +} diff --git a/meson/test cases/common/22 object extraction/lib2.c b/meson/test cases/common/22 object extraction/lib2.c new file mode 100644 index 000000000..5020593c6 --- /dev/null +++ b/meson/test cases/common/22 object extraction/lib2.c @@ -0,0 +1,3 @@ +int retval(void) { + return 43; +} diff --git a/meson/test cases/common/22 object extraction/main.c b/meson/test cases/common/22 object extraction/main.c new file mode 100644 index 000000000..27162c5e0 --- /dev/null +++ b/meson/test cases/common/22 object extraction/main.c @@ -0,0 +1,5 @@ +int func(void); + +int main(void) { + return func() == 42 ? 
0 : 1; +} diff --git a/meson/test cases/common/22 object extraction/meson.build b/meson/test cases/common/22 object extraction/meson.build new file mode 100644 index 000000000..4847fa11d --- /dev/null +++ b/meson/test cases/common/22 object extraction/meson.build @@ -0,0 +1,35 @@ +project('object extraction', 'c') + +if meson.is_unity() + message('Skipping extraction test because this is a Unity build.') +else + lib1 = library('somelib', 'src/lib.c') + lib2 = library('somelib2', 'lib.c', 'header.h', 'lib2.c') + + obj1 = lib1.extract_objects('src/lib.c') + obj2 = lib2.extract_objects(['lib.c']) + obj3 = lib2.extract_objects(files('lib.c')) + obj4 = lib2.extract_objects(['lib.c', 'lib.c']) + obj5 = lib2.extract_objects(['lib.c', 'header.h']) + obj6 = lib2.extract_all_objects(recursive: true) + + e1 = executable('main1', 'main.c', objects : obj1) + e2 = executable('main2', 'main.c', objects : obj2) + e3 = executable('main3', 'main.c', objects : obj3) + e4 = executable('main4', 'main.c', objects : obj4) + e5 = executable('main5', 'main.c', objects : obj5) + e6 = executable('main6', 'main.c', objects : obj6) + + custom_target('custom_target with object inputs', output: 'objs', + input: [obj1, obj2, obj3, obj5, obj6], + build_by_default: true, + command: [find_program('check-obj.py'), meson.backend(), '@INPUT@'], + capture: true) + + test('extraction test 1', e1) + test('extraction test 2', e2) + test('extraction test 3', e3) + test('extraction test 4', e4) + test('extraction test 5', e5) + test('extraction test 6', e6) +endif diff --git a/meson/test cases/common/22 object extraction/src/lib.c b/meson/test cases/common/22 object extraction/src/lib.c new file mode 100644 index 000000000..81805512f --- /dev/null +++ b/meson/test cases/common/22 object extraction/src/lib.c @@ -0,0 +1,3 @@ +int func(void) { + return 42; +} diff --git a/meson/test cases/common/220 fs module/meson.build b/meson/test cases/common/220 fs module/meson.build new file mode 100644 index 000000000..7d10eb5fa --- /dev/null +++ b/meson/test cases/common/220 fs module/meson.build @@ -0,0 +1,144 @@ +project('fs module test') + +is_windows = build_machine.system() == 'windows' + +fs = import('fs') + +f = files('meson.build') + +assert(fs.exists('meson.build'), 'Existing file reported as missing.') +assert(not fs.exists('nonexisting'), 'Nonexisting file was found.') + +if not is_windows and build_machine.system() != 'cygwin' + # Symlinks on Windows have specific requirements including: + # * Meson running under Python >= 3.8 + # * Windows user permissions to create symlinks, and/or Windows in Developer mode + # so at this time the symlink test is skipped for Windows. 
+ symlink = meson.current_build_dir() / 'a_symlink' + run_command('ln', '-s', meson.current_source_dir() / 'meson.build', symlink) + assert(fs.is_symlink(symlink), 'Symlink not detected.') + assert(not fs.is_symlink('meson.build'), 'Regular file detected as symlink.') + assert(not fs.is_symlink(f[0]), 'Regular file detected as symlink.') +endif + +assert(fs.is_file('meson.build'), 'File not detected as a file.') +assert(not fs.is_file('subprojects'), 'Directory detected as a file.') +assert(not fs.is_file('nonexisting'), 'Bad path detected as a file.') + +assert(fs.is_dir('subprojects'), 'Dir not detected correctly.') +assert(not fs.is_dir('meson.build'), 'File detected as a dir.') +assert(not fs.is_dir('nonexisting'), 'Bad path detected as a dir.') + +assert(fs.is_dir('~'), 'home directory not detected') +assert(not fs.is_file('~'), 'home directory detected as file') + +# -- expanduser +assert(fs.expanduser('~') != '~','expanduser failed') +assert(fs.expanduser('~/foo').endswith('foo'), 'expanduser with tail failed') + +# -- as_posix +assert(fs.as_posix('/') == '/', 'as_posix idempotent') +assert(fs.as_posix('\\') == '/', 'as_posix simple') +assert(fs.as_posix('\\\\') == '/', 'as_posix simple') +assert(fs.as_posix('foo\\bar/baz') == 'foo/bar/baz', 'as_posix mixed slash') + +# -- is_absolute +winabs = 'q:/foo' +unixabs = '/foo' +if is_windows + assert(fs.is_absolute(winabs), 'is_absolute windows not detected') + assert(not fs.is_absolute(unixabs), 'is_absolute unix false positive') +else + assert(fs.is_absolute(unixabs), 'is_absolute unix not detected') + assert(not fs.is_absolute(winabs), 'is_absolute windows false positive') +endif + +# -- replace_suffix + +original = 'foo' +assert(fs.replace_suffix(original, '') == original, 'replace_suffix idempotent') +assert(fs.replace_suffix(f[0], '') == 'meson', 'replace_suffix trim') + +original = 'foo.txt' +new = fs.replace_suffix(original, '.ini') +assert(new == 'foo.ini', 'replace_suffix failed') + +new = fs.replace_suffix(f[0], '.ini') +assert(new == 'meson.ini', 'replace_suffix failed') + +original = 'foo' +new = fs.replace_suffix(original, '.ini') +assert(new == 'foo.ini', 'replace_suffix did not add suffix to suffixless file') + +original = 'foo.dll.a' +new = fs.replace_suffix(original, '.so') +assert(new == 'foo.dll.so', 'replace_suffix did not only modify last suffix') + +original = 'foo.dll' +new = fs.replace_suffix(original, '') +assert(new == 'foo', 'replace_suffix did not only delete last suffix') + +# `/` on windows is interpreted like `.drive` which in general may not be `c:/` +# the files need not exist for fs.replace_suffix() +original = is_windows ? 'j:/foo/bar.txt' : '/foo/bar.txt' +new_check = is_windows ? 
'j:\\foo\\bar.ini' : '/foo/bar.ini' + +new = fs.replace_suffix(original, '.ini') +assert(new == new_check, 'absolute path replace_suffix failed') + +# -- hash + +md5 = fs.hash('subdir/subdirfile.txt', 'md5') +sha256 = fs.hash('subdir/subdirfile.txt', 'sha256') +assert(md5 == 'd0795db41614d25affdd548314b30b3b', 'md5sum did not match') +assert(sha256 == 'be2170b0dae535b73f6775694fffa3fd726a43b5fabea11b7342f0605917a42a', 'sha256sum did not match') + +f = files('subdir/subdirfile.txt') +md5 = fs.hash(f[0], 'md5') +assert(md5 == 'd0795db41614d25affdd548314b30b3b', 'md5sum did not match') +sha256 = fs.hash(f[0], 'sha256') +assert(sha256 == 'be2170b0dae535b73f6775694fffa3fd726a43b5fabea11b7342f0605917a42a', 'sha256sum did not match') + +# -- size + +size = fs.size('subdir/subdirfile.txt') +assert(size == 19, 'file size not found correctly') + +size = fs.size(f[0]) +assert(size == 19, 'file size not found correctly') + +# -- are filenames referring to the same file? +f1 = 'meson.build' +f2 = 'subdir/../meson.build' +assert(fs.is_samepath(f1, f2), 'is_samepath not detecting same files') +assert(fs.is_samepath(meson.source_root(), 'subdir/..'), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.project_source_root(), 'subdir/..'), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir() / 'subdir/..'), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.global_source_root(), meson.current_source_dir()), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.global_build_root(), meson.current_build_dir()), 'is_samepath not detecting same directory') +assert(not fs.is_samepath(f1, 'subdir/subdirfile.txt'), 'is_samepath known bad comparison') +assert(not fs.is_samepath('not-a-path', f2), 'is_samepath should not error if path(s) do not exist') + +f = files('meson.build', 'subdir/../meson.build') +assert(fs.is_samepath(f[0], f[1]), 'is_samepath not detercting same files') + +if not is_windows and build_machine.system() != 'cygwin' + assert(fs.is_samepath(symlink, 'meson.build'), 'symlink is_samepath fail') +endif + +# parts of path +assert(fs.parent('foo/bar') == 'foo', 'failed to get dirname') +if not is_windows +assert(fs.parent(f[1]) == 'subdir/..', 'failed to get dirname') +else +assert(fs.parent(f[1]) == 'subdir\..', 'failed to get dirname') +endif +assert(fs.name('foo/bar') == 'bar', 'failed to get basename') +assert(fs.name(f[1]) == 'meson.build', 'failed to get basename') +assert(fs.name('foo/bar/baz.dll.a') == 'baz.dll.a', 'failed to get basename with compound suffix') +assert(fs.stem('foo/bar/baz.dll') == 'baz', 'failed to get stem with suffix') +assert(fs.stem('foo/bar/baz.dll.a') == 'baz.dll', 'failed to get stem with compound suffix') + +subdir('subdir') + +subproject('subbie') diff --git a/meson/test cases/common/220 fs module/subdir/meson.build b/meson/test cases/common/220 fs module/subdir/meson.build new file mode 100644 index 000000000..0cd2475e9 --- /dev/null +++ b/meson/test cases/common/220 fs module/subdir/meson.build @@ -0,0 +1,6 @@ +subdirfiles = files('subdirfile.txt') +assert(fs.exists('subdirfile.txt'), 'Subdir file lookup is broken.') +assert(fs.is_samepath(meson.project_source_root(), '..'), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir() / '..'), 'is_samepath not detecting same directory') + +assert(fs.is_samepath(subdirfiles[0], 'subdirfile.txt'), 'is_samepath not detecting same 
directory when using File and str') diff --git a/meson/test cases/common/220 fs module/subdir/subdirfile.txt b/meson/test cases/common/220 fs module/subdir/subdirfile.txt new file mode 100644 index 000000000..bcf7cc04b --- /dev/null +++ b/meson/test cases/common/220 fs module/subdir/subdirfile.txt @@ -0,0 +1 @@ +I have no content. diff --git a/meson/test cases/common/220 fs module/subprojects/subbie/meson.build b/meson/test cases/common/220 fs module/subprojects/subbie/meson.build new file mode 100644 index 000000000..f1e21ea10 --- /dev/null +++ b/meson/test cases/common/220 fs module/subprojects/subbie/meson.build @@ -0,0 +1,11 @@ +project('subbie') + +fs = import('fs') + +assert(fs.exists('subprojectfile.txt'), 'Subproject root file not found.') +assert(fs.is_samepath(meson.project_source_root(), meson.current_source_dir()), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir()), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.global_source_root(), meson.current_source_dir() / '../..'), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.global_build_root(), meson.current_build_dir() / '../..'), 'is_samepath not detecting same directory') + +subdir('subsub') diff --git a/meson/test cases/common/220 fs module/subprojects/subbie/subprojectfile.txt b/meson/test cases/common/220 fs module/subprojects/subbie/subprojectfile.txt new file mode 100644 index 000000000..bedb84cc4 --- /dev/null +++ b/meson/test cases/common/220 fs module/subprojects/subbie/subprojectfile.txt @@ -0,0 +1 @@ +I'm not empty. So there's at least that. diff --git a/meson/test cases/common/220 fs module/subprojects/subbie/subsub/meson.build b/meson/test cases/common/220 fs module/subprojects/subbie/subsub/meson.build new file mode 100644 index 000000000..4ac68aea5 --- /dev/null +++ b/meson/test cases/common/220 fs module/subprojects/subbie/subsub/meson.build @@ -0,0 +1,3 @@ +assert(fs.exists('subsubfile.txt'), 'Subproject subdir lookup failed.') +assert(fs.is_samepath(meson.project_source_root(), meson.current_source_dir() / '..'), 'is_samepath not detecting same directory') +assert(fs.is_samepath(meson.project_build_root(), meson.current_build_dir() / '..'), 'is_samepath not detecting same directory') diff --git a/meson/test cases/common/220 fs module/subprojects/subbie/subsub/subsubfile.txt b/meson/test cases/common/220 fs module/subprojects/subbie/subsub/subsubfile.txt new file mode 100644 index 000000000..2d5120d66 --- /dev/null +++ b/meson/test cases/common/220 fs module/subprojects/subbie/subsub/subsubfile.txt @@ -0,0 +1 @@ +Thank you for looking inside me. 
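# (Illustrative sketch only, not part of the test suite: the fs-module calls
# exercised above are typically combined like this in a real build file, e.g.
# to validate a vendored input before wiring it into a target; 'data/blob.bin'
# and the checksum value are hypothetical placeholders.
#   fs = import('fs')
#   blob = files('data/blob.bin')
#   if fs.hash(blob[0], 'sha256') != 'replace-with-recorded-sha256'
#     error('data/blob.bin does not match its recorded checksum')
#   endif
# )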
diff --git a/meson/test cases/common/221 zlib/meson.build b/meson/test cases/common/221 zlib/meson.build new file mode 100644 index 000000000..c53f71e4c --- /dev/null +++ b/meson/test cases/common/221 zlib/meson.build @@ -0,0 +1,23 @@ +project('zlib system dependency', 'c') + +if not ['darwin', 'freebsd', 'dragonfly', 'windows'].contains(host_machine.system()) + error('MESON_SKIP_TEST only applicable on macOS, FreeBSD, DragonflyBSD, and Windows.') +endif + +cc = meson.get_compiler('c') + +if host_machine.system() == 'darwin' and cc.get_id() != 'clang' + # this will only work on mac if using Apple's clang compiler, but there is no + # way in the meson source level to differentiate apple clang and llvm clang + # In the meson CI only apple clang is tested + error('MESON_SKIP_TEST on macOS only clang is supported.') +endif + +if not (cc.find_library('z', required: false).found() or + cc.find_library('zlib', required : false).found() or + cc.find_library('zlib1', required : false).found()) + error('MESON_SKIP_TEST Cannot seem to find zlib via find_library, this test will probably fail.') +endif + +z = dependency('zlib', method : 'system') +assert(z.version().version_compare('>= 1.2'), 'Version does not seem to have been detected correctly.') diff --git a/meson/test cases/common/222 native prop/crossfile.ini b/meson/test cases/common/222 native prop/crossfile.ini new file mode 100644 index 000000000..13deef3de --- /dev/null +++ b/meson/test cases/common/222 native prop/crossfile.ini @@ -0,0 +1,4 @@ +[properties] +astring = 'cross' +anarray = ['one', 'two'] +red = true diff --git a/meson/test cases/common/222 native prop/meson.build b/meson/test cases/common/222 native prop/meson.build new file mode 100644 index 000000000..87523719e --- /dev/null +++ b/meson/test cases/common/222 native prop/meson.build @@ -0,0 +1,49 @@ +project('get prop') + +x = meson.get_external_property('astring') +ref = meson.is_cross_build() ? 'cross' : 'mystring' +assert(x==ref, 'did not get native property string. 
did you use "meson setup --native-file native.txt"') + +x = meson.get_external_property('astring', native: true) +assert(x=='mystring', 'did not get native property with native:true and non-cross build.') + +x = meson.get_external_property('astring', 'fallback', native: false) +assert(x==ref, 'did not get get native property with native:false and non-cross build.') + + +x = meson.get_external_property('notexist', 'fallback') +assert(x=='fallback', 'fallback did not work') + +x = meson.get_external_property('notexist', 'fallback', native: true) +assert(x=='fallback', 'fallback native:true did not work') + +x = meson.get_external_property('notexist', 'fallback', native: false) +assert(x=='fallback', 'fallback native:false did not work') + + +x = meson.get_external_property('anarray') +assert(x==['one', 'two'], 'array did not work') + +assert(meson.has_external_property('anarray'), 'expected property "anarray" to exist') +assert(meson.has_external_property('astring'), 'expected property "astring" to exist') +assert(not meson.has_external_property('abool'), 'did not expect property "abool" to exist') + +# These exist in both +assert(meson.has_external_property('anarray', native: false), 'FIXME') +assert(meson.has_external_property('anarray', native: true), 'FIXME') +assert(meson.has_external_property('astring', native: false), 'FIXME') +assert(meson.has_external_property('astring', native: true), 'FIXME') + +if meson.is_cross_build() + # This property only exists in the cross file + assert(meson.has_external_property('red'), 'expected property "red" to exist in cross file') + assert(meson.has_external_property('red', native: false), 'expected property "red" to exist in cross file') + assert(not meson.has_external_property('red', native: true), 'did not expect property "red" to exist in native file') + + assert(not meson.has_external_property('abool', native: false), 'FIXME') + assert(not meson.has_external_property('abool', native: false), 'FIXME') +else + assert(not meson.has_external_property('red'), 'did not expect property "red" to exist in native file') + assert(not meson.has_external_property('red', native: false), 'did not expect property "red" to exist in cross file because we are not doing a cross build') + assert(not meson.has_external_property('red', native: true), 'did not expect property "red" to exist in native file') +endif diff --git a/meson/test cases/common/222 native prop/nativefile.ini b/meson/test cases/common/222 native prop/nativefile.ini new file mode 100644 index 000000000..03c1e0397 --- /dev/null +++ b/meson/test cases/common/222 native prop/nativefile.ini @@ -0,0 +1,3 @@ +[properties] +astring = 'mystring' +anarray = ['one', 'two'] \ No newline at end of file diff --git a/meson/test cases/common/223 persubproject options/foo.c b/meson/test cases/common/223 persubproject options/foo.c new file mode 100644 index 000000000..63e4de6a3 --- /dev/null +++ b/meson/test cases/common/223 persubproject options/foo.c @@ -0,0 +1,5 @@ +int foo(void); + +int foo(void) { + return 0; +} diff --git a/meson/test cases/common/223 persubproject options/meson.build b/meson/test cases/common/223 persubproject options/meson.build new file mode 100644 index 000000000..b9cbfe285 --- /dev/null +++ b/meson/test cases/common/223 persubproject options/meson.build @@ -0,0 +1,14 @@ +project('persubproject options', 'c', + default_options : ['werror=true', + 'warning_level=3']) + +assert(get_option('default_library') == 'both', 'Parent default_library should be "both"') +assert(get_option('werror')) 
+assert(get_option('warning_level') == '3') + +# Check it build both by calling a method only both_libraries target implement +lib = library('lib1', 'foo.c') +lib.get_static_lib() + +subproject('sub1') +subproject('sub2', default_options : ['default_library=static']) diff --git a/meson/test cases/common/223 persubproject options/subprojects/sub1/foo.c b/meson/test cases/common/223 persubproject options/subprojects/sub1/foo.c new file mode 100644 index 000000000..82ad2c2d0 --- /dev/null +++ b/meson/test cases/common/223 persubproject options/subprojects/sub1/foo.c @@ -0,0 +1,8 @@ +int foo(void); + +int foo(void) { + /* This is built with -Werror, it would error if warning_level=3 was inherited + * from main project and not overridden by this subproject's default_options. */ + int x; + return 0; +} diff --git a/meson/test cases/common/223 persubproject options/subprojects/sub1/meson.build b/meson/test cases/common/223 persubproject options/subprojects/sub1/meson.build new file mode 100644 index 000000000..4e4bc1ba0 --- /dev/null +++ b/meson/test cases/common/223 persubproject options/subprojects/sub1/meson.build @@ -0,0 +1,9 @@ +project('sub1', 'c', + default_options : ['warning_level=0']) + +assert(get_option('default_library') == 'both', 'Should inherit parent project default_library') +assert(get_option('warning_level') == '0') + +# Check it build both by calling a method only both_libraries target implement +lib = library('lib1', 'foo.c') +lib.get_static_lib() diff --git a/meson/test cases/common/223 persubproject options/subprojects/sub2/foo.c b/meson/test cases/common/223 persubproject options/subprojects/sub2/foo.c new file mode 100644 index 000000000..cf7201b92 --- /dev/null +++ b/meson/test cases/common/223 persubproject options/subprojects/sub2/foo.c @@ -0,0 +1,9 @@ +int foo(void); + +#ifdef __GNUC__ +#warning This should not produce error +#endif + +int foo(void) { + return 0; +} diff --git a/meson/test cases/common/223 persubproject options/subprojects/sub2/meson.build b/meson/test cases/common/223 persubproject options/subprojects/sub2/meson.build new file mode 100644 index 000000000..f1226b8ba --- /dev/null +++ b/meson/test cases/common/223 persubproject options/subprojects/sub2/meson.build @@ -0,0 +1,10 @@ +project('sub2', 'c', + default_options : ['default_library=shared', + 'werror=false']) + +assert(get_option('default_library') == 'static', 'Parent should override default_library') +assert(not get_option('werror')) + +# If it doesn't build only a static library, it would make target name clash. 
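# With default_library=static forced by the parent's subproject() call,
# library() creates only the static variant, so the explicit shared_library()
# of the same name does not collide with it.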
+library('lib1', 'foo.c') +shared_library('lib1', 'foo.c') diff --git a/meson/test cases/common/223 persubproject options/test.json b/meson/test cases/common/223 persubproject options/test.json new file mode 100644 index 000000000..ccfa9ff5f --- /dev/null +++ b/meson/test cases/common/223 persubproject options/test.json @@ -0,0 +1,7 @@ +{ + "matrix": { + "options": { + "default_library": [ { "val": "both" } ] + } + } +} diff --git a/meson/test cases/common/224 arithmetic operators/meson.build b/meson/test cases/common/224 arithmetic operators/meson.build new file mode 100644 index 000000000..a904bd040 --- /dev/null +++ b/meson/test cases/common/224 arithmetic operators/meson.build @@ -0,0 +1,8 @@ +project('arithmetic operators') +assert(5 - 3 - 1 == 1) +assert(5 - (3 - 1) == 3) +assert(5 - 1 * 3 - 3 == -1) +assert(420 - 300 - 51 == 69) +assert(1000 / 2 / 2 / 2 == 125) +assert(4 * 9 / 3 % 8 - 3 - 10 / 2 == -4) +assert(94 - 30 + (2 - (40 - 6 + 7) - 9) - 10 == 6) diff --git a/meson/test cases/common/225 link language/c_linkage.cpp b/meson/test cases/common/225 link language/c_linkage.cpp new file mode 100644 index 000000000..dc006b936 --- /dev/null +++ b/meson/test cases/common/225 link language/c_linkage.cpp @@ -0,0 +1,5 @@ +extern "C" { + int makeInt(void) { + return 0; + } +} diff --git a/meson/test cases/common/225 link language/c_linkage.h b/meson/test cases/common/225 link language/c_linkage.h new file mode 100644 index 000000000..1609f4702 --- /dev/null +++ b/meson/test cases/common/225 link language/c_linkage.h @@ -0,0 +1,10 @@ + +#ifdef __cplusplus +extern "C" { +#endif + +int makeInt(void); + +#ifdef __cplusplus +} +#endif diff --git a/meson/test cases/common/225 link language/lib.cpp b/meson/test cases/common/225 link language/lib.cpp new file mode 100644 index 000000000..ab4382832 --- /dev/null +++ b/meson/test cases/common/225 link language/lib.cpp @@ -0,0 +1,5 @@ +extern "C" { + int makeInt(void) { + return 1; + } +} diff --git a/meson/test cases/common/225 link language/main.c b/meson/test cases/common/225 link language/main.c new file mode 100644 index 000000000..5a167e785 --- /dev/null +++ b/meson/test cases/common/225 link language/main.c @@ -0,0 +1,5 @@ +#include "c_linkage.h" + +int main(void) { + return makeInt(); +} diff --git a/meson/test cases/common/225 link language/meson.build b/meson/test cases/common/225 link language/meson.build new file mode 100644 index 000000000..f9af6cd32 --- /dev/null +++ b/meson/test cases/common/225 link language/meson.build @@ -0,0 +1,18 @@ +project( + 'link_language', + ['c', 'cpp'], +) + +exe = executable( + 'main', + ['main.c', 'c_linkage.cpp'], + link_language : 'c', +) + +lib = library( + 'mylib', + ['lib.cpp'], + link_language : 'c', +) + +test('main', exe) diff --git a/meson/test cases/common/226 link depends indexed custom target/check_arch.py b/meson/test cases/common/226 link depends indexed custom target/check_arch.py new file mode 100644 index 000000000..927bf87c0 --- /dev/null +++ b/meson/test cases/common/226 link depends indexed custom target/check_arch.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 + +import re +import sys +import shutil +import subprocess + +exepath = sys.argv[1] +want_arch = sys.argv[2] +dummy_output = sys.argv[3] + +with open(dummy_output, 'w') as f: + f.write('') + +if not shutil.which('dumpbin'): + print('dumpbin not found, skipping') + sys.exit(0) + +out = subprocess.check_output(['dumpbin', '/HEADERS', exepath], + universal_newlines=True) +for line in out.split('\n'): + m = re.match(r'.* machine 
\(([A-Za-z0-9]+)\)$', line) + if m: + arch = m.groups()[0].lower() + +if arch == 'arm64': + arch = 'aarch64' +elif arch == 'x64': + arch = 'x86_64' + +if arch != want_arch: + raise RuntimeError(f'Wanted arch {want_arch} but exe uses {arch}') diff --git a/meson/test cases/common/226 link depends indexed custom target/foo.c b/meson/test cases/common/226 link depends indexed custom target/foo.c new file mode 100644 index 000000000..58c86a62b --- /dev/null +++ b/meson/test cases/common/226 link depends indexed custom target/foo.c @@ -0,0 +1,15 @@ +#include + +int main(void) { + const char *fn = DEPFILE; + FILE *f = fopen(fn, "r"); + if (!f) { + printf("could not open %s", fn); + return 1; + } + else { + printf("successfully opened %s", fn); + } + + return 0; +} diff --git a/meson/test cases/common/226 link depends indexed custom target/make_file.py b/meson/test cases/common/226 link depends indexed custom target/make_file.py new file mode 100644 index 000000000..6a43b7d05 --- /dev/null +++ b/meson/test cases/common/226 link depends indexed custom target/make_file.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +import sys + +with open(sys.argv[1], 'w') as f: + print('# this file does nothing', file=f) + +with open(sys.argv[2], 'w') as f: + print('# this file does nothing', file=f) diff --git a/meson/test cases/common/226 link depends indexed custom target/meson.build b/meson/test cases/common/226 link depends indexed custom target/meson.build new file mode 100644 index 000000000..c41c4c1e7 --- /dev/null +++ b/meson/test cases/common/226 link depends indexed custom target/meson.build @@ -0,0 +1,25 @@ +project('link_depends_indexed_custom_target', 'c') + +if meson.backend().startswith('vs') + # FIXME: Broken on the VS backends + error('MESON_SKIP_TEST see https://github.com/mesonbuild/meson/issues/1799') +endif + +cmd = find_program('make_file.py') + +dep_files = custom_target('gen_dep', + command: [cmd, '@OUTPUT@'], + output: ['dep_file1', 'dep_file2']) + +exe = executable('foo', 'foo.c', + link_depends: dep_files[1], + c_args: ['-DDEPFILE="' + dep_files[0].full_path()+ '"']) + +check_arch = find_program('check_arch.py') +custom_target('check-arch', + command: [check_arch, exe, host_machine.cpu_family(), '@OUTPUT@'], + build_by_default: true, + output: 'dummy.txt') + +# check that dep_file1 exists, which means that link_depends target ran +test('runtest', exe) diff --git a/meson/test cases/common/227 very long commmand line/codegen.py b/meson/test cases/common/227 very long commmand line/codegen.py new file mode 100755 index 000000000..b1de6073e --- /dev/null +++ b/meson/test cases/common/227 very long commmand line/codegen.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +import sys +from pathlib import Path + +Path(sys.argv[2]).write_text( + 'int func{n}(void) {{ return {n}; }}'.format(n=sys.argv[1])) diff --git a/meson/test cases/common/227 very long commmand line/main.c b/meson/test cases/common/227 very long commmand line/main.c new file mode 100644 index 000000000..78f2de106 --- /dev/null +++ b/meson/test cases/common/227 very long commmand line/main.c @@ -0,0 +1 @@ +int main(void) { return 0; } diff --git a/meson/test cases/common/227 very long commmand line/meson.build b/meson/test cases/common/227 very long commmand line/meson.build new file mode 100644 index 000000000..70058e6c8 --- /dev/null +++ b/meson/test cases/common/227 very long commmand line/meson.build @@ -0,0 +1,49 @@ +project('very long command lines', 'c') + +# Get the current system's commandline length limit. 
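# (Illustrative aside, not used by this test: on POSIX systems the combined
# argv+environment limit could be queried at configure time, e.g.
#   arg_max = run_command('getconf', 'ARG_MAX').stdout().strip()
# though the bound that matters for ninja's single-argument command lines is
# the per-argument MAX_ARG_STRLEN, which the hard-coded values below reflect.)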
+if build_machine.system() == 'windows' + # Various limits on windows: + # cmd.exe: 8kb + # CreateProcess: 32kb + limit = 32767 + # NOTE: filename limit is 260 characters unless + # 1. Python >= 3.6 is being used + # 2. Windows 10 registry has been edited to enable long pathnaems + # ninja backend uses absolute filenames, so we ensure they don't exceed 260. +elif build_machine.system() == 'cygwin' + # cygwin-to-win32: see above + # cygwin-to-cygwin: no limit? + # Cygwin is slow, so only test it lightly here. + limit = 8192 +else + # ninja passes whole line as a single argument, for which + # the limit is 128k as of Linux 2.6.23. See MAX_ARG_STRLEN. + # BSD seems similar, see https://www.in-ulm.de/~mascheck/various/argmax + limit = 131072 +endif +# Now exceed that limit, but not so far that the test takes too long. +namelen = 260 +nfiles = 50 + limit / namelen +message('Expected link commandline length is approximately ' + '@0@'.format((nfiles * (namelen+28)))) + +seq = run_command('name_gen.py', nfiles.to_string(), meson.build_root()).stdout().strip().split('\n') + +sources = [] +codegen = find_program('codegen.py') + +i=0 +foreach name : seq + sources += custom_target('codegen' + i.to_string(), + command: [codegen, i.to_string(), '@OUTPUT@'], + output: name + '.c') + i+=1 +endforeach + +shared_library('sharedlib', sources) +static_library('staticlib', sources) +executable('app', 'main.c', sources) + +# Also test short commandlines to make sure that doesn't regress +shared_library('sharedlib0', sources[0]) +static_library('staticlib0', sources[0]) +executable('app0', 'main.c', sources[0]) diff --git a/meson/test cases/common/227 very long commmand line/name_gen.py b/meson/test cases/common/227 very long commmand line/name_gen.py new file mode 100755 index 000000000..84352984a --- /dev/null +++ b/meson/test cases/common/227 very long commmand line/name_gen.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +""" +generate sequence of filename that does not exceed MAX_LEN=260 +for Python < 3.6 and Windows without modified registry +""" + +import sys +import string + +name_len = 260 - len(sys.argv[2]) - 4 - 39 - 4 - 2 +if name_len < 1: + raise ValueError('The meson build directory pathname is so long ' + 'that we cannot generate filenames within 260 characters.') +# leave room for suffix and file separators, and meson generated text +# e.g. 
".c.obj.d" and other decorators added by Meson at configuration +# for intermediate files + +base = string.ascii_letters * 5 # 260 characters +max_num_len = len(str(sys.argv[1])) +base = base[: name_len - max_num_len] + +for i in range(int(sys.argv[1])): + print("{base}{i:0{max_num_len}d}".format(base=base, max_num_len=max_num_len, i=i)) diff --git a/meson/test cases/common/228 custom_target source/a b/meson/test cases/common/228 custom_target source/a new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/228 custom_target source/meson.build b/meson/test cases/common/228 custom_target source/meson.build new file mode 100644 index 000000000..98b9d2647 --- /dev/null +++ b/meson/test cases/common/228 custom_target source/meson.build @@ -0,0 +1,5 @@ +project('a', ['c']) + +x = find_program('x.py') +outs = custom_target('foo', output: ['x.c', 'y'], input: 'a', command: [x]) +executable('testprog', outs[0]) diff --git a/meson/test cases/common/228 custom_target source/x.py b/meson/test cases/common/228 custom_target source/x.py new file mode 100644 index 000000000..12f40c83f --- /dev/null +++ b/meson/test cases/common/228 custom_target source/x.py @@ -0,0 +1,5 @@ +#! /usr/bin/env python3 +with open('x.c', 'w') as f: + print('int main(void) { return 0; }', file=f) +with open('y', 'w'): + pass diff --git a/meson/test cases/common/229 disabler array addition/meson.build b/meson/test cases/common/229 disabler array addition/meson.build new file mode 100644 index 000000000..231f76acf --- /dev/null +++ b/meson/test cases/common/229 disabler array addition/meson.build @@ -0,0 +1,9 @@ +project('disabler_inside_array', 'c') + +exes = [] + +exes += library('a', 'test.c') + +exes += library('b', 'test.c', dependencies : disabler()) + +exes += library('c', 'test.c') diff --git a/meson/test cases/common/229 disabler array addition/test.c b/meson/test cases/common/229 disabler array addition/test.c new file mode 100644 index 000000000..e9a7aac03 --- /dev/null +++ b/meson/test cases/common/229 disabler array addition/test.c @@ -0,0 +1 @@ +int stub(void) { return 0; } diff --git a/meson/test cases/common/23 endian/meson.build b/meson/test cases/common/23 endian/meson.build new file mode 100644 index 000000000..80186fef8 --- /dev/null +++ b/meson/test cases/common/23 endian/meson.build @@ -0,0 +1,7 @@ +project('endian check', 'c') + +if host_machine.endian() == 'big' + add_global_arguments('-DIS_BE', language : 'c') +endif + +test('endiantest', executable('prog', 'prog.c')) diff --git a/meson/test cases/common/23 endian/prog.c b/meson/test cases/common/23 endian/prog.c new file mode 100644 index 000000000..90bd9583a --- /dev/null +++ b/meson/test cases/common/23 endian/prog.c @@ -0,0 +1,24 @@ +#include + +int is_big_endian(void) { + uint32_t one = 1; + if(*((uint8_t*) &one) == 1) + return 0; + return 1; +} + + +int main(void) { + int is_be_check = is_big_endian(); + int is_be; +#ifdef IS_BE + is_be = 1; +#else + is_be = 0; +#endif + if(is_be_check && is_be) + return 0; + if(!is_be_check && !is_be) + return 0; + return 1; +} diff --git a/meson/test cases/common/230 external project/app.c b/meson/test cases/common/230 external project/app.c new file mode 100644 index 000000000..166f00737 --- /dev/null +++ b/meson/test cases/common/230 external project/app.c @@ -0,0 +1,7 @@ +#include + +int main(void) +{ + return call_foo() == 42 ? 
0 : 1; +} + diff --git a/meson/test cases/common/230 external project/func.c b/meson/test cases/common/230 external project/func.c new file mode 100644 index 000000000..5e8f933f5 --- /dev/null +++ b/meson/test cases/common/230 external project/func.c @@ -0,0 +1,7 @@ +#include "func.h" + +int func(void) +{ + return 1; +} + diff --git a/meson/test cases/common/230 external project/func.h b/meson/test cases/common/230 external project/func.h new file mode 100644 index 000000000..340b82ac8 --- /dev/null +++ b/meson/test cases/common/230 external project/func.h @@ -0,0 +1 @@ +int func(void); diff --git a/meson/test cases/common/230 external project/libfoo/configure b/meson/test cases/common/230 external project/libfoo/configure new file mode 100755 index 000000000..0e4aa72b2 --- /dev/null +++ b/meson/test cases/common/230 external project/libfoo/configure @@ -0,0 +1,44 @@ +#! /bin/sh + +srcdir=$(dirname "$0") + +for i in "$@" +do +case $i in + --prefix=*) + PREFIX="${i#*=}" + shift + ;; + --libdir=*) + LIBDIR="${i#*=}" + shift + ;; + --includedir=*) + INCDIR="${i#*=}" + shift + ;; + --libext=*) + LIBEXT="${i#*=}" + shift + ;; + *) + shift + ;; +esac +done + +DEP_ARGS=$(pkg-config --cflags --libs somelib) + +cat > Makefile << EOL +all: libfoo.$LIBEXT + +libfoo.$LIBEXT: + $CC "$srcdir/libfoo.c" -shared -fPIC $DEP_ARGS -o \$@ + +install: libfoo.$LIBEXT + mkdir -p "\$(DESTDIR)$LIBDIR"; + mkdir -p "\$(DESTDIR)$LIBDIR/pkgconfig"; + mkdir -p "\$(DESTDIR)$INCDIR"; + cp \$< "\$(DESTDIR)$LIBDIR"; + cp "$srcdir/libfoo.h" "\$(DESTDIR)$INCDIR"; +EOL diff --git a/meson/test cases/common/230 external project/libfoo/libfoo.c b/meson/test cases/common/230 external project/libfoo/libfoo.c new file mode 100644 index 000000000..3f6228243 --- /dev/null +++ b/meson/test cases/common/230 external project/libfoo/libfoo.c @@ -0,0 +1,8 @@ +#include "libfoo.h" + +int func(void); + +int call_foo() +{ + return func() == 1 ? 
42 : 0; +} diff --git a/meson/test cases/common/230 external project/libfoo/libfoo.h b/meson/test cases/common/230 external project/libfoo/libfoo.h new file mode 100644 index 000000000..8981f18b4 --- /dev/null +++ b/meson/test cases/common/230 external project/libfoo/libfoo.h @@ -0,0 +1,3 @@ +#pragma once + +int call_foo(void); diff --git a/meson/test cases/common/230 external project/libfoo/meson.build b/meson/test cases/common/230 external project/libfoo/meson.build new file mode 100644 index 000000000..941e13f94 --- /dev/null +++ b/meson/test cases/common/230 external project/libfoo/meson.build @@ -0,0 +1,22 @@ +mod = import('unstable_external_project') + +target_system = target_machine.system() +if target_system in ['windows', 'cygwin'] + libext = 'dll' +elif target_system == 'darwin' + libext = 'dylib' +else + libext = 'so' +endif + +p = mod.add_project('configure', + configure_options : [ + '--prefix=@PREFIX@', + '--libdir=@PREFIX@/@LIBDIR@', + '--includedir=@PREFIX@/@INCLUDEDIR@', + '--libext=' + libext, + ], +) + +libfoo_dep = declare_dependency(link_with : somelib, + dependencies : p.dependency('foo')) diff --git a/meson/test cases/common/230 external project/meson.build b/meson/test cases/common/230 external project/meson.build new file mode 100644 index 000000000..d1ed797c4 --- /dev/null +++ b/meson/test cases/common/230 external project/meson.build @@ -0,0 +1,27 @@ +project('test external project', 'c') + +if not find_program('pkg-config', required: false).found() + error('MESON_SKIP_TEST: pkg-config not found') +endif + +if not find_program('make', required : false).found() + error('MESON_SKIP_TEST: make not found') +endif + +if host_machine.system() == 'windows' + error('MESON_SKIP_TEST: The fake configure script is too dumb to work on Windows') +endif + +if meson.is_cross_build() + # CI uses PKG_CONFIG_SYSROOT_DIR which breaks -uninstalled.pc usage. 
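  # (Presumably the sysroot prefix gets prepended to the paths pkg-config
  # reports, so build-tree paths from the generated somelib-uninstalled.pc no
  # longer resolve.)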
+ error('MESON_SKIP_TEST: Cross build support is too limited for this test') +endif + +pkg = import('pkgconfig') + +somelib = library('somelib', 'func.c') +pkg.generate(somelib) + +subdir('libfoo') + +executable('test-find-library', 'app.c', dependencies : libfoo_dep) diff --git a/meson/test cases/common/230 external project/test.json b/meson/test cases/common/230 external project/test.json new file mode 100644 index 000000000..4888e8752 --- /dev/null +++ b/meson/test cases/common/230 external project/test.json @@ -0,0 +1,7 @@ +{ + "installed": [ + { "type": "shared_lib", "file": "usr/lib/foo" }, + { "type": "file", "file": "usr/include/libfoo.h" }, + { "type": "file", "file": "usr/lib/pkgconfig/somelib.pc" } + ] +} diff --git a/meson/test cases/common/231 subdir files/meson.build b/meson/test cases/common/231 subdir files/meson.build new file mode 100644 index 000000000..eb472f39e --- /dev/null +++ b/meson/test cases/common/231 subdir files/meson.build @@ -0,0 +1,3 @@ +project('subdir files test', 'c') +subdir('subdir') +executable('prog', sources: subdir_sources) diff --git a/meson/test cases/common/231 subdir files/subdir/meson.build b/meson/test cases/common/231 subdir files/subdir/meson.build new file mode 100644 index 000000000..70909c72f --- /dev/null +++ b/meson/test cases/common/231 subdir files/subdir/meson.build @@ -0,0 +1 @@ +subdir_sources = files(['prog.c']) diff --git a/meson/test cases/common/231 subdir files/subdir/prog.c b/meson/test cases/common/231 subdir files/subdir/prog.c new file mode 100644 index 000000000..78f2de106 --- /dev/null +++ b/meson/test cases/common/231 subdir files/subdir/prog.c @@ -0,0 +1 @@ +int main(void) { return 0; } diff --git a/meson/test cases/common/232 dependency allow_fallback/meson.build b/meson/test cases/common/232 dependency allow_fallback/meson.build new file mode 100644 index 000000000..b189faf12 --- /dev/null +++ b/meson/test cases/common/232 dependency allow_fallback/meson.build @@ -0,0 +1,12 @@ +project('subproject fallback', 'c') + +foob_dep = dependency('foob', allow_fallback: true, required: false) +assert(foob_dep.found()) + +# Careful! Once a submodule has been triggered and it has +# overridden the dependency, it sticks. 
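# The first lookup above configured subprojects/foob, whose build file calls
# meson.override_dependency('foob', ...); every later dependency('foob') now
# resolves to that override, so even allow_fallback: false finds it.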
+foob_dep = dependency('foob', allow_fallback: false, required: false) +assert(foob_dep.found()) + +foob3_dep = dependency('foob3', allow_fallback: false, required: false) +assert(not foob3_dep.found()) diff --git a/meson/test cases/common/232 dependency allow_fallback/subprojects/foob/meson.build b/meson/test cases/common/232 dependency allow_fallback/subprojects/foob/meson.build new file mode 100644 index 000000000..b2c4814e2 --- /dev/null +++ b/meson/test cases/common/232 dependency allow_fallback/subprojects/foob/meson.build @@ -0,0 +1,2 @@ +project('foob', 'c') +meson.override_dependency('foob', declare_dependency()) diff --git a/meson/test cases/common/232 dependency allow_fallback/subprojects/foob3/meson.build b/meson/test cases/common/232 dependency allow_fallback/subprojects/foob3/meson.build new file mode 100644 index 000000000..9fdb18817 --- /dev/null +++ b/meson/test cases/common/232 dependency allow_fallback/subprojects/foob3/meson.build @@ -0,0 +1,2 @@ +project('foob3', 'c') +# Note that there is no override_dependency here diff --git a/meson/test cases/common/233 wrap case/meson.build b/meson/test cases/common/233 wrap case/meson.build new file mode 100644 index 000000000..2b82bf392 --- /dev/null +++ b/meson/test cases/common/233 wrap case/meson.build @@ -0,0 +1,6 @@ +project('CaSe DePenDenCy In Wrap', 'c') + +d = dependency('UP_down') + +e = executable('prog', 'prog.c', dependencies: d) +test('prog', e) diff --git a/meson/test cases/common/233 wrap case/prog.c b/meson/test cases/common/233 wrap case/prog.c new file mode 100644 index 000000000..5183deabd --- /dev/null +++ b/meson/test cases/common/233 wrap case/prog.c @@ -0,0 +1,13 @@ +#include +#include + +int main(int argc, char **argv) { + if(argc == 42) { + printf("Very sneaky, %s\n", argv[0]); + } +#ifdef UP_IS_DOWN + return 0; +#else + return 1; +#endif +} diff --git a/meson/test cases/common/233 wrap case/subprojects/up_down.wrap b/meson/test cases/common/233 wrap case/subprojects/up_down.wrap new file mode 100644 index 000000000..d66818fe8 --- /dev/null +++ b/meson/test cases/common/233 wrap case/subprojects/up_down.wrap @@ -0,0 +1,5 @@ +[wrap-file] +directory = up_down + +[provide] +UP_down = up_down_dep diff --git a/meson/test cases/common/233 wrap case/subprojects/up_down/meson.build b/meson/test cases/common/233 wrap case/subprojects/up_down/meson.build new file mode 100644 index 000000000..5db89d15d --- /dev/null +++ b/meson/test cases/common/233 wrap case/subprojects/up_down/meson.build @@ -0,0 +1,3 @@ +project('up down', 'c') + +up_down_dep = declare_dependency(include_directories: '.') diff --git a/meson/test cases/common/233 wrap case/subprojects/up_down/up_down.h b/meson/test cases/common/233 wrap case/subprojects/up_down/up_down.h new file mode 100644 index 000000000..8d968ce14 --- /dev/null +++ b/meson/test cases/common/233 wrap case/subprojects/up_down/up_down.h @@ -0,0 +1,3 @@ +#pragma once + +#define UP_IS_DOWN diff --git a/meson/test cases/common/234 get_file_contents/.gitattributes b/meson/test cases/common/234 get_file_contents/.gitattributes new file mode 100644 index 000000000..abec47db4 --- /dev/null +++ b/meson/test cases/common/234 get_file_contents/.gitattributes @@ -0,0 +1 @@ +utf-16-text binary diff --git a/meson/test cases/common/234 get_file_contents/VERSION b/meson/test cases/common/234 get_file_contents/VERSION new file mode 100644 index 000000000..26aaba0e8 --- /dev/null +++ b/meson/test cases/common/234 get_file_contents/VERSION @@ -0,0 +1 @@ +1.2.0 diff --git a/meson/test 
cases/common/234 get_file_contents/meson.build b/meson/test cases/common/234 get_file_contents/meson.build new file mode 100644 index 000000000..a8c68d63f --- /dev/null +++ b/meson/test cases/common/234 get_file_contents/meson.build @@ -0,0 +1,21 @@ +project( + 'meson-fs-read-file', + [], + version: files('VERSION') +) +fs = import('fs') + +assert(fs.read('VERSION').strip() == meson.project_version(), 'file misread') + +expected = ( + '∮ Eâ‹…da = Q, n → ∞, ∑ f(i) = ∠g(i), ∀x∈â„: ⌈x⌉ = −⌊−x⌋, α ∧ ¬β = ¬(¬α ∨ β)' +) +assert( + fs.read('utf-16-text', encoding: 'utf-16').strip() == expected, + 'file was not decoded correctly' +) + +# Make sure we handle `files()` objects properly, too +version_file = files('VERSION') + +subdir('other') diff --git a/meson/test cases/common/234 get_file_contents/other/meson.build b/meson/test cases/common/234 get_file_contents/other/meson.build new file mode 100644 index 000000000..9a7e4be56 --- /dev/null +++ b/meson/test cases/common/234 get_file_contents/other/meson.build @@ -0,0 +1,3 @@ +fs = import('fs') +assert(fs.read(version_file).strip() == '1.2.0') +assert(fs.read('../VERSION').strip() == '1.2.0') diff --git a/meson/test cases/common/234 get_file_contents/utf-16-text b/meson/test cases/common/234 get_file_contents/utf-16-text new file mode 100644 index 000000000..ed1fefe83 Binary files /dev/null and b/meson/test cases/common/234 get_file_contents/utf-16-text differ diff --git a/meson/test cases/common/235 invalid standard overriden to valid/main.c b/meson/test cases/common/235 invalid standard overriden to valid/main.c new file mode 100644 index 000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/common/235 invalid standard overriden to valid/main.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/235 invalid standard overriden to valid/meson.build b/meson/test cases/common/235 invalid standard overriden to valid/meson.build new file mode 100644 index 000000000..9463e4341 --- /dev/null +++ b/meson/test cases/common/235 invalid standard overriden to valid/meson.build @@ -0,0 +1,8 @@ +project( + 'invalid C standard overriden to valid one', + 'c', + default_options : ['c_std=invalid99'], +) + +exe = executable('main', 'main.c') +test('main', exe) diff --git a/meson/test cases/common/235 invalid standard overriden to valid/test.json b/meson/test cases/common/235 invalid standard overriden to valid/test.json new file mode 100644 index 000000000..c9b00ce19 --- /dev/null +++ b/meson/test cases/common/235 invalid standard overriden to valid/test.json @@ -0,0 +1,9 @@ +{ + "matrix": { + "options": { + "c_std": [ + { "val": "c89" } + ] + } + } +} diff --git a/meson/test cases/common/236 proper args splitting/main.c b/meson/test cases/common/236 proper args splitting/main.c new file mode 100644 index 000000000..4b2038fcb --- /dev/null +++ b/meson/test cases/common/236 proper args splitting/main.c @@ -0,0 +1,11 @@ +#ifndef FOO +# error "FOO is not defined" +#endif + +#ifndef BAR +# error "BAR is not defined" +#endif + +int main(void) { + return 0; +} diff --git a/meson/test cases/common/236 proper args splitting/meson.build b/meson/test cases/common/236 proper args splitting/meson.build new file mode 100644 index 000000000..4a36f9cbb --- /dev/null +++ b/meson/test cases/common/236 proper args splitting/meson.build @@ -0,0 +1,9 @@ +project('proper args splitting', 'c') + +test( + 'main', + executable( + 'main', + 'main.c', + ) +) diff --git a/meson/test cases/common/236 proper args splitting/test.json b/meson/test 
cases/common/236 proper args splitting/test.json new file mode 100644 index 000000000..b1738aab0 --- /dev/null +++ b/meson/test cases/common/236 proper args splitting/test.json @@ -0,0 +1,9 @@ +{ + "matrix": { + "options": { + "c_args": [ + { "val": "-DFOO -DBAR" } + ] + } + } +} diff --git a/meson/test cases/common/237 fstrings/meson.build b/meson/test cases/common/237 fstrings/meson.build new file mode 100644 index 000000000..2db2649b5 --- /dev/null +++ b/meson/test cases/common/237 fstrings/meson.build @@ -0,0 +1,7 @@ +project('meson-test', 'c') + +n = 10 +m = 'bar' +s = f'test @n@ string (@@n@@): @m@' + +assert(s == 'test 10 string (@10@): bar', 'Incorrect string formatting') diff --git a/meson/test cases/common/238 dependency include_type inconsistency/bar/meson.build b/meson/test cases/common/238 dependency include_type inconsistency/bar/meson.build new file mode 100644 index 000000000..6e1218b79 --- /dev/null +++ b/meson/test cases/common/238 dependency include_type inconsistency/bar/meson.build @@ -0,0 +1,5 @@ +baz_dep = dependency('baz', + fallback: ['baz', 'baz_dep'], + include_type: 'system', + method: 'pkg-config', # if we comment this out or change to 'auto' the build is successful + required: false) diff --git a/meson/test cases/common/238 dependency include_type inconsistency/meson.build b/meson/test cases/common/238 dependency include_type inconsistency/meson.build new file mode 100644 index 000000000..7f28e25b9 --- /dev/null +++ b/meson/test cases/common/238 dependency include_type inconsistency/meson.build @@ -0,0 +1,5 @@ +project('test', 'c', 'cpp') + +foo_dep = subproject('foo').get_variable('foo_dep') + +subdir('bar') diff --git a/meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz.wrap b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz.wrap new file mode 100644 index 000000000..c7277941e --- /dev/null +++ b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz.wrap @@ -0,0 +1,3 @@ +[wrap-file] +source_url = http://host.invalid/baz.tar.gz +source_filename = baz.tar.gz diff --git a/meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz/meson.build b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz/meson.build new file mode 100644 index 000000000..a6a37750e --- /dev/null +++ b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/baz/meson.build @@ -0,0 +1,3 @@ +project('baz', 'cpp') + +baz_dep = declare_dependency() diff --git a/meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo.wrap b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo.wrap new file mode 100644 index 000000000..dcc434b6b --- /dev/null +++ b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo.wrap @@ -0,0 +1,3 @@ +[wrap-file] +source_url = http://host.invalid/foo.tar.gz +source_filename = foo.tar.gz diff --git a/meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo/meson.build b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo/meson.build new file mode 100644 index 000000000..51d950346 --- /dev/null +++ b/meson/test cases/common/238 dependency include_type inconsistency/subprojects/foo/meson.build @@ -0,0 +1,9 @@ +project('foo', 'c', 'cpp') + +baz_dep = dependency('baz', + fallback: ['baz', 'baz_dep'], + include_type: 'system', + method: 'pkg-config', + required: false) + +foo_dep = 
declare_dependency(dependencies: baz_dep) diff --git a/meson/test cases/common/239 includedir violation/meson.build b/meson/test cases/common/239 includedir violation/meson.build new file mode 100644 index 000000000..0216be6ef --- /dev/null +++ b/meson/test cases/common/239 includedir violation/meson.build @@ -0,0 +1,11 @@ +project('foo', 'c') + +# It is fine to include the root source dir +include_directories('.') +subproject('sub') + +# This is here rather than in failing because this needs a +# transition period to avoid breaking existing projects. +# Once this becomes an error, move this under failing tests. + +inc = include_directories('subprojects/sub/include') diff --git a/meson/test cases/common/239 includedir violation/subprojects/sub/include/placeholder.h b/meson/test cases/common/239 includedir violation/subprojects/sub/include/placeholder.h new file mode 100644 index 000000000..196f917e4 --- /dev/null +++ b/meson/test cases/common/239 includedir violation/subprojects/sub/include/placeholder.h @@ -0,0 +1,3 @@ +#pragma once + +// Git can not handle empty directories, so there must be something here. diff --git a/meson/test cases/common/239 includedir violation/subprojects/sub/meson.build b/meson/test cases/common/239 includedir violation/subprojects/sub/meson.build new file mode 100644 index 000000000..352f4a261 --- /dev/null +++ b/meson/test cases/common/239 includedir violation/subprojects/sub/meson.build @@ -0,0 +1,3 @@ +project('subproj', 'c') + +include_directories('.') diff --git a/meson/test cases/common/239 includedir violation/test.json b/meson/test cases/common/239 includedir violation/test.json new file mode 100644 index 000000000..fea19a109 --- /dev/null +++ b/meson/test cases/common/239 includedir violation/test.json @@ -0,0 +1,9 @@ +{ + "stdout": [ + { + "line": ".*WARNING: include_directories sandbox violation!", + "match": "re", + "count": 1 + } + ] +} diff --git a/meson/test cases/common/24 library versions/lib.c b/meson/test cases/common/24 library versions/lib.c new file mode 100644 index 000000000..e49cb2d66 --- /dev/null +++ b/meson/test cases/common/24 library versions/lib.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC myFunc(void) { + return 55; +} diff --git a/meson/test cases/common/24 library versions/meson.build b/meson/test cases/common/24 library versions/meson.build new file mode 100644 index 000000000..2e2bef769 --- /dev/null +++ b/meson/test cases/common/24 library versions/meson.build @@ -0,0 +1,9 @@ +project('library versions', 'c') + +shared_library('somelib', 'lib.c', + name_prefix : 'prefix', + name_suffix : 'suffix', + install_dir : 'lib', + install : true) + +subdir('subdir') diff --git a/meson/test cases/common/24 library versions/subdir/meson.build b/meson/test cases/common/24 library versions/subdir/meson.build new file mode 100644 index 000000000..a83fdb5a9 --- /dev/null +++ b/meson/test cases/common/24 library versions/subdir/meson.build @@ -0,0 +1,8 @@ +# Test that using files generated with configure_file as sources works. +# We do this inside a subdir so that the path isn't accidentally correct +# because there is no structure in the build dir. 
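# configure_file() below copies ../lib.c into this subdir's build directory
# and returns a File object; passing that object to shared_library() is what
# exercises building from a generated source inside a subdir.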
+genlib = configure_file(input : '../lib.c', + output : 'genlib.c', + copy: true) +shared_library('genlib', genlib, + install : false) diff --git a/meson/test cases/common/24 library versions/test.json b/meson/test cases/common/24 library versions/test.json new file mode 100644 index 000000000..64b7ab0e0 --- /dev/null +++ b/meson/test cases/common/24 library versions/test.json @@ -0,0 +1,7 @@ +{ + "installed": [ + {"type": "file", "file": "usr/lib/prefixsomelib.suffix"}, + {"type": "implib", "file": "usr/lib/prefixsomelib"}, + {"type": "pdb", "file": "usr/lib/prefixsomelib"} + ] +} diff --git a/meson/test cases/common/240 dependency native host == build/meson.build b/meson/test cases/common/240 dependency native host == build/meson.build new file mode 100644 index 000000000..bdcd93cef --- /dev/null +++ b/meson/test cases/common/240 dependency native host == build/meson.build @@ -0,0 +1,18 @@ +project('foo', 'c') + +if meson.is_cross_build() + error('MESON_SKIP_TEST Test does not make sense for cross builds') +endif + +dep_zlib = dependency('zlib', required : false) +if not dep_zlib.found() + error('MESON_SKIP_TEST Test requires zlib') +endif +dependency('zlib', native : true, required : false) +dependency('zlib', native : false) + +# `native: true` should not make a difference when doing a native build. +meson.override_dependency('expat', declare_dependency()) +dependency('expat') +dependency('expat', native : true) +dependency('expat', native : false) diff --git a/meson/test cases/common/240 dependency native host == build/test.json b/meson/test cases/common/240 dependency native host == build/test.json new file mode 100644 index 000000000..5e2a715e3 --- /dev/null +++ b/meson/test cases/common/240 dependency native host == build/test.json @@ -0,0 +1,14 @@ +{ + "stdout": [ + { + "line": "Dependency zlib found: YES .* \\(cached\\)", + "match": "re", + "count": 2 + }, + { + "line": "Dependency expat found: YES .* \\(overridden\\)", + "match": "re", + "count": 3 + } + ] +} diff --git a/meson/test cases/common/241 set and get variable/meson.build b/meson/test cases/common/241 set and get variable/meson.build new file mode 100644 index 000000000..4ecdb3529 --- /dev/null +++ b/meson/test cases/common/241 set and get variable/meson.build @@ -0,0 +1,71 @@ +project('set and get') + +var1 = 'test1.txt' +var2 = files('test1.txt')[0] + +# Use is_disabler for accessing variables +assert(var1 == 'test1.txt') +assert(not is_disabler(var2)) + +# Ensure that set variables behave correctly +set_variable('var3', 'test2.txt') +set_variable('var4', files('test2.txt')[0]) + +assert(var3 == 'test2.txt') +assert(not is_disabler(var4)) + +# Test Equality +assert(var1 == get_variable('var1')) +assert(var2 == get_variable('var2')) + +# Test get_variable directly +assert(get_variable('var1') == 'test1.txt') +assert(not is_disabler(get_variable('var2'))) +assert(get_variable('var3') == 'test2.txt') +assert(not is_disabler(get_variable('var4'))) + +# Test get_variable indirectly + +var5 = get_variable('var1') +var6 = get_variable('var2') +var7 = get_variable('var3') +var8 = get_variable('var4') +set_variable('var9', get_variable('var7')) +set_variable('var0', get_variable('var8')) + +assert(var5 == 'test1.txt') +assert(not is_disabler(var6)) +assert(var7 == 'test2.txt') +assert(not is_disabler(var8)) +assert(get_variable('var9') == 'test2.txt') +assert(not is_disabler(get_variable('var0'))) +assert(not is_disabler(get_variable('var0', var8))) +assert(not is_disabler(get_variable('----', var8))) +assert(not 
is_disabler(get_variable('----', [var8]))) +assert(not is_disabler(get_variable('----', {'asd': var8}))) + +# test dict get +dict = {'a': var2} + +dict_t1 = dict['a'] +dict_t2 = dict.get('a') +dict_t3 = dict.get('a', var2) +dict_t4 = dict.get('b', var2) + +assert(not is_disabler(dict_t1)) +assert(not is_disabler(dict_t2)) +assert(not is_disabler(dict_t3)) +assert(not is_disabler(dict_t4)) + +# test lists +list = [var2] + +list_t1 = list[0] +list_t2 = list.get(0) +list_t3 = list.get(0, var2) +list_t4 = list.get(1, var2) + +assert(not is_disabler(list_t1)) +assert(not is_disabler(list_t2)) +assert(not is_disabler(list_t3)) +assert(not is_disabler(list_t4)) diff --git a/meson/test cases/common/241 set and get variable/test1.txt b/meson/test cases/common/241 set and get variable/test1.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/241 set and get variable/test2.txt b/meson/test cases/common/241 set and get variable/test2.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/242 custom target feed/data_source.txt b/meson/test cases/common/242 custom target feed/data_source.txt new file mode 100644 index 000000000..0c23cc0c3 --- /dev/null +++ b/meson/test cases/common/242 custom target feed/data_source.txt @@ -0,0 +1 @@ +This is a text only input file. diff --git a/meson/test cases/common/242 custom target feed/meson.build b/meson/test cases/common/242 custom target feed/meson.build new file mode 100644 index 000000000..1cda37d75 --- /dev/null +++ b/meson/test cases/common/242 custom target feed/meson.build @@ -0,0 +1,24 @@ +project('custom target feed', 'c') + +python3 = import('python3').find_python() + +# Note that this will not add a dependency to the compiler executable. +# Code will not be rebuilt if it changes. 
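# (A hypothetical alternative, not used here so that this case stays covered:
# passing find_program('my_compiler.py') in the command line instead of a
# plain path string would let Meson track the script and rebuild on changes.)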
+comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py') + +mytarget = custom_target('bindat', + output : 'data.dat', + input : 'data_source.txt', + feed : true, + command : [python3, comp, '@OUTPUT@'], + install : true, + install_dir : 'subdir' +) + +ct_output_exists = '''import os, sys +if not os.path.exists(sys.argv[1]): + print("could not find {!r} in {!r}".format(sys.argv[1], os.getcwd())) + sys.exit(1) +''' + +test('capture-wrote', python3, args : ['-c', ct_output_exists, mytarget]) diff --git a/meson/test cases/common/242 custom target feed/my_compiler.py b/meson/test cases/common/242 custom target feed/my_compiler.py new file mode 100755 index 000000000..97d42087b --- /dev/null +++ b/meson/test cases/common/242 custom target feed/my_compiler.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import sys + +if __name__ == '__main__': + if len(sys.argv) != 2: + print(sys.argv[0], 'output_file') + sys.exit(1) + ifile = sys.stdin.read() + if ifile != 'This is a text only input file.\n': + print('Malformed input') + sys.exit(1) + with open(sys.argv[1], 'w+') as f: + f.write('This is a binary output file.') diff --git a/meson/test cases/common/242 custom target feed/test.json b/meson/test cases/common/242 custom target feed/test.json new file mode 100644 index 000000000..ba66b024a --- /dev/null +++ b/meson/test cases/common/242 custom target feed/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"type": "file", "file": "usr/subdir/data.dat"} + ] +} diff --git a/meson/test cases/common/243 escape++/meson.build b/meson/test cases/common/243 escape++/meson.build new file mode 100644 index 000000000..5fcc00f5c --- /dev/null +++ b/meson/test cases/common/243 escape++/meson.build @@ -0,0 +1,4 @@ +project('regex escape test', 'c') + +exe = executable('testprog', 'test.c') +test('runtest', exe) diff --git a/meson/test cases/common/243 escape++/test.c b/meson/test cases/common/243 escape++/test.c new file mode 100644 index 000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/common/243 escape++/test.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/25 config subdir/include/config.h.in b/meson/test cases/common/25 config subdir/include/config.h.in new file mode 100644 index 000000000..4c3c62dbe --- /dev/null +++ b/meson/test cases/common/25 config subdir/include/config.h.in @@ -0,0 +1,6 @@ +#ifndef CONFIG_H_ +#define CONFIG_H_ + +#define RETURN_VALUE @number@ + +#endif diff --git a/meson/test cases/common/25 config subdir/include/meson.build b/meson/test cases/common/25 config subdir/include/meson.build new file mode 100644 index 000000000..f14111abe --- /dev/null +++ b/meson/test cases/common/25 config subdir/include/meson.build @@ -0,0 +1,4 @@ +conf_data = configuration_data() +conf_data.set('number', '0') + +configure_file(input:'config.h.in', output:'config.h', configuration:conf_data) diff --git a/meson/test cases/common/25 config subdir/meson.build b/meson/test cases/common/25 config subdir/meson.build new file mode 100644 index 000000000..25f53dbfd --- /dev/null +++ b/meson/test cases/common/25 config subdir/meson.build @@ -0,0 +1,6 @@ +project('subdirconfig', 'c') + +inc = include_directories('include') + +subdir('include') +subdir('src') diff --git a/meson/test cases/common/25 config subdir/src/meson.build b/meson/test cases/common/25 config subdir/src/meson.build new file mode 100644 index 000000000..97598a49f --- /dev/null +++ b/meson/test cases/common/25 config subdir/src/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.c', 
include_directories : inc) +test('subdir config', exe) diff --git a/meson/test cases/common/25 config subdir/src/prog.c b/meson/test cases/common/25 config subdir/src/prog.c new file mode 100644 index 000000000..b9db89045 --- /dev/null +++ b/meson/test cases/common/25 config subdir/src/prog.c @@ -0,0 +1,5 @@ +#include "config.h" + +int main(void) { + return RETURN_VALUE; +} diff --git a/meson/test cases/common/26 find program/meson.build b/meson/test cases/common/26 find program/meson.build new file mode 100644 index 000000000..3b59caa93 --- /dev/null +++ b/meson/test cases/common/26 find program/meson.build @@ -0,0 +1,35 @@ +project('find program', 'c') + +if build_machine.system() == 'windows' + # Things Windows does not provide: + # - an executable to copy files without prompting + # - working command line quoting + # - anything that you might actually need + # Because of these reasons we only check that + # the program can be found. + cp = find_program('xcopy') +else + cp = find_program('donotfindme', 'cp') + gen = generator(cp, \ + output : '@BASENAME@.c', \ + arguments : ['@INPUT@', '@OUTPUT@']) + + generated = gen.process('source.in') + e = executable('prog', generated) + test('external exe', e) +endif + +prog = find_program('print-version.py', version : '>=2.0', required : false) +assert(not prog.found(), 'Version should be too old') + +prog = find_program('print-version.py', version : '>=1.0') +assert(prog.found(), 'Program version should match') + +prog = find_program('print-version-with-prefix.py', version : '>=1.0') +assert(prog.found(), 'Program version should match') + +prog = find_program('test_subdir.py', required : false) +assert(not prog.found(), 'Program should not be found') + +prog = find_program('test_subdir.py', dirs : ['/donotexist', meson.current_source_dir() / 'scripts']) +assert(prog.found(), 'Program should be found') diff --git a/meson/test cases/common/26 find program/print-version-with-prefix.py b/meson/test cases/common/26 find program/print-version-with-prefix.py new file mode 100644 index 000000000..520e0ba8c --- /dev/null +++ b/meson/test cases/common/26 find program/print-version-with-prefix.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +import sys + +if len(sys.argv) != 2 or sys.argv[1] != '--version': + exit(1) + +print('Version: 1.0') diff --git a/meson/test cases/common/26 find program/print-version.py b/meson/test cases/common/26 find program/print-version.py new file mode 100644 index 000000000..4a78e5b85 --- /dev/null +++ b/meson/test cases/common/26 find program/print-version.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +import sys + +if len(sys.argv) != 2 or sys.argv[1] != '--version': + exit(1) + +print('1.0') diff --git a/meson/test cases/common/26 find program/scripts/test_subdir.py b/meson/test cases/common/26 find program/scripts/test_subdir.py new file mode 100644 index 000000000..947ffe408 --- /dev/null +++ b/meson/test cases/common/26 find program/scripts/test_subdir.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +exit(0) diff --git a/meson/test cases/common/26 find program/source.in b/meson/test cases/common/26 find program/source.in new file mode 100644 index 000000000..03b2213bb --- /dev/null +++ b/meson/test cases/common/26 find program/source.in @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/27 multiline string/meson.build b/meson/test cases/common/27 multiline string/meson.build new file mode 100644 index 000000000..a87d29ad9 --- /dev/null +++ b/meson/test cases/common/27 multiline 
string/meson.build @@ -0,0 +1,37 @@ +project('multiline string', 'c') + +x = '''hello again''' +y = '''hello +again''' + +if x == y + error('Things are wrong.') +endif + +multieol = ''' +''' +singleeol = '\n' + +if multieol != singleeol + error('Newline quoting is broken.') +endif + +# And one more for good measure. +quote1 = ''' ' '''.strip() +quote2 = '\'' + +if quote1 != quote2 + error('Single quote quoting is broken.') +endif + +cc = meson.get_compiler('c') +prog = ''' +#include <stdio.h> + +int main(void) { + int num = 1; + printf("%d\n", num); + return 0; +}''' + +assert(cc.compiles(prog), 'multiline test compile failed') diff --git a/meson/test cases/common/28 try compile/invalid.c b/meson/test cases/common/28 try compile/invalid.c new file mode 100644 index 000000000..6c9cfb840 --- /dev/null +++ b/meson/test cases/common/28 try compile/invalid.c @@ -0,0 +1,2 @@ +#include <nonexisting.h> +void func(void) { printf("This won't work.\n"); } diff --git a/meson/test cases/common/28 try compile/meson.build b/meson/test cases/common/28 try compile/meson.build new file mode 100644 index 000000000..cb41e1d5a --- /dev/null +++ b/meson/test cases/common/28 try compile/meson.build @@ -0,0 +1,27 @@ +project('try compile', 'c', 'cpp') + +code = '''#include <stdio.h> +void func(void) { printf("Something.\n"); } +''' + +breakcode = '''#include <nonexisting.h> +void func(void) { printf("This won't work.\n"); } +''' + +foreach compiler : [meson.get_compiler('c'), meson.get_compiler('cpp')] + if compiler.compiles(code, name : 'should succeed') == false + error('Compiler ' + compiler.get_id() + ' is fail.') + endif + + if compiler.compiles(files('valid.c'), name : 'should succeed') == false + error('Compiler ' + compiler.get_id() + ' is fail.') + endif + + if compiler.compiles(breakcode, name : 'should fail') + error('Compiler ' + compiler.get_id() + ' returned true on broken code.') + endif + + if compiler.compiles(files('invalid.c'), name : 'should fail') + error('Compiler ' + compiler.get_id() + ' returned true on broken code.') + endif +endforeach diff --git a/meson/test cases/common/28 try compile/valid.c b/meson/test cases/common/28 try compile/valid.c new file mode 100644 index 000000000..f8e76f4ee --- /dev/null +++ b/meson/test cases/common/28 try compile/valid.c @@ -0,0 +1,2 @@ +#include <stdio.h> +void func(void) { printf("Something.\n"); } diff --git a/meson/test cases/common/29 compiler id/meson.build b/meson/test cases/common/29 compiler id/meson.build new file mode 100644 index 000000000..280d4f773 --- /dev/null +++ b/meson/test cases/common/29 compiler id/meson.build @@ -0,0 +1,15 @@ +project('compiler_id') + +foreach lang : ['c', 'cpp', 'fortran', 'objc', 'objcpp'] + + if not add_languages(lang, required: false) + continue + endif + + comp = meson.get_compiler(lang) + + message(lang + ' compiler name is: ' + comp.get_id()) + + message(lang + ' linker name is: ' + comp.get_linker_id()) + +endforeach \ No newline at end of file diff --git a/meson/test cases/common/3 static/libfile.c b/meson/test cases/common/3 static/libfile.c new file mode 100644 index 000000000..846485eff --- /dev/null +++ b/meson/test cases/common/3 static/libfile.c @@ -0,0 +1,3 @@ +int libfunc(void) { + return 3; +} diff --git a/meson/test cases/common/3 static/libfile2.c b/meson/test cases/common/3 static/libfile2.c new file mode 100644 index 000000000..4df60c549 --- /dev/null +++ b/meson/test cases/common/3 static/libfile2.c @@ -0,0 +1,3 @@ +int libfunc2(void) { + return 4; +} diff --git a/meson/test cases/common/3 static/meson.build b/meson/test cases/common/3 
static/meson.build new file mode 100644 index 000000000..04ff2f6f3 --- /dev/null +++ b/meson/test cases/common/3 static/meson.build @@ -0,0 +1,14 @@ +project('static library test', 'c') + +lib = static_library('mylib', get_option('source'), + link_args : '-THISMUSTNOBEUSED') # Static linker needs to ignore all link args. +assert(lib.name() == 'mylib') +has_not_changed = false +if is_disabler(lib) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'Static library has changed.') + +assert(not is_disabler(lib), 'Static library is a disabler.') diff --git a/meson/test cases/common/3 static/meson_options.txt b/meson/test cases/common/3 static/meson_options.txt new file mode 100644 index 000000000..7261a1992 --- /dev/null +++ b/meson/test cases/common/3 static/meson_options.txt @@ -0,0 +1 @@ +option('source', type : 'combo', choices : ['libfile.c', 'libfile2.c'], value : 'libfile.c') diff --git a/meson/test cases/common/30 sizeof/config.h.in b/meson/test cases/common/30 sizeof/config.h.in new file mode 100644 index 000000000..a442e8a4e --- /dev/null +++ b/meson/test cases/common/30 sizeof/config.h.in @@ -0,0 +1,2 @@ +#define INTSIZE @INTSIZE@ +#define WCHARSIZE @WCHARSIZE@ diff --git a/meson/test cases/common/30 sizeof/meson.build b/meson/test cases/common/30 sizeof/meson.build new file mode 100644 index 000000000..9de5b7816 --- /dev/null +++ b/meson/test cases/common/30 sizeof/meson.build @@ -0,0 +1,33 @@ +project('sizeof', 'c', 'cpp') + +# Test with C +cc = meson.get_compiler('c') + +intsize = cc.sizeof('int') +wcharsize = cc.sizeof('wchar_t', prefix : '#include<wchar.h>') + +cd = configuration_data() +cd.set('INTSIZE', intsize) +cd.set('WCHARSIZE', wcharsize) +cd.set('CONFIG', 'config.h') +configure_file(input : 'config.h.in', output : 'config.h', configuration : cd) +s = configure_file(input : 'prog.c.in', output : 'prog.c', configuration : cd) + +e = executable('prog', s) +test('sizeof test', e) + +# Test with C++ +cpp = meson.get_compiler('cpp') + +intsize = cpp.sizeof('int') +wcharsize = cpp.sizeof('wchar_t', prefix : '#include<wchar.h>') + +cdpp = configuration_data() +cdpp.set('INTSIZE', intsize) +cdpp.set('WCHARSIZE', wcharsize) +cdpp.set('CONFIG', 'config.hpp') +configure_file(input : 'config.h.in', output : 'config.hpp', configuration : cdpp) +spp = configure_file(input : 'prog.c.in', output : 'prog.cc', configuration : cdpp) + +epp = executable('progpp', spp) +test('sizeof test c++', epp) diff --git a/meson/test cases/common/30 sizeof/prog.c.in b/meson/test cases/common/30 sizeof/prog.c.in new file mode 100644 index 000000000..44918ecc4 --- /dev/null +++ b/meson/test cases/common/30 sizeof/prog.c.in @@ -0,0 +1,15 @@ +#include "@CONFIG@" +#include <stdio.h> +#include <wchar.h> + +int main(void) { + if(INTSIZE != sizeof(int)) { + fprintf(stderr, "Mismatch: detected int size %d, actual size %d.\n", INTSIZE, (int)sizeof(int)); + return 1; + } + if(WCHARSIZE != sizeof(wchar_t)) { + fprintf(stderr, "Mismatch: detected wchar size %d, actual size %d.\n", WCHARSIZE, (int)sizeof(wchar_t)); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/31 define10/config.h.in b/meson/test cases/common/31 define10/config.h.in new file mode 100644 index 000000000..dc7734637 --- /dev/null +++ b/meson/test cases/common/31 define10/config.h.in @@ -0,0 +1,2 @@ +#mesondefine ONE +#mesondefine ZERO diff --git a/meson/test cases/common/31 define10/meson.build b/meson/test cases/common/31 define10/meson.build new file mode 100644 index 000000000..a28e7e48d --- /dev/null +++ b/meson/test 
cases/common/31 define10/meson.build @@ -0,0 +1,12 @@ +project('set10test', 'c') + +conf = configuration_data() +conf.set10('ONE', true) +conf.set10('ZERO', false) + +configure_file(input : 'config.h.in', + output : 'config.h', + configuration : conf) + +exe = executable('prog', 'prog.c') +test('10test', exe) diff --git a/meson/test cases/common/31 define10/prog.c b/meson/test cases/common/31 define10/prog.c new file mode 100644 index 000000000..519b40dcb --- /dev/null +++ b/meson/test cases/common/31 define10/prog.c @@ -0,0 +1,13 @@ +#include <stdio.h> +#include"config.h" + +int main(void) { + if(ONE != 1) { + fprintf(stderr, "ONE is not 1.\n"); + return 1; + } + if(ZERO != 0) { + fprintf(stderr, "ZERO is not 0.\n"); + } + return 0; +} diff --git a/meson/test cases/common/32 has header/meson.build b/meson/test cases/common/32 has header/meson.build new file mode 100644 index 000000000..8096763a9 --- /dev/null +++ b/meson/test cases/common/32 has header/meson.build @@ -0,0 +1,54 @@ +project('has header', 'c', 'cpp') + +host_system = host_machine.system() + +non_existent_header = 'ouagadougou.h' + +# Copy it into the builddir to ensure that it isn't found even if it's there +configure_file(input : non_existent_header, + output : non_existent_header, + configuration : configuration_data()) + +# Test that the fallback to __has_include also works on all compilers +if host_system != 'darwin' + fallbacks = ['', '\n#undef __has_include'] +else + # On Darwin's clang you can't redefine builtin macros so the above doesn't work + fallbacks = [''] +endif + +foreach fallback : fallbacks + foreach comp : [meson.get_compiler('c'), meson.get_compiler('cpp')] + assert(comp.has_header('stdio.h', prefix : fallback), 'Stdio missing.') + + # stdio.h doesn't actually need stdlib.h, but just test that setting the + # prefix does not result in an error. + assert(comp.has_header('stdio.h', prefix : '#include <stdlib.h>' + fallback), + 'Stdio missing.') + + # XInput.h should not require type definitions from windows.h, but it does + # require macro definitions. Specifically, it requires an arch setting for + # VS2015 at least. + # We only do this check on MSVC because MinGW often defines its own wrappers + # that pre-include windows.h + if comp.get_id() == 'msvc' + assert(comp.has_header('XInput.h', prefix : '#include <windows.h>' + fallback), + 'XInput.h should not be missing on Windows') + assert(comp.has_header('XInput.h', prefix : '#define _X86_' + fallback), + 'XInput.h should not need windows.h') + endif + + # Test that the following GCC bug doesn't happen: + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80005 + # https://github.com/mesonbuild/meson/issues/1458 + if host_system == 'linux' + assert(comp.has_header('linux/if.h', prefix : fallback), + 'Could not find <linux/if.h>') + endif + + # This header exists in the source and the builddir, but we still must not + # find it since we are looking in the system directories. 
+ assert(not comp.has_header(non_existent_header, prefix : fallback), + 'Found non-existent header.') + endforeach +endforeach diff --git a/meson/test cases/common/32 has header/ouagadougou.h b/meson/test cases/common/32 has header/ouagadougou.h new file mode 100644 index 000000000..2f76c49cc --- /dev/null +++ b/meson/test cases/common/32 has header/ouagadougou.h @@ -0,0 +1 @@ +#define OMG_THIS_SHOULDNT_BE_FOUND diff --git a/meson/test cases/common/33 run program/check-env.py b/meson/test cases/common/33 run program/check-env.py new file mode 100644 index 000000000..7f106c633 --- /dev/null +++ b/meson/test cases/common/33 run program/check-env.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import os + +assert os.environ['MY_PATH'] == os.pathsep.join(['0', '1', '2']) + diff --git a/meson/test cases/common/33 run program/get-version.py b/meson/test cases/common/33 run program/get-version.py new file mode 100644 index 000000000..a22d55998 --- /dev/null +++ b/meson/test cases/common/33 run program/get-version.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +print('1.2') diff --git a/meson/test cases/common/33 run program/meson.build b/meson/test cases/common/33 run program/meson.build new file mode 100644 index 000000000..8e472fd64 --- /dev/null +++ b/meson/test cases/common/33 run program/meson.build @@ -0,0 +1,85 @@ +project('run command', version : run_command('get-version.py', check : true).stdout().strip()) + +if build_machine.system() == 'windows' + c = run_command('cmd', '/c', 'echo', 'hello') +else + c = run_command('echo', 'hello') +endif + +correct = 'hello' + +if c.returncode() != 0 + error('Executing echo failed.') +endif + +result = c.stdout().strip() + +if result != correct + error('Getting stdout failed.') +endif + +if c.stderr() != '' + error('Extra text in stderr.') +endif + +# Now the same with a script. 
+ +if build_machine.system() == 'windows' + cs = run_command('scripts/hello.bat') +else + cs = run_command('scripts/hello.sh') +endif + +if cs.returncode() != 0 + error('Executing script failed.') +endif + +if cs.stdout().strip() != correct + error('Getting stdout failed (script).') +endif + +if cs.stderr() != '' + error('Extra text in stderr (script).') +endif + +# We should be able to have files() in argument +f = files('meson.build') + +if build_machine.system() == 'windows' + c = run_command('cmd', '/c', 'echo', f) +else + c = run_command('echo', f) +endif + +if c.returncode() != 0 + error('Using files() in argument failed.') +endif + +py3 = import('python3').find_python() + +ret = run_command(py3, '-c', 'print("some output")') +assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr()) +assert(ret.stdout() == 'some output\n', 'failed to run python3') + +ret = run_command(py3, '-c', 'print("some output")', capture : false) +assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr()) +assert(ret.stdout() == '', 'stdout is "@0@" instead of empty'.format(ret.stdout())) + +c_env = environment() +c_env.append('CUSTOM_ENV_VAR', 'FOOBAR') +ret = run_command(py3, '-c', 'import os; print(os.environ.get("CUSTOM_ENV_VAR"))', env : c_env) +assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr()) +assert(ret.stdout() == 'FOOBAR\n', 'stdout is "@0@" instead of FOOBAR'.format(ret.stdout())) + +dd = find_program('dd', required : false) +if dd.found() + ret = run_command(dd, 'if=/dev/urandom', 'bs=10', 'count=1', capture: false) + assert(ret.returncode() == 0, 'failed to run dd: ' + ret.stderr()) + assert(ret.stdout() == '', 'stdout is "@0@" instead of empty'.format(ret.stdout())) +endif + +env = environment() +env.append('MY_PATH', '1') +env.append('MY_PATH', '2') +env.prepend('MY_PATH', '0') +run_command('check-env.py', env: env, check: true) diff --git a/meson/test cases/common/33 run program/scripts/hello.bat b/meson/test cases/common/33 run program/scripts/hello.bat new file mode 100644 index 000000000..cbc346bf7 --- /dev/null +++ b/meson/test cases/common/33 run program/scripts/hello.bat @@ -0,0 +1,2 @@ +@ECHO OFF +ECHO hello diff --git a/meson/test cases/common/33 run program/scripts/hello.sh b/meson/test cases/common/33 run program/scripts/hello.sh new file mode 100755 index 000000000..2a22daa88 --- /dev/null +++ b/meson/test cases/common/33 run program/scripts/hello.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +echo hello diff --git a/meson/test cases/common/34 logic ops/meson.build b/meson/test cases/common/34 logic ops/meson.build new file mode 100644 index 000000000..897054e74 --- /dev/null +++ b/meson/test cases/common/34 logic ops/meson.build @@ -0,0 +1,95 @@ +project('logicopts', 'c') + +t = true +f = false + +if (true) + message('Ok.') +else + error('Not ok.') +endif + +if (false) + error('Not ok.') +else + message('Ok.') +endif + +if (f) + error('Not ok.') +else + message('Ok.') +endif + +if (t) + message('Ok.') +else + error('Not ok.') +endif + +if true and t + message('Ok.') +else + error('Not ok.') +endif + +if t and false + error('Not ok.') +else + message('Ok.') +endif + +if f and t + error('Not ok.') +else + message('Ok.') +endif + +if f or false + error('Not ok.') +else + message('Ok.') +endif + +if true or f + message('Ok.') +else + error('Not ok.') +endif + +if t or true + message('Ok.') +else + error('Not ok.') +endif + +if not true + error('Negation failed.') +else + message('Ok.') +endif + +if not f + message('Ok.') +else + error('Negation 
failed.') +endif + + +if f or f or f or f or f or f or f or f or t + message('Ok.') +else + error('Chain of ors failed.') +endif + +if t and t and t and t and t and t and t and t and f + error('Chain of ands failed.') +else + message('Ok.') +endif + +if t and t or t + message('Ok.') +else + error('Combination of and-or failed.') +endif diff --git a/meson/test cases/common/35 string operations/meson.build b/meson/test cases/common/35 string operations/meson.build new file mode 100644 index 000000000..ca0342daf --- /dev/null +++ b/meson/test cases/common/35 string operations/meson.build @@ -0,0 +1,122 @@ +project('string formatting', 'c') + +templ = '@0@bar@1@' + +assert(templ.format('foo', 'baz') == 'foobarbaz', 'Basic string formatting is broken.') + +assert('@0@'.format(1) == '1', 'String number formatting is broken.') + +assert('@0@'.format(true) == 'true', 'String boolean formatting is broken.') + +templ2 = '@0@' +subs2 = '42' + +assert(templ2.format(subs2) == '42', 'String formatting with variables is broken.') + +assert('@@0@@ @@1@@'.format(1, 2) == '@1@ @2@', 'String format is recursive.') + +long = 'abcde' +prefix = 'abc' +suffix = 'cde' + +assert(long.replace('b', 'd') == 'adcde') +assert(long.replace('z', 'x') == long) +assert(long.replace(prefix, suffix) == 'cdede') + +assert(long.startswith(prefix), 'Prefix.') + +assert(not long.startswith(suffix), 'Not prefix.') + +assert(long.endswith(suffix), 'Suffix.') + +assert(not long.endswith(prefix), 'Not suffix.') + +assert(long.contains(prefix), 'Does not contain prefix') + +assert(long.contains(suffix), 'Does not contain suffix') + +assert(long.contains('bcd'), 'Does not contain middle part') + +assert(not long.contains('dc'), 'Broken contains') + +assert(long.to_upper() == 'ABCDE', 'Broken to_upper') + +assert(long.to_upper().to_lower() == long, 'Broken to_lower') + +assert('struct stat.st_foo'.underscorify() == 'struct_stat_st_foo', 'Broken underscorify') + +assert('#include '.underscorify() == '_include__foo_bar_h_', 'Broken underscorify') + +# case should not change, space should be replaced, numbers are ok too +assert('Do SomeThing 09'.underscorify() == 'Do_SomeThing_09', 'Broken underscorify') + +assert('3'.to_int() == 3, 'String int conversion does not work.') + +assert(true.to_string() == 'true', 'bool string conversion failed') +assert(false.to_string() == 'false', 'bool string conversion failed') +assert(true.to_string('yes', 'no') == 'yes', 'bool string conversion with args failed') +assert(false.to_string('yes', 'no') == 'no', 'bool string conversion with args failed') +assert('@0@'.format(true) == 'true', 'bool string formatting failed') + +assert(' '.join(['a', 'b', 'c']) == 'a b c', 'join() array broken') +assert(''.join(['a', 'b', 'c']) == 'abc', 'empty join() broken') +assert(' '.join(['a']) == 'a', 'single join broken') + +version_number = '1.2.8' + +assert(version_number.version_compare('>=1.2.8'), 'Version_compare gt broken') +assert(not version_number.version_compare('>1.2.8'), 'Version_compare greater broken') +assert(not version_number.version_compare('<1.2.8'), 'Version_compare less broken') +assert(version_number.version_compare('<=1.2.8'), 'Version_compare le broken') +assert(version_number.version_compare('==1.2.8'), 'Version_compare eq broken') +assert(not version_number.version_compare('!=1.2.8'), 'Version_compare neq broken') + +assert(version_number.version_compare('<2.0'), 'Version_compare major less broken') +assert(version_number.version_compare('>0.9'), 'Version_compare major greater broken') + 
+assert(' spaces tabs '.strip() == 'spaces tabs', 'Spaces and tabs badly stripped') +assert(''' +multiline string '''.strip() == '''multiline string''', 'Newlines badly stripped') +assert('"1.1.20"'.strip('"') == '1.1.20', '" badly stripped') +assert('"1.1.20"'.strip('".') == '1.1.20', '". badly stripped') +assert('"1.1.20" '.strip('" ') == '1.1.20', '". badly stripped') + +bs_c = '''\c''' +bs_bs_c = '''\\c''' +nl = ''' +''' +bs_n = '''\n''' +bs_nl = '''\ +''' +bs_bs_n = '''\\n''' +bs_bs_nl = '''\\ +''' +bs_bs = '''\\''' +bs = '''\''' + +assert('\c' == bs_c, 'Single backslash broken') +assert('\\c' == bs_c, 'Double backslash broken') +assert('\\\c' == bs_bs_c, 'Three backslash broken') +assert('\\\\c' == bs_bs_c, 'Four backslash broken') +assert('\n' == nl, 'Newline escape broken') +assert('\\n' == bs_n, 'Double backslash broken before n') +assert('\\\n' == bs_nl, 'Three backslash broken before n') +assert('\\\\n' == bs_bs_n, 'Four backslash broken before n') +assert('\\\\\n' == bs_bs_nl, 'Five backslash broken before n') +assert('\\\\' == bs_bs, 'Double-backslash broken') +assert('\\' == bs, 'Backslash broken') + +mysubstring='foobarbaz' +assert(mysubstring.substring() == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(0) == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(1) == 'oobarbaz', 'substring is broken') +assert(mysubstring.substring(-5) == 'arbaz', 'substring is broken') +assert(mysubstring.substring(1, 4) == 'oob', 'substring is broken') +assert(mysubstring.substring(1,-5) == 'oob', 'substring is broken') +assert(mysubstring.substring(1, 0) == '', 'substring is broken') +assert(mysubstring.substring(0, 100) == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(-1, -5) == '', 'substring is broken') +assert(mysubstring.substring(10, -25) == '', 'substring is broken') +assert(mysubstring.substring(-4, 2) == '', 'substring is broken') +assert(mysubstring.substring(10, 9) == '', 'substring is broken') +assert(mysubstring.substring(8, 10) == 'z', 'substring is broken') diff --git a/meson/test cases/common/36 has function/meson.build b/meson/test cases/common/36 has function/meson.build new file mode 100644 index 000000000..a3f0a3c9d --- /dev/null +++ b/meson/test cases/common/36 has function/meson.build @@ -0,0 +1,116 @@ +project('has function', 'c', 'cpp') + +host_system = host_machine.system() + +# This is used in the `test_compiler_check_flags_order` unit test +unit_test_args = '-I/tmp' +defines_has_builtin = '''#ifndef __has_builtin +#error "no __has_builtin" +#endif +''' +compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')] + +foreach cc : compilers + if not cc.has_function('printf', prefix : '#include', + args : unit_test_args) + error('"printf" function not found (should always exist).') + endif + + # Should also be able to detect it without specifying the header + # We check for a different function here to make sure the result is + # not taken from a cache (ie. the check above) + # On MSVC fprintf is defined as an inline function in the header, so it cannot + # be found without the include. 
+ if not ['msvc', 'intel-cl'].contains(cc.get_id()) + assert(cc.has_function('fprintf', args : unit_test_args), + '"fprintf" function not found without include (on !msvc).') + else + assert(cc.has_function('fprintf', prefix : '#include ', + args : unit_test_args), + '"fprintf" function not found with include (on msvc).') + # Compiler intrinsics + assert(cc.has_function('strcmp'), + 'strcmp intrinsic should have been found on MSVC') + assert(cc.has_function('strcmp', prefix : '#include '), + 'strcmp intrinsic should have been found with #include on MSVC') + endif + + if cc.has_function('hfkerhisadf', prefix : '#include', + args : unit_test_args) + error('Found non-existent function "hfkerhisadf".') + endif + + if cc.has_function('hfkerhisadf', args : unit_test_args) + error('Found non-existent function "hfkerhisadf".') + endif + + # With glibc (before 2.32, see below) on Linux, lchmod is a stub that will + # always return an error, we want to detect that and declare that the + # function is not available. + # We can't check for the C library used here of course, but the main + # alternative Linux C library (musl) doesn't use glibc's stub mechanism; + # also, it has implemented lchmod since 2013, so it should be safe to check + # that lchmod is available on Linux when not using glibc. + if host_system == 'linux' or host_system == 'darwin' + assert (cc.has_function('poll', prefix : '#include ', + args : unit_test_args), + 'couldn\'t detect "poll" when defined by a header') + lchmod_prefix = '#include \n#include ' + has_lchmod = cc.has_function('lchmod', prefix : lchmod_prefix, args : unit_test_args) + + if host_system == 'linux' + # __GLIBC__ macro can be retrieved by including almost any C library header + glibc_major = cc.get_define('__GLIBC__', prefix: '#include ', args: unit_test_args) + # __GLIBC__ will only be set for glibc + if glibc_major != '' + glibc_minor = cc.get_define('__GLIBC_MINOR__', prefix: '#include ', args: unit_test_args) + glibc_vers = '@0@.@1@'.format(glibc_major, glibc_minor) + message('GLIBC version:', glibc_vers) + + # lchmod was implemented in glibc 2.32 (https://sourceware.org/pipermail/libc-announce/2020/000029.html) + if glibc_vers.version_compare('<2.32') + assert (not has_lchmod, '"lchmod" check should have failed') + else + assert (has_lchmod, '"lchmod" check should have succeeded') + endif + else + # Other C libraries for Linux should have lchmod + assert (has_lchmod, '"lchmod" check should have succeeded') + endif + else + # macOS and *BSD have lchmod + assert (has_lchmod, '"lchmod" check should have succeeded') + endif + # Check that built-ins are found properly both with and without headers + assert(cc.has_function('alloca', args : unit_test_args), + 'built-in alloca must be found on ' + host_system) + assert(cc.has_function('alloca', prefix : '#include ', + args : unit_test_args), + 'built-in alloca must be found with #include') + if not cc.compiles(defines_has_builtin, args : unit_test_args) + assert(not cc.has_function('alloca', + prefix : '#include \n#undef alloca', + args : unit_test_args), + 'built-in alloca must not be found with #include and #undef') + endif + endif + + # For some functions one needs to define _GNU_SOURCE before including the + # right headers to get them picked up. 
Make sure we can detect these functions + as well without any prefix + if cc.has_header_symbol('sys/socket.h', 'recvmmsg', + prefix : '#define _GNU_SOURCE', + args : unit_test_args) + # We assume that if recvmmsg exists sendmmsg does too + assert (cc.has_function('sendmmsg', args : unit_test_args), + 'Failed to detect function "sendmmsg" (should always exist).') + endif + + # We should be able to find GCC and Clang __builtin functions + if ['gcc', 'clang'].contains(cc.get_id()) + # __builtin_constant_p is documented to exist at least as far back as + # GCC 2.95.3 + assert(cc.has_function('__builtin_constant_p', args : unit_test_args), + '__builtin_constant_p must be found under gcc and clang') + endif +endforeach diff --git a/meson/test cases/common/37 has member/meson.build b/meson/test cases/common/37 has member/meson.build new file mode 100644 index 000000000..4e61956d9 --- /dev/null +++ b/meson/test cases/common/37 has member/meson.build @@ -0,0 +1,21 @@ +project('has member', 'c', 'cpp') + +compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')] + +foreach cc : compilers + if not cc.has_member('struct tm', 'tm_sec', prefix : '#include<time.h>') + error('Did not detect member of "struct tm" that exists: "tm_sec"') + endif + + if cc.has_member('struct tm', 'tm_nonexistent', prefix : '#include<time.h>') + error('Not existing member "tm_nonexistent" found.') + endif + + if not cc.has_members('struct tm', 'tm_sec', 'tm_min', prefix : '#include<time.h>') + error('Did not detect members of "struct tm" that exist: "tm_sec" "tm_min"') + endif + + if cc.has_members('struct tm', 'tm_sec', 'tm_nonexistent2', prefix : '#include<time.h>') + error('Not existing member "tm_nonexistent2" found.') + endif +endforeach diff --git a/meson/test cases/common/38 alignment/meson.build b/meson/test cases/common/38 alignment/meson.build new file mode 100644 index 000000000..a9bd65b73 --- /dev/null +++ b/meson/test cases/common/38 alignment/meson.build @@ -0,0 +1,31 @@ +project('alignment', 'c', 'cpp') + +compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')] + +foreach cc : compilers + # These tests should return the same value on all + # platforms. If (and when) they don't, fix 'em up. + if cc.alignment('char') != 1 + error('Alignment of char misdetected.') + endif + + ptr_size = cc.sizeof('void*') + dbl_alignment = cc.alignment('double') + + # These tests are not thorough. Doing this properly + # would take a lot of work because it is strongly + # platform and compiler dependent. So just check + # that they produce something fairly sane. 
+ + if ptr_size == 8 or ptr_size == 4 + message('Size of ptr ok.') + else + error('Size of ptr misdetected.') + endif + + if dbl_alignment == 8 or dbl_alignment == 4 + message('Alignment of double ok.') + else + error('Alignment of double misdetected.') + endif +endforeach diff --git a/meson/test cases/common/39 library chain/main.c b/meson/test cases/common/39 library chain/main.c new file mode 100644 index 000000000..9c88e73ae --- /dev/null +++ b/meson/test cases/common/39 library chain/main.c @@ -0,0 +1,5 @@ +int libfun(void); + +int main(void) { + return libfun(); +} diff --git a/meson/test cases/common/39 library chain/meson.build b/meson/test cases/common/39 library chain/meson.build new file mode 100644 index 000000000..77528d7d0 --- /dev/null +++ b/meson/test cases/common/39 library chain/meson.build @@ -0,0 +1,5 @@ +project('libchain', 'c') + +subdir('subdir') +e = executable('prog', 'main.c', link_with : lib1, install : true) +test('tst', e) diff --git a/meson/test cases/common/39 library chain/subdir/lib1.c b/meson/test cases/common/39 library chain/subdir/lib1.c new file mode 100644 index 000000000..88436b27e --- /dev/null +++ b/meson/test cases/common/39 library chain/subdir/lib1.c @@ -0,0 +1,17 @@ +int lib2fun(void); +int lib3fun(void); + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC libfun(void) { + return lib2fun() + lib3fun(); +} diff --git a/meson/test cases/common/39 library chain/subdir/meson.build b/meson/test cases/common/39 library chain/subdir/meson.build new file mode 100644 index 000000000..ab71bda18 --- /dev/null +++ b/meson/test cases/common/39 library chain/subdir/meson.build @@ -0,0 +1,4 @@ +subdir('subdir2') +subdir('subdir3') + +lib1 = shared_library('lib1', 'lib1.c', install : false, link_with : [lib2, lib3]) diff --git a/meson/test cases/common/39 library chain/subdir/subdir2/lib2.c b/meson/test cases/common/39 library chain/subdir/subdir2/lib2.c new file mode 100644 index 000000000..e788f0795 --- /dev/null +++ b/meson/test cases/common/39 library chain/subdir/subdir2/lib2.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC lib2fun(void) { + return 0; +} diff --git a/meson/test cases/common/39 library chain/subdir/subdir2/meson.build b/meson/test cases/common/39 library chain/subdir/subdir2/meson.build new file mode 100644 index 000000000..befd94db8 --- /dev/null +++ b/meson/test cases/common/39 library chain/subdir/subdir2/meson.build @@ -0,0 +1 @@ +lib2 = shared_library('lib2', 'lib2.c', install : false) diff --git a/meson/test cases/common/39 library chain/subdir/subdir3/lib3.c b/meson/test cases/common/39 library chain/subdir/subdir3/lib3.c new file mode 100644 index 000000000..a99c64e66 --- /dev/null +++ b/meson/test cases/common/39 library chain/subdir/subdir3/lib3.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol 
visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC lib3fun(void) { + return 0; +} diff --git a/meson/test cases/common/39 library chain/subdir/subdir3/meson.build b/meson/test cases/common/39 library chain/subdir/subdir3/meson.build new file mode 100644 index 000000000..7bd249a29 --- /dev/null +++ b/meson/test cases/common/39 library chain/subdir/subdir3/meson.build @@ -0,0 +1 @@ +lib3 = shared_library('lib3', 'lib3.c', install : false) diff --git a/meson/test cases/common/39 library chain/test.json b/meson/test cases/common/39 library chain/test.json new file mode 100644 index 000000000..135300de5 --- /dev/null +++ b/meson/test cases/common/39 library chain/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/prog"}, + {"type": "pdb", "file": "usr/bin/prog"} + ] +} diff --git a/meson/test cases/common/4 shared/libfile.c b/meson/test cases/common/4 shared/libfile.c new file mode 100644 index 000000000..0797eecc4 --- /dev/null +++ b/meson/test cases/common/4 shared/libfile.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC libfunc(void) { + return 3; +} diff --git a/meson/test cases/common/4 shared/meson.build b/meson/test cases/common/4 shared/meson.build new file mode 100644 index 000000000..1c88bc587 --- /dev/null +++ b/meson/test cases/common/4 shared/meson.build @@ -0,0 +1,13 @@ +project('shared library test', 'c') +lib = shared_library('mylib', 'libfile.c') +build_target('mylib2', 'libfile.c', target_type: 'shared_library') + +has_not_changed = false +if is_disabler(lib) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'Shared library has changed.') + +assert(not is_disabler(lib), 'Shared library is a disabler.') diff --git a/meson/test cases/common/40 options/meson.build b/meson/test cases/common/40 options/meson.build new file mode 100644 index 000000000..2eccef7b0 --- /dev/null +++ b/meson/test cases/common/40 options/meson.build @@ -0,0 +1,45 @@ +project('options', 'c') + +if get_option('testoption') != 'optval' + error('Incorrect value to test option') +endif + +if get_option('other_one') != false + error('Incorrect value to boolean option.') +endif + +if get_option('combo_opt') != 'combo' + error('Incorrect value to combo option.') +endif + +if get_option('array_opt') != ['one', 'two'] + message(get_option('array_opt')) + error('Incorrect value for array option') +endif + +# If the default changes, update test cases/unit/13 reconfigure +if get_option('b_lto') != false + error('Incorrect value in base option.') +endif + +if get_option('includedir') != 'include' + error('Incorrect value in builtin option.') +endif + +if get_option('integer_opt') != 3 + error('Incorrect value in integer option.') +endif + +if get_option('neg_int_opt') != -3 + error('Incorrect value in negative integer option.') +endif + +if get_option('CaseSenSiTivE') != 'Some CAPS' + error('Incorrect value in mixed caps option.') +endif + +if get_option('CASESENSITIVE') != 'ALL CAPS' + error('Incorrect value in all caps option.') +endif + +assert(get_option('wrap_mode') == 'default', 'Wrap mode option is broken.') diff --git a/meson/test cases/common/40 options/meson_options.txt b/meson/test cases/common/40 options/meson_options.txt new file mode 100644 index 
000000000..8067eae17 --- /dev/null +++ b/meson/test cases/common/40 options/meson_options.txt @@ -0,0 +1,9 @@ +option('testoption', type : 'string', value : 'optval', description : 'An option ' + 'to do something') +option('other_one', type : 'boolean', value : not (not (not (not false)))) +option('combo_opt', type : 'co' + 'mbo', choices : ['one', 'two', 'combo'], value : 'combo') +option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two']) +option('free_array_opt', type : 'array') +option('integer_opt', type : 'integer', min : 0, max : -(-5), value : 3) +option('neg' + '_' + 'int' + '_' + 'opt', type : 'integer', min : -5, max : 5, value : -3) +option('CaseSenSiTivE', type : 'string', value: 'Some CAPS', description : 'An option with mixed capitaliziation') +option('CASESENSITIVE', type : 'string', value: 'ALL CAPS', description : 'An option with all caps') diff --git a/meson/test cases/common/41 test args/cmd_args.c b/meson/test cases/common/41 test args/cmd_args.c new file mode 100644 index 000000000..545b795c5 --- /dev/null +++ b/meson/test cases/common/41 test args/cmd_args.c @@ -0,0 +1,18 @@ +#include +#include + +int main(int argc, char **argv) { + if(argc != 3) { + fprintf(stderr, "Incorrect number of arguments.\n"); + return 1; + } + if(strcmp(argv[1], "first") != 0) { + fprintf(stderr, "First argument is wrong.\n"); + return 1; + } + if(strcmp(argv[2], "second") != 0) { + fprintf(stderr, "Second argument is wrong.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/41 test args/copyfile.py b/meson/test cases/common/41 test args/copyfile.py new file mode 100644 index 000000000..ff42ac359 --- /dev/null +++ b/meson/test cases/common/41 test args/copyfile.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys +import shutil + +shutil.copyfile(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/41 test args/env2vars.c b/meson/test cases/common/41 test args/env2vars.c new file mode 100644 index 000000000..e940c9a8c --- /dev/null +++ b/meson/test cases/common/41 test args/env2vars.c @@ -0,0 +1,23 @@ +#include +#include +#include + +int main(void) { + if(strcmp(getenv("first"), "something-else") != 0) { + fprintf(stderr, "First envvar is wrong. %s\n", getenv("first")); + return 1; + } + if(strcmp(getenv("second"), "val2") != 0) { + fprintf(stderr, "Second envvar is wrong.\n"); + return 1; + } + if(strcmp(getenv("third"), "val3:and_more") != 0) { + fprintf(stderr, "Third envvar is wrong.\n"); + return 1; + } + if(strstr(getenv("PATH"), "fakepath:") != NULL) { + fprintf(stderr, "Third envvar is wrong.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/41 test args/envvars.c b/meson/test cases/common/41 test args/envvars.c new file mode 100644 index 000000000..086b0be34 --- /dev/null +++ b/meson/test cases/common/41 test args/envvars.c @@ -0,0 +1,23 @@ +#include +#include +#include + +int main(void) { + if(strcmp(getenv("first"), "val1") != 0) { + fprintf(stderr, "First envvar is wrong. 
%s\n", getenv("first")); + return 1; + } + if(strcmp(getenv("second"), "val2") != 0) { + fprintf(stderr, "Second envvar is wrong.\n"); + return 1; + } + if(strcmp(getenv("third"), "val3:and_more") != 0) { + fprintf(stderr, "Third envvar is wrong.\n"); + return 1; + } + if(strstr(getenv("PATH"), "fakepath:") != NULL) { + fprintf(stderr, "Third envvar is wrong.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/41 test args/meson.build b/meson/test cases/common/41 test args/meson.build new file mode 100644 index 000000000..b21f1ad00 --- /dev/null +++ b/meson/test cases/common/41 test args/meson.build @@ -0,0 +1,35 @@ +project('test features', 'c') + +e1 = executable('cmd_args', 'cmd_args.c') +e2 = executable('envvars', 'envvars.c') +e3 = executable('env2vars', 'env2vars.c') + +env = environment() +env.set('first', 'val1') +env.set('second', 'val2') +env.set('third', 'val3', 'and_more', separator: ':') +env.append('PATH', 'fakepath', separator: ':') + +# Make sure environment objects are copied on assignment and we can +# change the copy without affecting the original environment object. +env2 = env +env2.set('first', 'something-else') + +test('command line arguments', e1, args : ['first', 'second']) +test('environment variables', e2, env : env) +test('environment variables 2', e3, env : env2) + +# https://github.com/mesonbuild/meson/issues/2211#issuecomment-327741571 +env_array = ['MESONTESTING=picklerror'] +testfile = files('testfile.txt') +testerpy = find_program('tester.py') +test('file arg', testerpy, args : testfile, env : [env_array, 'TEST_LIST_FLATTENING=1']) + +copy = find_program('copyfile.py') +tester = executable('tester', 'tester.c') +testfilect = custom_target('testfile', + input : testfile, + output : 'outfile.txt', + build_by_default : true, + command : [copy, '@INPUT@', '@OUTPUT@']) +test('custom target arg', tester, args : testfilect, env : env_array) diff --git a/meson/test cases/common/41 test args/tester.c b/meson/test cases/common/41 test args/tester.c new file mode 100644 index 000000000..419277e5b --- /dev/null +++ b/meson/test cases/common/41 test args/tester.c @@ -0,0 +1,34 @@ +#include +#include +#include +#include + +#ifndef _MSC_VER +#include +#endif + +int main(int argc, char **argv) { + char data[10]; + int fd, size; + + if (argc != 2) { + fprintf(stderr, "Incorrect number of arguments, got %i\n", argc); + return 1; + } + fd = open(argv[1], O_RDONLY); + if (fd < 0) { + fprintf(stderr, "First argument is wrong.\n"); + return 1; + } + + size = read(fd, data, 8); + if (size < 0) { + fprintf(stderr, "Failed to read: %s\n", strerror(errno)); + return 1; + } + if (strncmp(data, "contents", 8) != 0) { + fprintf(stderr, "Contents don't match, got %s\n", data); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/41 test args/tester.py b/meson/test cases/common/41 test args/tester.py new file mode 100755 index 000000000..b5884cc61 --- /dev/null +++ b/meson/test cases/common/41 test args/tester.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 + +import sys +import os + +assert os.environ['MESONTESTING'] == 'picklerror' +assert os.environ['TEST_LIST_FLATTENING'] == '1' + +with open(sys.argv[1]) as f: + if f.read() != 'contents\n': + sys.exit(1) diff --git a/meson/test cases/common/41 test args/testfile.txt b/meson/test cases/common/41 test args/testfile.txt new file mode 100644 index 000000000..12f00e90b --- /dev/null +++ b/meson/test cases/common/41 test args/testfile.txt @@ -0,0 +1 @@ +contents diff --git a/meson/test cases/common/42 
subproject/meson.build b/meson/test cases/common/42 subproject/meson.build new file mode 100644 index 000000000..2b939d19b --- /dev/null +++ b/meson/test cases/common/42 subproject/meson.build @@ -0,0 +1,28 @@ +project('subproj user', 'c', + version : '2.3.4', + license : 'mylicense') + +assert(meson.project_name() == 'subproj user', 'Incorrect project name') + +sub = subproject('sublib', version : '1.0.0') + +if meson.project_version() != '2.3.4' + error('Incorrect master project version string:' + meson.project_version()) +endif + +if meson.is_subproject() + error('Claimed to be a subproject even though we are the master project.') +endif + +inc = sub.get_variable('i') +lib = sub.get_variable('l') + +e = executable('user', 'user.c', include_directories : inc, link_with : lib, install : true) +test('subdirtest', e) + +meson.install_dependency_manifest('share/sublib/sublib.depmf') + +unknown_var = sub.get_variable('does-not-exist', []) +if unknown_var != [] + error ('unexpetced fallback value for subproject.get_variable()') +endif diff --git a/meson/test cases/common/42 subproject/subprojects/sublib/include/subdefs.h b/meson/test cases/common/42 subproject/subprojects/sublib/include/subdefs.h new file mode 100644 index 000000000..6ae8462f6 --- /dev/null +++ b/meson/test cases/common/42 subproject/subprojects/sublib/include/subdefs.h @@ -0,0 +1,21 @@ +#ifndef SUBDEFS_H_ +#define SUBDEFS_H_ + +#if defined _WIN32 || defined __CYGWIN__ +#if defined BUILDING_SUB + #define DLL_PUBLIC __declspec(dllexport) +#else + #define DLL_PUBLIC __declspec(dllimport) +#endif +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC subfunc(void); + +#endif diff --git a/meson/test cases/common/42 subproject/subprojects/sublib/meson.build b/meson/test cases/common/42 subproject/subprojects/sublib/meson.build new file mode 100644 index 000000000..acaf1bf38 --- /dev/null +++ b/meson/test cases/common/42 subproject/subprojects/sublib/meson.build @@ -0,0 +1,19 @@ +project('subproject', 'c', + version : '1.0.0', + license : ['sublicense1', 'sublicense2']) + +if not meson.is_subproject() + error('Claimed to be master project even though we are a subproject.') +endif + +assert(meson.project_name() == 'subproject', 'Incorrect subproject name') + +if meson.project_version() != '1.0.0' + error('Incorrect version string in subproject.') +endif + +i = include_directories('include') +l = shared_library('sublib', 'sublib.c', include_directories : i, install : false, + c_args : '-DBUILDING_SUB=2') +t = executable('simpletest', 'simpletest.c', include_directories : i, link_with : l) +test('plain', t) diff --git a/meson/test cases/common/42 subproject/subprojects/sublib/simpletest.c b/meson/test cases/common/42 subproject/subprojects/sublib/simpletest.c new file mode 100644 index 000000000..2184bc68c --- /dev/null +++ b/meson/test cases/common/42 subproject/subprojects/sublib/simpletest.c @@ -0,0 +1,5 @@ +#include + +int main(void) { + return subfunc() == 42 ? 
0 : 1; +} diff --git a/meson/test cases/common/42 subproject/subprojects/sublib/sublib.c b/meson/test cases/common/42 subproject/subprojects/sublib/sublib.c new file mode 100644 index 000000000..f71564f14 --- /dev/null +++ b/meson/test cases/common/42 subproject/subprojects/sublib/sublib.c @@ -0,0 +1,5 @@ +#include + +int DLL_PUBLIC subfunc(void) { + return 42; +} diff --git a/meson/test cases/common/42 subproject/test.json b/meson/test cases/common/42 subproject/test.json new file mode 100644 index 000000000..a56106fe1 --- /dev/null +++ b/meson/test cases/common/42 subproject/test.json @@ -0,0 +1,7 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/user"}, + {"type": "pdb", "file": "usr/bin/user"}, + {"type": "file", "file": "usr/share/sublib/sublib.depmf"} + ] +} diff --git a/meson/test cases/common/42 subproject/user.c b/meson/test cases/common/42 subproject/user.c new file mode 100644 index 000000000..918162266 --- /dev/null +++ b/meson/test cases/common/42 subproject/user.c @@ -0,0 +1,16 @@ +#include +#include + + +int main(void) { + int res; + printf("Calling into sublib now.\n"); + res = subfunc(); + if(res == 42) { + printf("Everything is fine.\n"); + return 0; + } else { + printf("Something went wrong.\n"); + return 1; + } +} diff --git a/meson/test cases/common/43 subproject options/meson.build b/meson/test cases/common/43 subproject options/meson.build new file mode 100644 index 000000000..d4598b639 --- /dev/null +++ b/meson/test cases/common/43 subproject options/meson.build @@ -0,0 +1,7 @@ +project('suboptions', 'c') + +subproject('subproject') + +if not get_option('opt') + error('option unset when it should be set') +endif diff --git a/meson/test cases/common/43 subproject options/meson_options.txt b/meson/test cases/common/43 subproject options/meson_options.txt new file mode 100644 index 000000000..c295dddd6 --- /dev/null +++ b/meson/test cases/common/43 subproject options/meson_options.txt @@ -0,0 +1 @@ +option('opt', type : 'boolean', value : true, description : 'main project option') diff --git a/meson/test cases/common/43 subproject options/subprojects/subproject/meson.build b/meson/test cases/common/43 subproject options/subprojects/subproject/meson.build new file mode 100644 index 000000000..5000b8643 --- /dev/null +++ b/meson/test cases/common/43 subproject options/subprojects/subproject/meson.build @@ -0,0 +1,5 @@ +project('subproject', 'c') + +if get_option('opt') + error('option set when it should be unset.') +endif diff --git a/meson/test cases/common/43 subproject options/subprojects/subproject/meson_options.txt b/meson/test cases/common/43 subproject options/subprojects/subproject/meson_options.txt new file mode 100644 index 000000000..ac78533e1 --- /dev/null +++ b/meson/test cases/common/43 subproject options/subprojects/subproject/meson_options.txt @@ -0,0 +1 @@ +option('opt', type : 'boolean', value : false, description : 'subproject option') diff --git a/meson/test cases/common/44 pkgconfig-gen/dependencies/custom.c b/meson/test cases/common/44 pkgconfig-gen/dependencies/custom.c new file mode 100644 index 000000000..1d3deca18 --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/dependencies/custom.c @@ -0,0 +1,3 @@ +int custom_function(void) { + return 42; +} diff --git a/meson/test cases/common/44 pkgconfig-gen/dependencies/exposed.c b/meson/test cases/common/44 pkgconfig-gen/dependencies/exposed.c new file mode 100644 index 000000000..caa45912e --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/dependencies/exposed.c @@ -0,0 +1,3 
@@ +int exposed_function(void) { + return 42; +} diff --git a/meson/test cases/common/44 pkgconfig-gen/dependencies/internal.c b/meson/test cases/common/44 pkgconfig-gen/dependencies/internal.c new file mode 100644 index 000000000..23b07ecd4 --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/dependencies/internal.c @@ -0,0 +1,3 @@ +int internal_function(void) { + return 42; +} diff --git a/meson/test cases/common/44 pkgconfig-gen/dependencies/main.c b/meson/test cases/common/44 pkgconfig-gen/dependencies/main.c new file mode 100644 index 000000000..397d40c1c --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/dependencies/main.c @@ -0,0 +1,10 @@ +#include + +#ifndef LIBFOO +#error LIBFOO should be defined in pkgconfig cflags +#endif + +int main(int argc, char *argv[]) +{ + return simple_function() == 42 ? 0 : 1; +} diff --git a/meson/test cases/common/44 pkgconfig-gen/dependencies/meson.build b/meson/test cases/common/44 pkgconfig-gen/dependencies/meson.build new file mode 100644 index 000000000..fb4e6b47c --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/dependencies/meson.build @@ -0,0 +1,62 @@ +project('pkgconfig-gen-dependencies', 'c', version: '1.0') + +pkgg = import('pkgconfig') + +# libmain internally use libinternal and expose libexpose in its API +exposed_lib = shared_library('libexposed', 'exposed.c') +internal_lib = shared_library('libinternal', 'internal.c') +main_lib = both_libraries('libmain', link_with : [exposed_lib, internal_lib]) +custom_lib = shared_library('custom', 'custom.c') + +pkgg.generate(exposed_lib) + +# Declare a few different Dependency objects +pc_dep = dependency('libfoo', version : '>=1.0') +pc_dep_dup = dependency('libfoo', version : '>= 1.0') +notfound_dep = dependency('notfound', required : false) +threads_dep = dependency('threads') +custom_dep = declare_dependency(link_with : custom_lib, compile_args : ['-DCUSTOM']) +custom2_dep = declare_dependency(link_args : ['-lcustom2'], compile_args : ['-DCUSTOM2']) + +exe = executable('test1', 'main.c', dependencies : [pc_dep]) +test('Test1', exe) + +# Generate a PC file: +# - Having libmain in libraries should pull implicitly libexposed and libinternal in Libs.private +# - Having libexposed in libraries should remove it from Libs.private +# - We generated a pc file for libexposed so it should be in Requires instead of Libs +# - Having threads_dep in libraries should add '-pthread' in both Libs and Cflags +# - Having custom_dep in libraries and libraries_private should only add it in Libs +# - Having custom2_dep in libraries_private should not add its Cflags +# - Having pc_dep in libraries_private should add it in Requires.private +# - pc_dep_dup is the same library and same version, should be ignored +# - notfound_dep is not required so it shouldn't appear in the pc file. +pkgg.generate(libraries : [main_lib, exposed_lib, threads_dep, threads_dep, custom_dep, custom_dep, '-pthread'], + libraries_private : [custom_dep, custom2_dep, custom2_dep, pc_dep, pc_dep_dup, notfound_dep], + version : '1.0', + name : 'dependency-test', + filebase : 'dependency-test', + description : 'A dependency test.' 
+) + +pkgg.generate( + name : 'requires-test', + version : '1.0', + description : 'Dependency Requires field test.', + requires : [exposed_lib, pc_dep, 'libhello'], +) + +pkgg.generate( + name : 'requires-private-test', + version : '1.0', + description : 'Dependency Requires.private field test.', + requires_private : [exposed_lib, pc_dep, 'libhello', notfound_dep], +) + +# Verify that if we promote internal_lib as public dependency, it comes after +# the main library. +main_lib2 = both_libraries('libmain2', link_with : internal_lib) +pkgg.generate(main_lib2, + libraries : internal_lib, + filebase : 'pub-lib-order', +) diff --git a/meson/test cases/common/44 pkgconfig-gen/meson.build b/meson/test cases/common/44 pkgconfig-gen/meson.build new file mode 100644 index 000000000..64965bc21 --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/meson.build @@ -0,0 +1,126 @@ +project('pkgconfig-gen', 'c') + +# Some CI runners does not have zlib, just skip them as we need some common +# external dependency. +cc = meson.get_compiler('c') +if not cc.find_library('z', required: false).found() + error('MESON_SKIP_TEST: zlib missing') +endif + +# First check we have pkg-config >= 0.29 +pkgconfig = find_program('pkg-config', required: false) +if not pkgconfig.found() + error('MESON_SKIP_TEST: pkg-config not found') +endif + +v = run_command(pkgconfig, '--version').stdout().strip() +if v.version_compare('<0.29') + error('MESON_SKIP_TEST: pkg-config version \'' + v + '\' too old') +endif + +pkgg = import('pkgconfig') + +lib = shared_library('simple', 'simple.c') +libver = '1.0' +h = install_headers('simple.h') + +pkgg.generate( + libraries : [lib, '-lz'], + subdirs : '.', + version : libver, + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.', + requires : 'glib-2.0', # Not really, but only here to test that this works. + requires_private : ['gio-2.0', 'gobject-2.0'], + libraries_private : [lib, '-lz'], +) + +test('pkgconfig-validation', pkgconfig, + args: ['--validate', 'simple'], + env: [ 'PKG_CONFIG_PATH=' + meson.current_build_dir() + '/meson-private' ]) + +# Test that name_prefix='' and name='libfoo' results in '-lfoo' +lib2 = shared_library('libfoo', 'simple.c', + name_prefix : '', + version : libver) + +pkgg.generate( + libraries : lib2, + name : 'libfoo', + version : libver, + description : 'A foo library.', + variables : ['foo=bar', 'datadir=${prefix}/data'], + extra_cflags : ['-DLIBFOO'], +) + +pkgg.generate( + name : 'libhello', + description : 'A minimalistic pkgconfig file.', + version : libver, +) + +pkgg.generate( + name : 'libhello_nolib', + description : 'A minimalistic pkgconfig file.', + version : libver, + dataonly: true, + variables : { + 'foo': 'bar', + # prefix is not set by default for dataonly pc files, but it is allowed to + # define it manually. + 'prefix': get_option('prefix'), + 'escaped_var': 'hello world', + }, + unescaped_variables: { + 'unescaped_var': 'hello world', + } +) + +# Regression test for 2 cases: +# - link_whole from InternalDependency used to be ignored, but we should still +# recurse to add libraries they link to. In this case it must add `-lsimple1` +# in generated pc file. +# - dependencies from InternalDependency used to be ignored. In this it must add +# `-lz` in generated pc file. 
+simple1 = shared_library('simple1', 'simple.c') +stat1 = static_library('stat1', 'simple.c', link_with: simple1) +dep = declare_dependency(link_whole: stat1, dependencies: cc.find_library('z')) +simple2 = library('simple2', 'simple.c') +pkgg.generate(simple2, libraries: dep) + +# Regression test: as_system() does a deepcopy() of the InternalDependency object +# which caused `-lsimple3` to be duplicated because generator used to compare +# Target instances instead of their id. +simple3 = shared_library('simple3', 'simple.c') +dep1 = declare_dependency(link_with: simple3) +dep2 = dep1.as_system() +pkgg.generate(libraries: [dep1, dep2], + name: 'simple3', + description: 'desc') + +# Regression test: stat2 is both link_with and link_whole, it should not appear +# in generated pc file. +stat2 = static_library('stat2', 'simple.c', install: true) +simple4 = library('simple4', 'simple.c', link_with: stat2) +simple5 = library('simple5', 'simple5.c', link_with: simple4, link_whole: stat2) +pkgg.generate(simple5) + +# Test passing a linkable CustomTarget and CustomTargetIndex to generator. +# Do this only with gcc/clang to not have to deal with other compiler command +# line specificities. +if cc.get_id() in ['gcc', 'clang'] + ct = custom_target('ct', + input: 'simple.c', + output: 'libct.so', + command: [cc.cmd_array(), '@INPUT@', '-shared', '-o', '@OUTPUT@'], + ) + pkgg.generate(libraries: ct, + name: 'ct', + description: 'custom target' + ) + pkgg.generate(libraries: ct[0], + name: 'ct0', + description: 'custom target index' + ) +endif diff --git a/meson/test cases/common/44 pkgconfig-gen/simple.c b/meson/test cases/common/44 pkgconfig-gen/simple.c new file mode 100644 index 000000000..ff86f3110 --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/simple.c @@ -0,0 +1,5 @@ +#include"simple.h" + +int simple_function(void) { + return 42; +} diff --git a/meson/test cases/common/44 pkgconfig-gen/simple.h b/meson/test cases/common/44 pkgconfig-gen/simple.h new file mode 100644 index 000000000..6896bfd17 --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/simple.h @@ -0,0 +1,6 @@ +#ifndef SIMPLE_H_ +#define SIMPLE_H_ + +int simple_function(void); + +#endif diff --git a/meson/test cases/common/44 pkgconfig-gen/simple5.c b/meson/test cases/common/44 pkgconfig-gen/simple5.c new file mode 100644 index 000000000..9f924bd5e --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/simple5.c @@ -0,0 +1,6 @@ +int simple5(void); + +int simple5(void) +{ + return 0; +} diff --git a/meson/test cases/common/44 pkgconfig-gen/test.json b/meson/test cases/common/44 pkgconfig-gen/test.json new file mode 100644 index 000000000..e741a6251 --- /dev/null +++ b/meson/test cases/common/44 pkgconfig-gen/test.json @@ -0,0 +1,15 @@ +{ + "installed": [ + {"type": "file", "file": "usr/include/simple.h"}, + {"type": "file", "file": "usr/lib/libstat2.a"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/libfoo.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/libhello.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple2.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple3.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple5.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/ct.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/ct0.pc"} + ] +} diff --git a/meson/test cases/common/45 custom install dirs/datafile.cat b/meson/test cases/common/45 custom install 
dirs/datafile.cat new file mode 100644 index 000000000..53d81fc2a --- /dev/null +++ b/meson/test cases/common/45 custom install dirs/datafile.cat @@ -0,0 +1 @@ +Installed cat is installed. diff --git a/meson/test cases/common/45 custom install dirs/meson.build b/meson/test cases/common/45 custom install dirs/meson.build new file mode 100644 index 000000000..494ff0ec2 --- /dev/null +++ b/meson/test cases/common/45 custom install dirs/meson.build @@ -0,0 +1,11 @@ +project('custom install dirs', 'c') +executable('prog', 'prog.c', install : true, install_dir : 'dib/dab/dub') +executable('prog2', 'prog.c', install : true, install_dir : get_option('prefix') + '/dib/dab/dub2') +install_headers('sample.h', install_dir : 'some/dir') +install_headers('sample.h', install_dir : get_option('prefix') + '/some/dir2') +install_man('prog.1', install_dir : 'woman') +install_man('prog.1', install_dir : get_option('prefix') + '/woman2') +install_data('datafile.cat', install_dir : 'meow') +install_data('datafile.cat', install_dir : get_option('prefix') + '/meow2') +install_subdir('subdir', install_dir : 'woof') +install_subdir('subdir', install_dir : get_option('prefix') + '/woof2') diff --git a/meson/test cases/common/45 custom install dirs/prog.1 b/meson/test cases/common/45 custom install dirs/prog.1 new file mode 100644 index 000000000..08ef7da62 --- /dev/null +++ b/meson/test cases/common/45 custom install dirs/prog.1 @@ -0,0 +1 @@ +Man up, you. diff --git a/meson/test cases/common/45 custom install dirs/prog.c b/meson/test cases/common/45 custom install dirs/prog.c new file mode 100644 index 000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/common/45 custom install dirs/prog.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/45 custom install dirs/sample.h b/meson/test cases/common/45 custom install dirs/sample.h new file mode 100644 index 000000000..dc030dac1 --- /dev/null +++ b/meson/test cases/common/45 custom install dirs/sample.h @@ -0,0 +1,6 @@ +#ifndef SAMPLE_H +#define SAMPLE_H + +int wackiness(); + +#endif diff --git a/meson/test cases/common/45 custom install dirs/subdir/datafile.dog b/meson/test cases/common/45 custom install dirs/subdir/datafile.dog new file mode 100644 index 000000000..7a5bcb765 --- /dev/null +++ b/meson/test cases/common/45 custom install dirs/subdir/datafile.dog @@ -0,0 +1 @@ +Installed dog is installed. 
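As a brief editorial illustration of the "45 custom install dirs" meson.build above (not part of the patch; it assumes the default test-harness prefix of /usr, which the test.json that follows confirms), the relative and absolute install_dir forms end up side by side under the same prefix: # Sketch only: how the two install_dir styles resolve with prefix=/usr #   install_dir : 'dib/dab/dub'                            -> /usr/dib/dab/dub/prog   (relative path, joined onto the prefix by Meson) #   install_dir : get_option('prefix') + '/dib/dab/dub2'  -> /usr/dib/dab/dub2/prog2  (absolute path built manually from the prefix)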
diff --git a/meson/test cases/common/45 custom install dirs/test.json b/meson/test cases/common/45 custom install dirs/test.json new file mode 100644 index 000000000..ac82fdb98 --- /dev/null +++ b/meson/test cases/common/45 custom install dirs/test.json @@ -0,0 +1,16 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/dib/dab/dub/prog"}, + {"type": "pdb", "file": "usr/dib/dab/dub/prog"}, + {"type": "exe", "file": "usr/dib/dab/dub2/prog2"}, + {"type": "pdb", "file": "usr/dib/dab/dub2/prog2"}, + {"type": "file", "file": "usr/some/dir/sample.h"}, + {"type": "file", "file": "usr/some/dir2/sample.h"}, + {"type": "file", "file": "usr/woman/prog.1"}, + {"type": "file", "file": "usr/woman2/prog.1"}, + {"type": "file", "file": "usr/meow/datafile.cat"}, + {"type": "file", "file": "usr/meow2/datafile.cat"}, + {"type": "file", "file": "usr/woof/subdir/datafile.dog"}, + {"type": "file", "file": "usr/woof2/subdir/datafile.dog"} + ] +} diff --git a/meson/test cases/common/46 subproject subproject/meson.build b/meson/test cases/common/46 subproject subproject/meson.build new file mode 100644 index 000000000..d8735a1cd --- /dev/null +++ b/meson/test cases/common/46 subproject subproject/meson.build @@ -0,0 +1,11 @@ +project('sub sub', 'c') + +a = subproject('a') +lib = a.get_variable('l') + +dependency('not-found-dep', required : false, + version : '>=1', + fallback : ['c', 'notfound_dep']) + +exe = executable('prog', 'prog.c', link_with : lib) +test('basic', exe) diff --git a/meson/test cases/common/46 subproject subproject/prog.c b/meson/test cases/common/46 subproject subproject/prog.c new file mode 100644 index 000000000..27162c5e0 --- /dev/null +++ b/meson/test cases/common/46 subproject subproject/prog.c @@ -0,0 +1,5 @@ +int func(void); + +int main(void) { + return func() == 42 ? 
0 : 1; +} diff --git a/meson/test cases/common/46 subproject subproject/subprojects/a/a.c b/meson/test cases/common/46 subproject subproject/subprojects/a/a.c new file mode 100644 index 000000000..f9848c136 --- /dev/null +++ b/meson/test cases/common/46 subproject subproject/subprojects/a/a.c @@ -0,0 +1,15 @@ +int func2(void); + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func(void) { return func2(); } + diff --git a/meson/test cases/common/46 subproject subproject/subprojects/a/meson.build b/meson/test cases/common/46 subproject subproject/subprojects/a/meson.build new file mode 100644 index 000000000..f0dfc449e --- /dev/null +++ b/meson/test cases/common/46 subproject subproject/subprojects/a/meson.build @@ -0,0 +1,4 @@ +project('a', 'c') + +b = subproject('b') +l = shared_library('a', 'a.c', link_with : b.get_variable('lb')) diff --git a/meson/test cases/common/46 subproject subproject/subprojects/b/b.c b/meson/test cases/common/46 subproject subproject/subprojects/b/b.c new file mode 100644 index 000000000..8c07177a6 --- /dev/null +++ b/meson/test cases/common/46 subproject subproject/subprojects/b/b.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func2(void) { + return 42; +} diff --git a/meson/test cases/common/46 subproject subproject/subprojects/b/meson.build b/meson/test cases/common/46 subproject subproject/subprojects/b/meson.build new file mode 100644 index 000000000..e7af6067a --- /dev/null +++ b/meson/test cases/common/46 subproject subproject/subprojects/b/meson.build @@ -0,0 +1,3 @@ +project('b', 'c') + +lb = shared_library('b', 'b.c') diff --git a/meson/test cases/common/46 subproject subproject/subprojects/c/meson.build b/meson/test cases/common/46 subproject subproject/subprojects/c/meson.build new file mode 100644 index 000000000..97a5be1d0 --- /dev/null +++ b/meson/test cases/common/46 subproject subproject/subprojects/c/meson.build @@ -0,0 +1,3 @@ +project('not-found-dep-subproj', 'c', version : '1.0') + +notfound_dep = dependency('', required : false) diff --git a/meson/test cases/common/47 same file name/d1/file.c b/meson/test cases/common/47 same file name/d1/file.c new file mode 100644 index 000000000..46e51723b --- /dev/null +++ b/meson/test cases/common/47 same file name/d1/file.c @@ -0,0 +1 @@ +int func1(void) { return 42; } diff --git a/meson/test cases/common/47 same file name/d2/file.c b/meson/test cases/common/47 same file name/d2/file.c new file mode 100644 index 000000000..3d367f16e --- /dev/null +++ b/meson/test cases/common/47 same file name/d2/file.c @@ -0,0 +1 @@ +int func2(void) { return 42; } diff --git a/meson/test cases/common/47 same file name/meson.build b/meson/test cases/common/47 same file name/meson.build new file mode 100644 index 000000000..3f351afc1 --- /dev/null +++ b/meson/test cases/common/47 same file name/meson.build @@ -0,0 +1,3 @@ +project('samefile', 'c') + +test('basic', executable('prog', 'prog.c', 'd1/file.c', 'd2/file.c')) diff --git a/meson/test cases/common/47 same file name/prog.c b/meson/test 
cases/common/47 same file name/prog.c new file mode 100644 index 000000000..1eee6c2f6 --- /dev/null +++ b/meson/test cases/common/47 same file name/prog.c @@ -0,0 +1,6 @@ +int func1(void); +int func2(void); + +int main(void) { + return func1() - func2(); +} diff --git a/meson/test cases/common/48 file grabber/a.c b/meson/test cases/common/48 file grabber/a.c new file mode 100644 index 000000000..8f63c2de5 --- /dev/null +++ b/meson/test cases/common/48 file grabber/a.c @@ -0,0 +1 @@ +int funca(void) { return 0; } diff --git a/meson/test cases/common/48 file grabber/b.c b/meson/test cases/common/48 file grabber/b.c new file mode 100644 index 000000000..f38baca8c --- /dev/null +++ b/meson/test cases/common/48 file grabber/b.c @@ -0,0 +1 @@ +int funcb(void) { return 0; } diff --git a/meson/test cases/common/48 file grabber/c.c b/meson/test cases/common/48 file grabber/c.c new file mode 100644 index 000000000..2e8abbfc9 --- /dev/null +++ b/meson/test cases/common/48 file grabber/c.c @@ -0,0 +1 @@ +int funcc(void) { return 0; } diff --git a/meson/test cases/common/48 file grabber/grabber.bat b/meson/test cases/common/48 file grabber/grabber.bat new file mode 100644 index 000000000..86603145c --- /dev/null +++ b/meson/test cases/common/48 file grabber/grabber.bat @@ -0,0 +1,5 @@ +@ECHO OFF +echo a.c +echo b.c +echo c.c +echo prog.c diff --git a/meson/test cases/common/48 file grabber/grabber.sh b/meson/test cases/common/48 file grabber/grabber.sh new file mode 100755 index 000000000..5e8f4b947 --- /dev/null +++ b/meson/test cases/common/48 file grabber/grabber.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +for i in *.c; do + echo $i +done diff --git a/meson/test cases/common/48 file grabber/grabber2.bat b/meson/test cases/common/48 file grabber/grabber2.bat new file mode 100644 index 000000000..d1a3f98ba --- /dev/null +++ b/meson/test cases/common/48 file grabber/grabber2.bat @@ -0,0 +1,5 @@ +@ECHO OFF +echo suba.c +echo subb.c +echo subc.c +echo subprog.c diff --git a/meson/test cases/common/48 file grabber/meson.build b/meson/test cases/common/48 file grabber/meson.build new file mode 100644 index 000000000..e332c0b1e --- /dev/null +++ b/meson/test cases/common/48 file grabber/meson.build @@ -0,0 +1,35 @@ +project('grabber', 'c') + +# What this script does is NOT reliable. Simply adding a file in this directory +# will NOT make it automatically appear in the build. You have to manually +# re-invoke Meson (not just Ninja) for that to happen. The simplest way +# is to touch meson-private/coredata.dat. + +# This is not the recommended way to do things, but if the tradeoffs are +# acceptable to you, then we're certainly not going to stop you. Just don't +# file bugs when it fails. :) + +if build_machine.system() == 'windows' + c = run_command('grabber.bat') + grabber = find_program('grabber2.bat') +else + c = run_command('grabber.sh') + grabber = find_program('grabber.sh') +endif + + +# First test running command explicitly. 
+if c.returncode() != 0 + error('Executing script failed.') +endif + +newline = ''' +''' + +sources = c.stdout().strip().split(newline) + +e = executable('prog', sources) +test('grabtest', e) + +# Then test using program with find_program +subdir('subdir') diff --git a/meson/test cases/common/48 file grabber/prog.c b/meson/test cases/common/48 file grabber/prog.c new file mode 100644 index 000000000..ff55723e1 --- /dev/null +++ b/meson/test cases/common/48 file grabber/prog.c @@ -0,0 +1,7 @@ +int funca(void); +int funcb(void); +int funcc(void); + +int main(void) { + return funca() + funcb() + funcc(); +} diff --git a/meson/test cases/common/48 file grabber/subdir/meson.build b/meson/test cases/common/48 file grabber/subdir/meson.build new file mode 100644 index 000000000..230d6f7de --- /dev/null +++ b/meson/test cases/common/48 file grabber/subdir/meson.build @@ -0,0 +1,5 @@ +sc = run_command(grabber) +subsources = sc.stdout().strip().split(newline) + +se = executable('subprog', subsources) +test('subgrabtest', se) diff --git a/meson/test cases/common/48 file grabber/subdir/suba.c b/meson/test cases/common/48 file grabber/subdir/suba.c new file mode 100644 index 000000000..8f63c2de5 --- /dev/null +++ b/meson/test cases/common/48 file grabber/subdir/suba.c @@ -0,0 +1 @@ +int funca(void) { return 0; } diff --git a/meson/test cases/common/48 file grabber/subdir/subb.c b/meson/test cases/common/48 file grabber/subdir/subb.c new file mode 100644 index 000000000..f38baca8c --- /dev/null +++ b/meson/test cases/common/48 file grabber/subdir/subb.c @@ -0,0 +1 @@ +int funcb(void) { return 0; } diff --git a/meson/test cases/common/48 file grabber/subdir/subc.c b/meson/test cases/common/48 file grabber/subdir/subc.c new file mode 100644 index 000000000..2e8abbfc9 --- /dev/null +++ b/meson/test cases/common/48 file grabber/subdir/subc.c @@ -0,0 +1 @@ +int funcc(void) { return 0; } diff --git a/meson/test cases/common/48 file grabber/subdir/subprog.c b/meson/test cases/common/48 file grabber/subdir/subprog.c new file mode 100644 index 000000000..ff55723e1 --- /dev/null +++ b/meson/test cases/common/48 file grabber/subdir/subprog.c @@ -0,0 +1,7 @@ +int funca(void); +int funcb(void); +int funcc(void); + +int main(void) { + return funca() + funcb() + funcc(); +} diff --git a/meson/test cases/common/49 custom target/data_source.txt b/meson/test cases/common/49 custom target/data_source.txt new file mode 100644 index 000000000..0c23cc0c3 --- /dev/null +++ b/meson/test cases/common/49 custom target/data_source.txt @@ -0,0 +1 @@ +This is a text only input file. 
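Referring back to the "48 file grabber" meson.build above: its own comment stresses that globbing sources via run_command() is unreliable, because Meson is not re-run when new files appear. For contrast, a minimal sketch of the conventional, explicit alternative that comment alludes to (editorial example, not part of the patch): sources = files('a.c', 'b.c', 'c.c', 'prog.c') e = executable('prog', sources) test('grabtest', e)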
diff --git a/meson/test cases/common/49 custom target/depfile/dep.py b/meson/test cases/common/49 custom target/depfile/dep.py new file mode 100755 index 000000000..c9e8f94dc --- /dev/null +++ b/meson/test cases/common/49 custom target/depfile/dep.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import sys, os +from glob import glob + +_, srcdir, depfile, output = sys.argv + +depfiles = glob(os.path.join(srcdir, '*')) + +quoted_depfiles = [x.replace(' ', r'\ ') for x in depfiles] + +with open(output, 'w') as f: + f.write('I am the result of globbing.') +with open(depfile, 'w') as f: + f.write('{}: {}\n'.format(output, ' '.join(quoted_depfiles))) diff --git a/meson/test cases/common/49 custom target/depfile/meson.build b/meson/test cases/common/49 custom target/depfile/meson.build new file mode 100644 index 000000000..46bca7405 --- /dev/null +++ b/meson/test cases/common/49 custom target/depfile/meson.build @@ -0,0 +1,7 @@ + + +mytarget = custom_target('depfile', + output : 'dep.dat', + depfile : 'dep.dat.d', + command : [find_program('dep.py'), meson.current_source_dir(), '@DEPFILE@', '@OUTPUT@'], +) diff --git a/meson/test cases/common/49 custom target/meson.build b/meson/test cases/common/49 custom target/meson.build new file mode 100644 index 000000000..52e8630dc --- /dev/null +++ b/meson/test cases/common/49 custom target/meson.build @@ -0,0 +1,67 @@ +project('custom target', 'c') + +python = find_program('python3', required : false) +if not python.found() + python = find_program('python') +endif + +# Note that this will not add a dependency to the compiler executable. +# Code will not be rebuilt if it changes. +comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py') +# Test that files() in command: works. The compiler just discards it. 
+useless = files('test.json') + +mytarget = custom_target('bindat', +output : 'data.dat', +input : 'data_source.txt', +command : [python, comp, '--input=@INPUT@', '--output=@OUTPUT@', useless], +env: {'MY_COMPILER_ENV': 'value'}, +install : true, +install_dir : 'subdir' +) + +has_not_changed = false +if is_disabler(mytarget) + has_not_changed = true +else + has_not_changed = true +endif +assert(has_not_changed, 'Custom target has changed.') + +assert(not is_disabler(mytarget), 'Custom target is a disabler.') + +mytarget_disabler = custom_target('bindat', +output : 'data.dat', +input : 'data_source.txt', +command : [disabler(), comp, '--input=@INPUT@', '--output=@OUTPUT@', useless], +install : true, +install_dir : 'subdir' +) + +if mytarget_disabler.found() + mytarget_disabled = false +else + mytarget_disabled = true +endif + +assert(mytarget_disabled, 'Disabled custom target should not be found.') + +mytarget_disabler = custom_target('bindat', +output : 'data.dat', +input : disabler(), +command : [python, comp, '--input=@INPUT@', '--output=@OUTPUT@', useless], +install : true, +install_dir : 'subdir' +) + +assert(is_disabler(mytarget_disabler), 'Disabled custom target is not a disabler.') + +if mytarget_disabler.found() + mytarget_disabled = false +else + mytarget_disabled = true +endif + +assert(mytarget_disabled, 'Disabled custom target should not be found.') + +subdir('depfile') diff --git a/meson/test cases/common/49 custom target/my_compiler.py b/meson/test cases/common/49 custom target/my_compiler.py new file mode 100755 index 000000000..986911124 --- /dev/null +++ b/meson/test cases/common/49 custom target/my_compiler.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 + +import os +import sys + +assert(os.path.exists(sys.argv[3])) + +args = sys.argv[:-1] + +if __name__ == '__main__': + assert os.environ['MY_COMPILER_ENV'] == 'value' + if len(args) != 3 or not args[1].startswith('--input') or \ + not args[2].startswith('--output'): + print(args[0], '--input=input_file --output=output_file') + sys.exit(1) + with open(args[1].split('=')[1]) as f: + ifile = f.read() + if ifile != 'This is a text only input file.\n': + print('Malformed input') + sys.exit(1) + with open(args[2].split('=')[1], 'w') as ofile: + ofile.write('This is a binary output file.\n') diff --git a/meson/test cases/common/49 custom target/test.json b/meson/test cases/common/49 custom target/test.json new file mode 100644 index 000000000..ba66b024a --- /dev/null +++ b/meson/test cases/common/49 custom target/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"type": "file", "file": "usr/subdir/data.dat"} + ] +} diff --git a/meson/test cases/common/5 linkstatic/libfile.c b/meson/test cases/common/5 linkstatic/libfile.c new file mode 100644 index 000000000..91800303c --- /dev/null +++ b/meson/test cases/common/5 linkstatic/libfile.c @@ -0,0 +1,3 @@ +int func(void) { + return 0; +} diff --git a/meson/test cases/common/5 linkstatic/libfile2.c b/meson/test cases/common/5 linkstatic/libfile2.c new file mode 100644 index 000000000..5badf23bc --- /dev/null +++ b/meson/test cases/common/5 linkstatic/libfile2.c @@ -0,0 +1,3 @@ +int func2(void) { + return 2; +} diff --git a/meson/test cases/common/5 linkstatic/libfile3.c b/meson/test cases/common/5 linkstatic/libfile3.c new file mode 100644 index 000000000..4bfe52a32 --- /dev/null +++ b/meson/test cases/common/5 linkstatic/libfile3.c @@ -0,0 +1,3 @@ +int func3(void) { + return 3; +} diff --git a/meson/test cases/common/5 linkstatic/libfile4.c b/meson/test cases/common/5 
linkstatic/libfile4.c new file mode 100644 index 000000000..ce1fe6872 --- /dev/null +++ b/meson/test cases/common/5 linkstatic/libfile4.c @@ -0,0 +1,3 @@ +int func4(void) { + return 4; +} diff --git a/meson/test cases/common/5 linkstatic/main.c b/meson/test cases/common/5 linkstatic/main.c new file mode 100644 index 000000000..128f2bb15 --- /dev/null +++ b/meson/test cases/common/5 linkstatic/main.c @@ -0,0 +1,5 @@ +int func(void); + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/5 linkstatic/meson.build b/meson/test cases/common/5 linkstatic/meson.build new file mode 100644 index 000000000..1f02a5c88 --- /dev/null +++ b/meson/test cases/common/5 linkstatic/meson.build @@ -0,0 +1,6 @@ +project('static library linking test', 'c') + +lib = build_target('mylib', 'libfile.c', 'libfile2.c', 'libfile3.c', 'libfile4.c', target_type : 'static_library') +exe = executable('prog', 'main.c', link_with : lib) + +test('runtest', exe) diff --git a/meson/test cases/common/50 custom target chain/data_source.txt b/meson/test cases/common/50 custom target chain/data_source.txt new file mode 100644 index 000000000..0c23cc0c3 --- /dev/null +++ b/meson/test cases/common/50 custom target chain/data_source.txt @@ -0,0 +1 @@ +This is a text only input file. diff --git a/meson/test cases/common/50 custom target chain/meson.build b/meson/test cases/common/50 custom target chain/meson.build new file mode 100644 index 000000000..138f795bd --- /dev/null +++ b/meson/test cases/common/50 custom target chain/meson.build @@ -0,0 +1,34 @@ +project('custom target', 'c') + +python = find_program('python3', required : false) +if not python.found() + python = find_program('python') +endif + +# files() is the correct way to do this, but some people +# do this so test that it works. 
+comp = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler.py') +comp2 = '@0@/@1@'.format(meson.current_source_dir(), 'my_compiler2.py') +infile = files('data_source.txt')[0] + +mytarget = custom_target('bindat', + output : 'data.dat', + command : [python, comp, infile, '@OUTPUT@'], +) + +mytarget2 = custom_target('bindat2', + output : 'data2.dat', + command : [python, comp2, mytarget, '@OUTPUT@'], + install : true, + install_dir : 'subdir' +) + +mytarget3 = custom_target('bindat3', + output : 'data3.dat', + input : [mytarget], + command : [python, comp2, '@INPUT@', '@OUTPUT@'], + install : true, + install_dir : 'subdir' +) + +subdir('usetarget') diff --git a/meson/test cases/common/50 custom target chain/my_compiler.py b/meson/test cases/common/50 custom target chain/my_compiler.py new file mode 100755 index 000000000..d99029bcc --- /dev/null +++ b/meson/test cases/common/50 custom target chain/my_compiler.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import sys + +if __name__ == '__main__': + if len(sys.argv) != 3: + print(sys.argv[0], 'input_file output_file') + sys.exit(1) + with open(sys.argv[1]) as f: + ifile = f.read() + if ifile != 'This is a text only input file.\n': + print('Malformed input') + sys.exit(1) + with open(sys.argv[2], 'w') as ofile: + ofile.write('This is a binary output file.\n') diff --git a/meson/test cases/common/50 custom target chain/my_compiler2.py b/meson/test cases/common/50 custom target chain/my_compiler2.py new file mode 100755 index 000000000..22ec7897b --- /dev/null +++ b/meson/test cases/common/50 custom target chain/my_compiler2.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import sys + +if __name__ == '__main__': + if len(sys.argv) != 3: + print(sys.argv[0], 'input_file output_file') + sys.exit(1) + with open(sys.argv[1]) as f: + ifile = f.read() + if ifile != 'This is a binary output file.\n': + print('Malformed input') + sys.exit(1) + with open(sys.argv[2], 'w') as ofile: + ofile.write('This is a different binary output file.\n') diff --git a/meson/test cases/common/50 custom target chain/test.json b/meson/test cases/common/50 custom target chain/test.json new file mode 100644 index 000000000..d6b0fa9b8 --- /dev/null +++ b/meson/test cases/common/50 custom target chain/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "file", "file": "usr/subdir/data2.dat"}, + {"type": "file", "file": "usr/subdir/data3.dat"} + ] +} diff --git a/meson/test cases/common/50 custom target chain/usetarget/meson.build b/meson/test cases/common/50 custom target chain/usetarget/meson.build new file mode 100644 index 000000000..9aece8ce9 --- /dev/null +++ b/meson/test cases/common/50 custom target chain/usetarget/meson.build @@ -0,0 +1,8 @@ +e = executable('myexe', 'myexe.c') +subexe = find_program('subcomp.py') + +custom_target('use_exe', + input : e, + output : 'subout.res', + command : [subexe, '@INPUT@', '@OUTPUT@'], +) diff --git a/meson/test cases/common/50 custom target chain/usetarget/myexe.c b/meson/test cases/common/50 custom target chain/usetarget/myexe.c new file mode 100644 index 000000000..33311332e --- /dev/null +++ b/meson/test cases/common/50 custom target chain/usetarget/myexe.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I am myexe.\n"); + return 0; +} diff --git a/meson/test cases/common/50 custom target chain/usetarget/subcomp.py b/meson/test cases/common/50 custom target chain/usetarget/subcomp.py new file mode 100755 index 000000000..52dc0bbb5 --- /dev/null +++ b/meson/test cases/common/50 custom target 
chain/usetarget/subcomp.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +import sys + +with open(sys.argv[1], 'rb') as ifile: + with open(sys.argv[2], 'w') as ofile: + ofile.write('Everything ok.\n') diff --git a/meson/test cases/common/51 run target/check-env.py b/meson/test cases/common/51 run target/check-env.py new file mode 100644 index 000000000..cf3eb7c08 --- /dev/null +++ b/meson/test cases/common/51 run target/check-env.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 + +import os, sys +from pathlib import Path + +assert 'MESON_SOURCE_ROOT' in os.environ +assert 'MESON_BUILD_ROOT' in os.environ +assert 'MESON_SUBDIR' in os.environ +assert 'MESONINTROSPECT' in os.environ +assert 'MY_ENV' in os.environ + +# Environment has absolute paths and argv has relative paths when using ninja +# backend and absolute paths when using vs backend. What matters is once +# resolved they point to same location. +env_source_root = Path(os.environ['MESON_SOURCE_ROOT']).resolve() +env_build_root = Path(os.environ['MESON_BUILD_ROOT']).resolve() +env_current_source_dir = Path(env_source_root, os.environ['MESON_SUBDIR']).resolve() +argv_paths = [Path(i).resolve() for i in sys.argv[1:]] +source_root, build_root, current_source_dir = argv_paths + +assert source_root == env_source_root +assert build_root == env_build_root +assert current_source_dir == env_current_source_dir diff --git a/meson/test cases/common/51 run target/check_exists.py b/meson/test cases/common/51 run target/check_exists.py new file mode 100755 index 000000000..b6fc967eb --- /dev/null +++ b/meson/test cases/common/51 run target/check_exists.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +import os +import sys + +if not os.path.isfile(sys.argv[1]): + raise Exception("Couldn't find {!r}".format(sys.argv[1])) diff --git a/meson/test cases/common/51 run target/configure.in b/meson/test cases/common/51 run target/configure.in new file mode 100755 index 000000000..0d42d0441 --- /dev/null +++ b/meson/test cases/common/51 run target/configure.in @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +print('Success') \ No newline at end of file diff --git a/meson/test cases/common/51 run target/converter.py b/meson/test cases/common/51 run target/converter.py new file mode 100644 index 000000000..8dd31fe2f --- /dev/null +++ b/meson/test cases/common/51 run target/converter.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys + +with open(sys.argv[1], 'rb') as ifile, open(sys.argv[2], 'wb') as ofile: + ofile.write(ifile.read()) diff --git a/meson/test cases/common/51 run target/fakeburner.py b/meson/test cases/common/51 run target/fakeburner.py new file mode 100755 index 000000000..8b1f35343 --- /dev/null +++ b/meson/test cases/common/51 run target/fakeburner.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + + +import sys + +plain_arg = sys.argv[1] +_, filename, _ = plain_arg.split(':') +try: + with open(filename, 'rb') as f: + content = f.read() +except FileNotFoundError: + print('Could not open file. 
Missing dependency?') + sys.exit(1) +print('File opened, pretending to send it somewhere.') +print(len(content), 'bytes uploaded') diff --git a/meson/test cases/common/51 run target/helloprinter.c b/meson/test cases/common/51 run target/helloprinter.c new file mode 100644 index 000000000..4a6e0ac5d --- /dev/null +++ b/meson/test cases/common/51 run target/helloprinter.c @@ -0,0 +1,11 @@ +#include <stdio.h> + +int main(int argc, char **argv) { + if(argc != 2) { + printf("I can not haz argument.\n"); + return 1; + } else { + printf("I can haz argument: %s\n", argv[1]); + } + return 0; +} diff --git a/meson/test cases/common/51 run target/meson.build b/meson/test cases/common/51 run target/meson.build new file mode 100644 index 000000000..85d30f0b3 --- /dev/null +++ b/meson/test cases/common/51 run target/meson.build @@ -0,0 +1,107 @@ +project('run target', 'c') + +# Make it possible to run built programs. +# In cross builds exe_wrapper should be added if it exists. + +exe = executable('helloprinter', 'helloprinter.c') + +if not meson.is_cross_build() or meson.can_run_host_binaries() + run_target('runhello', + command : [exe, 'argument']) +endif + +converter = find_program('converter.py') + +hex = custom_target('exe.hex', + input : exe, + output : 'exe.hex', + command : [converter, '@INPUT@', '@OUTPUT@', + ], +) + +fakeburner = find_program('fakeburner.py') + +# This emulates the Arduino flasher application. It sandwiches the filename inside +# a packed argument. Thus we need to declare it manually. +run_target('upload', + command : [fakeburner, 'x:@0@:y'.format(exe.full_path())], + depends : exe, +) + +run_target('upload2', + command : [fakeburner, 'x:@0@:y'.format(hex.full_path())], + depends : hex, +) + +python3 = find_program('python3', required : false) +if not python3.found() + python3 = find_program('python') +endif + +run_target('py3hi', + command : [python3, '-c', 'print("I am Python3.")']) + +run_target('check_exists', + command : [find_program('check_exists.py'), files('helloprinter.c')]) + +run_target('check_exists', + command : [find_program('check_exists.py'), files('helloprinter.c')], + depends : disabler(), +) + +run_target('check_exists', + command : [disabler(), files('helloprinter.c')]) + +# What if the output of a custom_target is the command to +# execute? Obviously this will not work, as hex is not an +# executable, but test that the output is generated correctly. +run_target('donotrunme', + command : hex) + +# Ensure configure files can be passed +conf = configure_file( + input: 'configure.in', + output: 'configure', + configuration: configuration_data() +) + +run_target('configure_script', + command : conf +) + +custom_target('configure_script_ct', + command: conf, + output: 'dummy.txt', + capture: true) + +# Target names that clash with potential builtin functionality. +run_target('ctags', + command : converter) + +run_target('clang-format', + command : converter) + +# Check we can pass env to the program. Also check some string substitutions +# that were added in 0.57.0 but not documented. This is documented behaviour +# since 0.57.1. +run_target('check-env', + command: [find_program('check-env.py'), '@SOURCE_ROOT@', '@BUILD_ROOT@', + '@CURRENT_SOURCE_DIR@'], + env: {'MY_ENV': '1'}, +) + +# Check some string substitutions that have always been done but never documented. +# Some projects have been relying on this implementation detail. This is +# documented behaviour since 0.57.1. 
+custom_target('check-env-ct', + command: [find_program('check-env.py'), '@SOURCE_ROOT@', '@BUILD_ROOT@', + '@CURRENT_SOURCE_DIR@'], + env: {'MESON_SOURCE_ROOT': meson.source_root(), + 'MESON_BUILD_ROOT': meson.build_root(), + 'MESON_SUBDIR': meson.current_source_dir(), + 'MESONINTROSPECT': 'fake value', + 'MY_ENV': '1'}, + output: 'check-env-ct', +) + +run_target('textprinter', command: ['subdir/textprinter.py']) diff --git a/meson/test cases/common/51 run target/subdir/textprinter.py b/meson/test cases/common/51 run target/subdir/textprinter.py new file mode 100644 index 000000000..3159c08ed --- /dev/null +++ b/meson/test cases/common/51 run target/subdir/textprinter.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +print('I am a script. Being run.') diff --git a/meson/test cases/common/52 object generator/meson.build b/meson/test cases/common/52 object generator/meson.build new file mode 100644 index 000000000..e20da6f46 --- /dev/null +++ b/meson/test cases/common/52 object generator/meson.build @@ -0,0 +1,34 @@ +project('object generator', 'c') + +python = find_program('python3', required : false) +if not python.found() + python = find_program('python') +endif + +# Note that this will not add a dependency to the compiler executable. +# Code will not be rebuilt if it changes. +comp = '@0@/@1@'.format(meson.current_source_dir(), 'obj_generator.py') + +if host_machine.system() == 'windows' + outputname = '@BASENAME@.obj' +else + outputname = '@BASENAME@.o' +endif + +cc = meson.get_compiler('c').cmd_array().get(-1) +# Generate an object file manually. +gen = generator(python, + output : outputname, + arguments : [comp, cc, '@INPUT@', '@OUTPUT@']) + +generated = gen.process(['source.c', 'source2.c']) + +# Generate an object file with indexed OUTPUT replacement. +gen2 = generator(python, + output : outputname, + arguments : [comp, cc, '@INPUT@', '@OUTPUT0@']) +generated2 = gen2.process(['source3.c']) + +e = executable('prog', 'prog.c', generated, generated2) + +test('objgen', e) \ No newline at end of file diff --git a/meson/test cases/common/52 object generator/obj_generator.py b/meson/test cases/common/52 object generator/obj_generator.py new file mode 100755 index 000000000..a33872aac --- /dev/null +++ b/meson/test cases/common/52 object generator/obj_generator.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +# Mimic a binary that generates an object file (e.g. windres). 
+ +import sys, subprocess + +if __name__ == '__main__': + if len(sys.argv) != 4: + print(sys.argv[0], 'compiler input_file output_file') + sys.exit(1) + compiler = sys.argv[1] + ifile = sys.argv[2] + ofile = sys.argv[3] + if compiler.endswith('cl'): + cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile] + else: + cmd = [compiler, '-c', ifile, '-o', ofile] + sys.exit(subprocess.call(cmd)) diff --git a/meson/test cases/common/52 object generator/prog.c b/meson/test cases/common/52 object generator/prog.c new file mode 100644 index 000000000..9841180d0 --- /dev/null +++ b/meson/test cases/common/52 object generator/prog.c @@ -0,0 +1,7 @@ +int func1_in_obj(void); +int func2_in_obj(void); +int func3_in_obj(void); + +int main(void) { + return func1_in_obj() + func2_in_obj() + func3_in_obj(); +} diff --git a/meson/test cases/common/52 object generator/source.c b/meson/test cases/common/52 object generator/source.c new file mode 100644 index 000000000..1dc08e168 --- /dev/null +++ b/meson/test cases/common/52 object generator/source.c @@ -0,0 +1,3 @@ +int func1_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/52 object generator/source2.c b/meson/test cases/common/52 object generator/source2.c new file mode 100644 index 000000000..8024b9714 --- /dev/null +++ b/meson/test cases/common/52 object generator/source2.c @@ -0,0 +1,3 @@ +int func2_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/52 object generator/source3.c b/meson/test cases/common/52 object generator/source3.c new file mode 100644 index 000000000..c4362c4d6 --- /dev/null +++ b/meson/test cases/common/52 object generator/source3.c @@ -0,0 +1,3 @@ +int func3_in_obj(void) { + return 0; +} diff --git a/meson/test cases/common/53 install script/customtarget.py b/meson/test cases/common/53 install script/customtarget.py new file mode 100755 index 000000000..e28373a39 --- /dev/null +++ b/meson/test cases/common/53 install script/customtarget.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 + +import argparse +import os + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument('dirname') + args = parser.parse_args() + + with open(os.path.join(args.dirname, '1.txt'), 'w') as f: + f.write('') + with open(os.path.join(args.dirname, '2.txt'), 'w') as f: + f.write('') + + +if __name__ == "__main__": + main() diff --git a/meson/test cases/common/53 install script/meson.build b/meson/test cases/common/53 install script/meson.build new file mode 100644 index 000000000..24d5dc84b --- /dev/null +++ b/meson/test cases/common/53 install script/meson.build @@ -0,0 +1,45 @@ +project('custom install script', 'c') + +meson.add_install_script('myinstall.py', 'diiba/daaba', 'file.dat') +meson.add_install_script('myinstall.py', 'this/should', 'also-work.dat') + +subdir('src') + +meson.add_install_script('myinstall.py', 'dir', afile, '--mode=copy') + +data = configuration_data() +data.set10('foo', true) +conf = configure_file( + configuration : data, + output : 'conf.txt' +) + +meson.add_install_script('myinstall.py', 'dir', conf, '--mode=copy') + +t = custom_target( + 'ct', + command : [find_program('customtarget.py'), '@OUTDIR@'], + output : ['1.txt', '2.txt'], +) + +meson.add_install_script('myinstall.py', 'customtarget', t, '--mode=copy') +meson.add_install_script('myinstall.py', 'customtargetindex', t[0], '--mode=copy') + +installer = configure_file( + input : 'myinstall.py', + output : 'myinstall_copy.py', + copy : true, +) + +meson.add_install_script(installer, 'otherdir', afile, 
'--mode=copy') + +# This executable links against a library built in the src/ directory. On Windows this +# means meson must add src/ into $PATH to find the DLL when running it as an +# install script. +myexe = executable('prog', 'prog.c', + link_with: mylib, + install : true, +) +if meson.can_run_host_binaries() + meson.add_install_script(myexe) +endif diff --git a/meson/test cases/common/53 install script/myinstall.py b/meson/test cases/common/53 install script/myinstall.py new file mode 100755 index 000000000..a57334242 --- /dev/null +++ b/meson/test cases/common/53 install script/myinstall.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 + +import argparse +import os +import shutil + +prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX'] + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument('dirname') + parser.add_argument('files', nargs='+') + parser.add_argument('--mode', action='store', default='create', choices=['create', 'copy']) + args = parser.parse_args() + + dirname = os.path.join(prefix, args.dirname) + if not os.path.exists(dirname): + os.makedirs(dirname) + + if args.mode == 'create': + for name in args.files: + with open(os.path.join(dirname, name), 'w') as f: + f.write('') + else: + for name in args.files: + shutil.copy(name, dirname) + + +if __name__ == "__main__": + main() diff --git a/meson/test cases/common/53 install script/prog.c b/meson/test cases/common/53 install script/prog.c new file mode 100644 index 000000000..85f8df9c3 --- /dev/null +++ b/meson/test cases/common/53 install script/prog.c @@ -0,0 +1,14 @@ +#include <stdio.h> + +#ifdef _WIN32 + #define DO_IMPORT __declspec(dllimport) +#else + #define DO_IMPORT +#endif + +DO_IMPORT int foo(void); + +int main(void) { + printf("This is text.\n"); + return foo(); +} diff --git a/meson/test cases/common/53 install script/src/a file.txt b/meson/test cases/common/53 install script/src/a file.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/53 install script/src/foo.c b/meson/test cases/common/53 install script/src/foo.c new file mode 100644 index 000000000..46cb845f9 --- /dev/null +++ b/meson/test cases/common/53 install script/src/foo.c @@ -0,0 +1,10 @@ +#ifdef _WIN32 + #define DO_EXPORT __declspec(dllexport) +#else + #define DO_EXPORT +#endif + +DO_EXPORT int foo(void) +{ + return 0; +} diff --git a/meson/test cases/common/53 install script/src/meson.build b/meson/test cases/common/53 install script/src/meson.build new file mode 100644 index 000000000..72de34609 --- /dev/null +++ b/meson/test cases/common/53 install script/src/meson.build @@ -0,0 +1,5 @@ +meson.add_install_script('myinstall.py', 'this/does', 'something-different.dat') + +afile = files('a file.txt') + +mylib = shared_library('mylib', 'foo.c') diff --git a/meson/test cases/common/53 install script/src/myinstall.py b/meson/test cases/common/53 install script/src/myinstall.py new file mode 100644 index 000000000..3a9d89b50 --- /dev/null +++ b/meson/test cases/common/53 install script/src/myinstall.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import os +import sys + +prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX'] + +dirname = os.path.join(prefix, sys.argv[1]) + +if not os.path.exists(dirname): + os.makedirs(dirname) + +with open(os.path.join(dirname, sys.argv[2] + '.in'), 'w') as f: + f.write('') diff --git a/meson/test cases/common/53 install script/test.json b/meson/test cases/common/53 install script/test.json new file mode 100644 index 000000000..7ac26070f --- /dev/null +++ b/meson/test cases/common/53 
install script/test.json @@ -0,0 +1,15 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/prog"}, + {"type": "pdb", "file": "usr/bin/prog"}, + {"type": "file", "file": "usr/diiba/daaba/file.dat"}, + {"type": "file", "file": "usr/this/should/also-work.dat"}, + {"type": "file", "file": "usr/this/does/something-different.dat.in"}, + {"type": "file", "file": "usr/dir/a file.txt"}, + {"type": "file", "file": "usr/dir/conf.txt"}, + {"type": "file", "file": "usr/otherdir/a file.txt"}, + {"type": "file", "file": "usr/customtarget/1.txt"}, + {"type": "file", "file": "usr/customtarget/2.txt"}, + {"type": "file", "file": "usr/customtargetindex/1.txt"} + ] +} diff --git a/meson/test cases/common/54 custom target source output/generator.py b/meson/test cases/common/54 custom target source output/generator.py new file mode 100755 index 000000000..1bec8e855 --- /dev/null +++ b/meson/test cases/common/54 custom target source output/generator.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import sys, os + +if len(sys.argv) != 2: + print(sys.argv[0], '') + +odir = sys.argv[1] + +with open(os.path.join(odir, 'mylib.h'), 'w') as f: + f.write('int func(void);\n') +with open(os.path.join(odir, 'mylib.c'), 'w') as f: + f.write('''int func(void) { + return 0; +} +''') diff --git a/meson/test cases/common/54 custom target source output/main.c b/meson/test cases/common/54 custom target source output/main.c new file mode 100644 index 000000000..bca138713 --- /dev/null +++ b/meson/test cases/common/54 custom target source output/main.c @@ -0,0 +1,5 @@ +#include"mylib.h" + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/54 custom target source output/meson.build b/meson/test cases/common/54 custom target source output/meson.build new file mode 100644 index 000000000..f9d039df0 --- /dev/null +++ b/meson/test cases/common/54 custom target source output/meson.build @@ -0,0 +1,9 @@ +project('source generation', 'c') + +ct = custom_target('gen', +output : ['mylib.h', 'mylib.c'], +command : [find_program('generator.py'), '@OUTDIR@'], +) + +e = executable('prog', 'main.c', ct) +test('gentest', e) diff --git a/meson/test cases/common/55 exe static shared/meson.build b/meson/test cases/common/55 exe static shared/meson.build new file mode 100644 index 000000000..69ede5e87 --- /dev/null +++ b/meson/test cases/common/55 exe static shared/meson.build @@ -0,0 +1,15 @@ +project('statchain', 'c') + +subdir('subdir') +# Test that -fPIC in c_args is also accepted (on platforms where it's permitted) +picflag = [] +if not ['darwin', 'windows'].contains(host_machine.system()) + picflag = ['-fPIC'] +endif +statlib2 = static_library('stat2', 'stat2.c', c_args : picflag, pic : false) +# Test that pic is needed for both direct and indirect static library +# dependencies of shared libraries (on Linux and BSD) +statlib = static_library('stat', 'stat.c', link_with : [shlib, statlib2], pic : true) +shlib2 = shared_library('shr2', 'shlib2.c', link_with : statlib) +exe = executable('prog', 'prog.c', link_with : shlib2) +test('runtest', exe) diff --git a/meson/test cases/common/55 exe static shared/prog.c b/meson/test cases/common/55 exe static shared/prog.c new file mode 100644 index 000000000..6dba60d7c --- /dev/null +++ b/meson/test cases/common/55 exe static shared/prog.c @@ -0,0 +1,10 @@ +int shlibfunc2(void); +int statlibfunc(void); + +int main(void) { + if (statlibfunc() != 42) + return 1; + if (shlibfunc2() != 24) + return 1; + return 0; +} diff --git a/meson/test cases/common/55 exe static shared/shlib2.c 
b/meson/test cases/common/55 exe static shared/shlib2.c new file mode 100644 index 000000000..12bc913d7 --- /dev/null +++ b/meson/test cases/common/55 exe static shared/shlib2.c @@ -0,0 +1,8 @@ +#include "subdir/exports.h" + +int statlibfunc(void); +int statlibfunc2(void); + +int DLL_PUBLIC shlibfunc2(void) { + return statlibfunc() - statlibfunc2(); +} diff --git a/meson/test cases/common/55 exe static shared/stat.c b/meson/test cases/common/55 exe static shared/stat.c new file mode 100644 index 000000000..eddc4d816 --- /dev/null +++ b/meson/test cases/common/55 exe static shared/stat.c @@ -0,0 +1,7 @@ +#include "subdir/exports.h" + +int shlibfunc(void); + +int DLL_PUBLIC statlibfunc(void) { + return shlibfunc(); +} diff --git a/meson/test cases/common/55 exe static shared/stat2.c b/meson/test cases/common/55 exe static shared/stat2.c new file mode 100644 index 000000000..4abb49ffd --- /dev/null +++ b/meson/test cases/common/55 exe static shared/stat2.c @@ -0,0 +1,3 @@ +int statlibfunc2(void) { + return 18; +} diff --git a/meson/test cases/common/55 exe static shared/subdir/exports.h b/meson/test cases/common/55 exe static shared/subdir/exports.h new file mode 100644 index 000000000..c89ccb23b --- /dev/null +++ b/meson/test cases/common/55 exe static shared/subdir/exports.h @@ -0,0 +1,12 @@ +#pragma once + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif diff --git a/meson/test cases/common/55 exe static shared/subdir/meson.build b/meson/test cases/common/55 exe static shared/subdir/meson.build new file mode 100644 index 000000000..2b7393b4e --- /dev/null +++ b/meson/test cases/common/55 exe static shared/subdir/meson.build @@ -0,0 +1 @@ +shlib = shared_library('shar', 'shlib.c') diff --git a/meson/test cases/common/55 exe static shared/subdir/shlib.c b/meson/test cases/common/55 exe static shared/subdir/shlib.c new file mode 100644 index 000000000..dd9c6b2db --- /dev/null +++ b/meson/test cases/common/55 exe static shared/subdir/shlib.c @@ -0,0 +1,5 @@ +#include "exports.h" + +int DLL_PUBLIC shlibfunc(void) { + return 42; +} diff --git a/meson/test cases/common/56 array methods/a.txt b/meson/test cases/common/56 array methods/a.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/56 array methods/b.txt b/meson/test cases/common/56 array methods/b.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/56 array methods/c.txt b/meson/test cases/common/56 array methods/c.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/56 array methods/meson.build b/meson/test cases/common/56 array methods/meson.build new file mode 100644 index 000000000..99855bce0 --- /dev/null +++ b/meson/test cases/common/56 array methods/meson.build @@ -0,0 +1,70 @@ +project('array methods', 'c') + +empty = [] +one = ['abc'] +two = ['def', 'ghi'] +combined = [empty, one, two] + +file_list = files('a.txt', 'b.txt') +file_a = files('a.txt') +file_c = files('c.txt') + +if file_a[0] != file_list[0] + error('Files are not equal') +endif + +if not file_list.contains(file_a[0]) + error('Contains with ObjectHolder lists does not work') +endif + +if file_list.contains(file_c[0]) + error('Contains with ObjectHolder lists found non existant object') +endif + +if empty.contains('abc') + 
error('Empty is not empty.') +endif + +if one.contains('a') + error('One claims to contain a') +endif + +if not one.contains('abc') + error('One claims to not contain abc.') +endif + +if one.contains('abcd') + error('One claims to contain abcd.') +endif + +if two.contains('abc') + error('Two claims to contain abc.') +endif + +if not two.contains('def') + error('Two claims not to contain def.') +endif + +if not two.contains('ghi') + error('Two claims not to contain ghi.') +endif + +if two.contains('defg') + error('Two claims to contain defg.') +endif + +if not combined.contains('abc') + error('Combined claims not to contain abc.') +endif + +if not combined.contains(one) + error('Combined claims not to contain [abc].') +endif + +if not combined.contains(two) + error('Combined claims not to contain [def, ghi].') +endif + +if not combined.contains('ghi') + error('Combined claims not to contain ghi.') +endif diff --git a/meson/test cases/common/57 custom header generator/input.def b/meson/test cases/common/57 custom header generator/input.def new file mode 100644 index 000000000..573541ac9 --- /dev/null +++ b/meson/test cases/common/57 custom header generator/input.def @@ -0,0 +1 @@ +0 diff --git a/meson/test cases/common/57 custom header generator/makeheader.py b/meson/test cases/common/57 custom header generator/makeheader.py new file mode 100644 index 000000000..f15683447 --- /dev/null +++ b/meson/test cases/common/57 custom header generator/makeheader.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 + +# NOTE: this file does not have the executable bit set. This tests that +# Meson can automatically parse shebang lines. + +import sys + +template = '#define RET_VAL %s\n' +with open(sys.argv[1]) as f: + output = template % (f.readline().strip(), ) +with open(sys.argv[2], 'w') as f: + f.write(output) diff --git a/meson/test cases/common/57 custom header generator/meson.build b/meson/test cases/common/57 custom header generator/meson.build new file mode 100644 index 000000000..d43915a3c --- /dev/null +++ b/meson/test cases/common/57 custom header generator/meson.build @@ -0,0 +1,21 @@ +project('custom header generator', 'c') + +cc_id = meson.get_compiler('c').get_id() +cc_ver = meson.get_compiler('c').version() + +if cc_id == 'intel' or (cc_id == 'lcc' and cc_ver.version_compare('<=1.23.08')) + # ICC and LCC <= 1.23.08 do not escape spaces in paths in the dependency file, so Ninja + # (correctly) thinks that the rule has multiple outputs and errors out: + # 'depfile has multiple output paths' + error('MESON_SKIP_TEST: Skipping test because your compiler is known to generate broken dependency files') +endif + +gen = find_program('makeheader.py') + +generated_h = custom_target('makeheader.py', + output : 'myheader.lh', # Suffix not .h to ensure this works with custom suffixes, too. 
+ input : 'input.def', + command : [gen, '@INPUT0@', '@OUTPUT0@', files('somefile.txt')]) + +prog = executable('prog', 'prog.c', generated_h) +test('gentest', prog) diff --git a/meson/test cases/common/57 custom header generator/prog.c b/meson/test cases/common/57 custom header generator/prog.c new file mode 100644 index 000000000..acd0ff79b --- /dev/null +++ b/meson/test cases/common/57 custom header generator/prog.c @@ -0,0 +1,5 @@ +#include"myheader.lh" + +int main(void) { + return RET_VAL; +} diff --git a/meson/test cases/common/57 custom header generator/somefile.txt b/meson/test cases/common/57 custom header generator/somefile.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/58 multiple generators/data2.dat b/meson/test cases/common/58 multiple generators/data2.dat new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/meson/test cases/common/58 multiple generators/data2.dat @@ -0,0 +1 @@ +2 diff --git a/meson/test cases/common/58 multiple generators/main.cpp b/meson/test cases/common/58 multiple generators/main.cpp new file mode 100644 index 000000000..f1a01bd5f --- /dev/null +++ b/meson/test cases/common/58 multiple generators/main.cpp @@ -0,0 +1,6 @@ +#include"source1.h" +#include"source2.h" + +int main(void) { + return func1() + func2(); +} diff --git a/meson/test cases/common/58 multiple generators/meson.build b/meson/test cases/common/58 multiple generators/meson.build new file mode 100644 index 000000000..66f7fa9c1 --- /dev/null +++ b/meson/test cases/common/58 multiple generators/meson.build @@ -0,0 +1,13 @@ +project('trickier generator', 'cpp') + +comp = find_program('mygen.py') +subdir('subdir') + +generated2 = custom_target('generated2', + output : ['source2.h', 'source2.cpp'], + input : 'data2.dat', + command : [comp, '@INPUT0@', '@OUTDIR@']) + +exe = executable('prog', 'main.cpp', generated, generated2, + include_directories : include_directories('subdir')) + test('generated test', exe) diff --git a/meson/test cases/common/58 multiple generators/mygen.py b/meson/test cases/common/58 multiple generators/mygen.py new file mode 100755 index 000000000..99dc33136 --- /dev/null +++ b/meson/test cases/common/58 multiple generators/mygen.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 + +import sys, os + +if len(sys.argv) != 3: + print("You is fail.") + sys.exit(1) + +with open(sys.argv[1]) as f: + val = f.read().strip() +outdir = sys.argv[2] + +outhdr = os.path.join(outdir, 'source%s.h' % val) +outsrc = os.path.join(outdir, 'source%s.cpp' % val) + +with open(outhdr, 'w') as f: + f.write('int func%s();\n' % val) +with open(outsrc, 'w') as f: + f.write('''int func%s() { + return 0; +} +''' % val) diff --git a/meson/test cases/common/58 multiple generators/subdir/data.dat b/meson/test cases/common/58 multiple generators/subdir/data.dat new file mode 100644 index 000000000..d00491fd7 --- /dev/null +++ b/meson/test cases/common/58 multiple generators/subdir/data.dat @@ -0,0 +1 @@ +1 diff --git a/meson/test cases/common/58 multiple generators/subdir/meson.build b/meson/test cases/common/58 multiple generators/subdir/meson.build new file mode 100644 index 000000000..2456ecb6d --- /dev/null +++ b/meson/test cases/common/58 multiple generators/subdir/meson.build @@ -0,0 +1,4 @@ +generated = custom_target('generated', +output : ['source1.h', 'source1.cpp'], +input : 'data.dat', +command : [comp, '@INPUT0@', '@OUTDIR@']) diff --git a/meson/test cases/common/59 install subdir/meson.build b/meson/test cases/common/59 install 
subdir/meson.build new file mode 100644 index 000000000..59f3b4dc5 --- /dev/null +++ b/meson/test cases/common/59 install subdir/meson.build @@ -0,0 +1,21 @@ +project('install a whole subdir', 'c', + default_options : ['install_umask=preserve']) + +# A subdir with an exclusion: +install_subdir('sub2', + exclude_files : ['excluded-three.dat'], + exclude_directories : ['excluded'], + install_dir : 'share') + +subdir('subdir') +# A subdir with write perms only for the owner +# and read-list perms for owner and group +install_subdir('sub1', install_dir : 'share', install_mode : ['rwxr-x--t', 'root']) +install_subdir('sub/sub1', install_dir : 'share') + +# strip_directory +install_subdir('sub_elided', install_dir : 'share', strip_directory : true) +install_subdir('nested_elided/sub', install_dir : 'share', strip_directory : true) + +# Create new empty directory that doesn't exist in the source tree +install_subdir('new_directory', install_dir : 'share') diff --git a/meson/test cases/common/59 install subdir/nested_elided/sub/dircheck/ninth.dat b/meson/test cases/common/59 install subdir/nested_elided/sub/dircheck/ninth.dat new file mode 100644 index 000000000..c4eaca780 --- /dev/null +++ b/meson/test cases/common/59 install subdir/nested_elided/sub/dircheck/ninth.dat @@ -0,0 +1 @@ +Nested file under nested elided directory. diff --git a/meson/test cases/common/59 install subdir/nested_elided/sub/eighth.dat b/meson/test cases/common/59 install subdir/nested_elided/sub/eighth.dat new file mode 100644 index 000000000..fa9b7b77f --- /dev/null +++ b/meson/test cases/common/59 install subdir/nested_elided/sub/eighth.dat @@ -0,0 +1 @@ +File in nested elided directory. diff --git a/meson/test cases/common/59 install subdir/sub/sub1/third.dat b/meson/test cases/common/59 install subdir/sub/sub1/third.dat new file mode 100644 index 000000000..5ccbc43c7 --- /dev/null +++ b/meson/test cases/common/59 install subdir/sub/sub1/third.dat @@ -0,0 +1 @@ +This is a third data file for sub1 dir. diff --git a/meson/test cases/common/59 install subdir/sub1/second.dat b/meson/test cases/common/59 install subdir/sub1/second.dat new file mode 100644 index 000000000..48857a8b6 --- /dev/null +++ b/meson/test cases/common/59 install subdir/sub1/second.dat @@ -0,0 +1 @@ +Test that multiple install_subdirs meld their results. \ No newline at end of file diff --git a/meson/test cases/common/59 install subdir/sub2/dircheck/excluded-three.dat b/meson/test cases/common/59 install subdir/sub2/dircheck/excluded-three.dat new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/59 install subdir/sub2/excluded-three.dat b/meson/test cases/common/59 install subdir/sub2/excluded-three.dat new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/59 install subdir/sub2/excluded/two.dat b/meson/test cases/common/59 install subdir/sub2/excluded/two.dat new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/59 install subdir/sub2/one.dat b/meson/test cases/common/59 install subdir/sub2/one.dat new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/59 install subdir/sub_elided/dircheck/fifth.dat b/meson/test cases/common/59 install subdir/sub_elided/dircheck/fifth.dat new file mode 100644 index 000000000..b6ca0098f --- /dev/null +++ b/meson/test cases/common/59 install subdir/sub_elided/dircheck/fifth.dat @@ -0,0 +1 @@ +Data file in a subdir of elided directory. 
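The install_subdir() calls above combine three behaviours: exclusion lists, per-directory install modes, and strip_directory, which drops the named directory itself so its contents land directly under install_dir. The short Python sketch below is not part of the patch; it only illustrates the path mapping these data files exercise, and the helper name dest_paths() is made up for illustration.

#!/usr/bin/env python3
# Illustrative sketch only (not Meson code): map files under a source subdir
# to the install destinations the test above expects.
import os

def dest_paths(subdir, files, install_dir, strip_directory=False,
               exclude_files=(), exclude_dirs=()):
    out = []
    for rel in files:  # rel is relative to subdir, e.g. 'dircheck/fifth.dat'
        if rel in exclude_files:
            continue
        if any(rel == d or rel.startswith(d + '/') for d in exclude_dirs):
            continue
        prefix = install_dir if strip_directory else os.path.join(install_dir, os.path.basename(subdir))
        out.append(os.path.join(prefix, rel))
    return out

# 'sub_elided' is installed with strip_directory : true, so fourth.dat lands
# directly under share/ while dircheck/ keeps its own nesting.
print(dest_paths('sub_elided', ['fourth.dat', 'dircheck/fifth.dat'],
                 'share', strip_directory=True))
# 'sub2' keeps its directory name but drops the excluded file and directory;
# note the nested dircheck/excluded-three.dat is NOT excluded by name alone.
print(dest_paths('sub2', ['one.dat', 'excluded-three.dat', 'excluded/two.dat',
                          'dircheck/excluded-three.dat'],
                 'share', exclude_files=['excluded-three.dat'],
                 exclude_dirs=['excluded']))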
diff --git a/meson/test cases/common/59 install subdir/sub_elided/fourth.dat b/meson/test cases/common/59 install subdir/sub_elided/fourth.dat new file mode 100644 index 000000000..ca5f26a62 --- /dev/null +++ b/meson/test cases/common/59 install subdir/sub_elided/fourth.dat @@ -0,0 +1 @@ +Test that this file is installed directly into install_dir. diff --git a/meson/test cases/common/59 install subdir/subdir/meson.build b/meson/test cases/common/59 install subdir/subdir/meson.build new file mode 100644 index 000000000..0f81cdb8f --- /dev/null +++ b/meson/test cases/common/59 install subdir/subdir/meson.build @@ -0,0 +1,5 @@ +install_subdir('sub1', install_dir : 'share', + # This mode will be overridden by the mode set in the outer install_subdir + install_mode : 'rwxr-x---') + +install_subdir('sub_elided', install_dir : 'share', strip_directory : true) diff --git a/meson/test cases/common/59 install subdir/subdir/sub1/data1.dat b/meson/test cases/common/59 install subdir/subdir/sub1/data1.dat new file mode 100644 index 000000000..d83c370e3 --- /dev/null +++ b/meson/test cases/common/59 install subdir/subdir/sub1/data1.dat @@ -0,0 +1 @@ +This is a data file in a subdir. diff --git a/meson/test cases/common/59 install subdir/subdir/sub1/sub2/data2.dat b/meson/test cases/common/59 install subdir/subdir/sub1/sub2/data2.dat new file mode 100644 index 000000000..8ce1392b0 --- /dev/null +++ b/meson/test cases/common/59 install subdir/subdir/sub1/sub2/data2.dat @@ -0,0 +1 @@ +This is a data file in a deeper subdir. diff --git a/meson/test cases/common/59 install subdir/subdir/sub_elided/dircheck/seventh.dat b/meson/test cases/common/59 install subdir/subdir/sub_elided/dircheck/seventh.dat new file mode 100644 index 000000000..ea0b8dc5c --- /dev/null +++ b/meson/test cases/common/59 install subdir/subdir/sub_elided/dircheck/seventh.dat @@ -0,0 +1 @@ +Nested file in a subdir. diff --git a/meson/test cases/common/59 install subdir/subdir/sub_elided/sixth.dat b/meson/test cases/common/59 install subdir/subdir/sub_elided/sixth.dat new file mode 100644 index 000000000..140f07560 --- /dev/null +++ b/meson/test cases/common/59 install subdir/subdir/sub_elided/sixth.dat @@ -0,0 +1 @@ +Elide test file in a subdir. 
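The test.json hunk that follows lists every path the install step is expected to produce under the staging prefix. A rough Python sketch of such a manifest check is given here; it is not Meson's actual test harness (which, for example, only expects "pdb" entries when building with MSVC), and the function name check_manifest is illustrative.

#!/usr/bin/env python3
# Illustrative sketch only: verify that an install manifest in the style of
# the test.json below is satisfied by a staged install root (DESTDIR).
import json, os, sys

def check_manifest(manifest_path, destdir):
    with open(manifest_path) as f:
        entries = json.load(f)['installed']
    missing = []
    for entry in entries:
        path = os.path.join(destdir, entry['file'])
        if entry['type'] == 'dir':
            ok = os.path.isdir(path)
        else:  # 'file', 'exe', 'pdb', ... all reduce to a plain file check here
            ok = os.path.isfile(path)
        if not ok:
            missing.append(entry['file'])
    return missing

if __name__ == '__main__':
    bad = check_manifest(sys.argv[1], sys.argv[2])
    for f in bad:
        print('missing:', f)
    sys.exit(1 if bad else 0)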
diff --git a/meson/test cases/common/59 install subdir/test.json b/meson/test cases/common/59 install subdir/test.json new file mode 100644 index 000000000..0dd885cb5 --- /dev/null +++ b/meson/test cases/common/59 install subdir/test.json @@ -0,0 +1,17 @@ +{ + "installed": [ + {"type": "file", "file": "usr/share/dircheck/fifth.dat"}, + {"type": "file", "file": "usr/share/dircheck/seventh.dat"}, + {"type": "file", "file": "usr/share/dircheck/ninth.dat"}, + {"type": "file", "file": "usr/share/eighth.dat"}, + {"type": "file", "file": "usr/share/fourth.dat"}, + {"type": "file", "file": "usr/share/sixth.dat"}, + {"type": "file", "file": "usr/share/sub1/data1.dat"}, + {"type": "file", "file": "usr/share/sub1/second.dat"}, + {"type": "file", "file": "usr/share/sub1/third.dat"}, + {"type": "file", "file": "usr/share/sub1/sub2/data2.dat"}, + {"type": "file", "file": "usr/share/sub2/one.dat"}, + {"type": "file", "file": "usr/share/sub2/dircheck/excluded-three.dat"}, + {"type": "dir", "file": "usr/share/new_directory"} + ] +} diff --git a/meson/test cases/common/6 linkshared/cpplib.cpp b/meson/test cases/common/6 linkshared/cpplib.cpp new file mode 100644 index 000000000..247f8201a --- /dev/null +++ b/meson/test cases/common/6 linkshared/cpplib.cpp @@ -0,0 +1,6 @@ +#define BUILDING_DLL +#include "cpplib.h" + +int DLL_PUBLIC cppfunc(void) { + return 42; +} diff --git a/meson/test cases/common/6 linkshared/cpplib.h b/meson/test cases/common/6 linkshared/cpplib.h new file mode 100644 index 000000000..e2b02060d --- /dev/null +++ b/meson/test cases/common/6 linkshared/cpplib.h @@ -0,0 +1,12 @@ +/* See http://gcc.gnu.org/wiki/Visibility#How_to_use_the_new_C.2B-.2B-_visibility_support */ +#if defined(_WIN32) || defined(__CYGWIN__) + #ifdef BUILDING_DLL + #define DLL_PUBLIC __declspec(dllexport) + #else + #define DLL_PUBLIC __declspec(dllimport) + #endif +#else + #define DLL_PUBLIC __attribute__ ((visibility ("default"))) +#endif + +int DLL_PUBLIC cppfunc(void); diff --git a/meson/test cases/common/6 linkshared/cppmain.cpp b/meson/test cases/common/6 linkshared/cppmain.cpp new file mode 100644 index 000000000..29e9a4480 --- /dev/null +++ b/meson/test cases/common/6 linkshared/cppmain.cpp @@ -0,0 +1,5 @@ +#include "cpplib.h" + +int main(void) { + return cppfunc() != 42; +} diff --git a/meson/test cases/common/6 linkshared/libfile.c b/meson/test cases/common/6 linkshared/libfile.c new file mode 100644 index 000000000..91489b287 --- /dev/null +++ b/meson/test cases/common/6 linkshared/libfile.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func(void) { + return 0; +} diff --git a/meson/test cases/common/6 linkshared/main.c b/meson/test cases/common/6 linkshared/main.c new file mode 100644 index 000000000..77773274e --- /dev/null +++ b/meson/test cases/common/6 linkshared/main.c @@ -0,0 +1,11 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_IMPORT __declspec(dllimport) +#else + #define DLL_IMPORT +#endif + +int DLL_IMPORT func(void); + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/6 linkshared/meson.build b/meson/test cases/common/6 linkshared/meson.build new file mode 100644 index 000000000..846b4a09b --- /dev/null +++ b/meson/test cases/common/6 linkshared/meson.build @@ -0,0 +1,12 @@ 
+project('shared library linking test', 'c', 'cpp') + +lib = shared_library('mylib', + 'libfile.c' # Split to different lines before and after the comma to test parser. + , install : false) # Don't install libraries in common tests; the path is platform-specific +exe = executable('prog', 'main.c', link_with : lib, install : true) + +test('runtest', exe) + +cpplib = shared_library('mycpplib', 'cpplib.cpp') +cppexe = executable('cppprog', 'cppmain.cpp', link_with : cpplib) +test('cpptest', cppexe) diff --git a/meson/test cases/common/6 linkshared/test.json b/meson/test cases/common/6 linkshared/test.json new file mode 100644 index 000000000..067bca760 --- /dev/null +++ b/meson/test cases/common/6 linkshared/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + { "type": "exe", "file": "usr/bin/prog" }, + { "type": "pdb", "file": "usr/bin/prog" } + ] +} diff --git a/meson/test cases/common/60 foreach/meson.build b/meson/test cases/common/60 foreach/meson.build new file mode 100644 index 000000000..af60e0fbb --- /dev/null +++ b/meson/test cases/common/60 foreach/meson.build @@ -0,0 +1,53 @@ +project('foreach', 'c') + +tests = [['test1', 'prog1', 'prog1.c'], + ['test2', 'prog2', 'prog2.c', 'fallback'], + ['test3', 'prog3', 'prog3.c', 'urgh']] + +assert(tests[0].get(3, 'fallbck') == 'fallbck', 'array #1 fallback did not match') +assert(tests[1].get(3, 'failbk') == 'fallback', 'array #2 value did not match') +assert(tests[2].get(3, 'urgh') == 'urgh', 'array #3 value did not match') + +foreach i : tests + test(i.get(0), executable(i.get(1), i.get(2), install : true)) + + # Ensure that changing the tests variable does not + # affect ongoing iteration in the foreach loop. + # + # Being able to do that would make Meson Turing complete and + # we definitely don't want that. 
+ tests = ['test4', 'prog4', 'prog4.c'] +endforeach + +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach + +assert(result == ['a', 'b'], 'Continue or break in foreach failed') + +items = [] +iter = range(2) +foreach i : iter + items += i +endforeach +assert(items == [0, 1]) +assert(iter[1] == 1) + +items = [] +foreach i : range(1, 2) + items += i +endforeach +assert(items == [1]) + +items = [] +foreach i : range(1, 10, 2) + items += i +endforeach +assert(items == [1, 3, 5, 7, 9]) diff --git a/meson/test cases/common/60 foreach/prog1.c b/meson/test cases/common/60 foreach/prog1.c new file mode 100644 index 000000000..339dc494b --- /dev/null +++ b/meson/test cases/common/60 foreach/prog1.c @@ -0,0 +1,6 @@ +#include<stdio.h> + +int main(void) { + printf("This is test #1.\n"); + return 0; +} diff --git a/meson/test cases/common/60 foreach/prog2.c b/meson/test cases/common/60 foreach/prog2.c new file mode 100644 index 000000000..c2132883a --- /dev/null +++ b/meson/test cases/common/60 foreach/prog2.c @@ -0,0 +1,6 @@ +#include<stdio.h> + +int main(void) { + printf("This is test #2.\n"); + return 0; +} diff --git a/meson/test cases/common/60 foreach/prog3.c b/meson/test cases/common/60 foreach/prog3.c new file mode 100644 index 000000000..905a530c7 --- /dev/null +++ b/meson/test cases/common/60 foreach/prog3.c @@ -0,0 +1,6 @@ +#include<stdio.h> + +int main(void) { + printf("This is test #3.\n"); + return 0; +} diff --git a/meson/test cases/common/60 foreach/test.json b/meson/test cases/common/60 foreach/test.json new file mode 100644 index 000000000..2fc952d2c --- /dev/null +++ b/meson/test cases/common/60 foreach/test.json @@ -0,0 +1,10 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/prog1"}, + {"type": "pdb", "file": "usr/bin/prog1"}, + {"type": "exe", "file": "usr/bin/prog2"}, + {"type": "pdb", "file": "usr/bin/prog2"}, + {"type": "exe", "file": "usr/bin/prog3"}, + {"type": "pdb", "file": "usr/bin/prog3"} + ] +} diff --git a/meson/test cases/common/61 number arithmetic/meson.build b/meson/test cases/common/61 number arithmetic/meson.build new file mode 100644 index 000000000..e31d7e4e9 --- /dev/null +++ b/meson/test cases/common/61 number arithmetic/meson.build @@ -0,0 +1,76 @@ +project('number arithmetic', 'c') + +if 6 + 4 != 10 + error('Number addition is broken') +endif +if 6 - 4 != 2 + error('Number subtraction is broken') +endif + +if 6 * 4 != 24 + error('Number multiplication is broken') +endif +if 16 / 4 != 4 + error('Number division is broken') +endif + +#if (1 / 3) * 3 != 1 +# error('Float interconversion broken') +#endif +if (5 / 3) * 3 != 3 + error('Integer division is broken') +endif + +assert((5 % 2) == 1, 'Integer modulo (odd) is broken') +assert((4 % 2) == 0, 'Integer modulo (even) is broken') + +if 2 * 1 % 2 != 0 + error('Modulo precedence with multiplication is broken') +endif +if 2 + 1 % 2 != 3 + error('Modulo precedence with addition is broken') +endif +if 9 / 9 % 2 != 1 + error('Modulo precedence with division is broken') +endif +if 9 - 9 % 2 != 8 + error('Modulo precedence with subtraction is broken') +endif + +assert(2.is_even(), 'int is_even() broken for even value') +assert(not(2.is_odd()), 'int is_odd() broken for even value') +assert(not(3.is_even()), 'int is_even() broken for odd value') +assert(3.is_odd(), 'int is_odd() broken for odd value') + +assert(3 < 4, 'Lt broken') +assert(not(4 < 3), 'Lt broken') +assert(3 <= 4, 'Lte broken') +assert(not(4 <= 3), 'Lte 
broken') +assert(3 <= 3, 'Lte broken') + +assert(4 > 3, 'Gt broken') +assert(not(3 > 4), 'Gt broken') +assert(4 >= 3, 'Gte broken') +assert(not(3 >= 4), 'Gte broken') +assert(3 >= 3, 'Gte broken') + +assert(true.to_int() == 1,'bool to_int() broken') +assert(false.to_int() == 0,'bool to_int() broken') + +hex_255 = 0xff +hex2_255 = 0XFF + +assert(hex_255 == 255, 'Hex parsing is broken.') +assert(hex2_255 == 255, 'Uppercase hex parsing is broken.') + +bin_123 = 0b1111011 +bin2_123 = 0B1111011 + +assert(bin_123 == 123, 'Bin number parsing is broken.') +assert(bin2_123 == 123, 'Uppercase bin number parsing is broken.') + +oct_493 = 0o755 +oct2_493 = 0O755 + +assert(oct_493 == 493, 'Oct number parsing is broken.') +assert(oct2_493 == 493, 'Uppercase oct number parsing is broken.') diff --git a/meson/test cases/common/62 string arithmetic/meson.build b/meson/test cases/common/62 string arithmetic/meson.build new file mode 100644 index 000000000..59567de49 --- /dev/null +++ b/meson/test cases/common/62 string arithmetic/meson.build @@ -0,0 +1,16 @@ +project('string arithmetic', 'c') + +if 'foo' + 'bar' != 'foobar' + error('String concatenation is broken') +endif + +if 'foo' + 'bar' + 'baz' != 'foobarbaz' + error('Many-string concatenation is broken') +endif + +a = 'a' +b = 'b' + +if a + b + 'c' != 'abc' + error('String concat with variables is broken.') +endif diff --git a/meson/test cases/common/63 array arithmetic/meson.build b/meson/test cases/common/63 array arithmetic/meson.build new file mode 100644 index 000000000..8b8785afc --- /dev/null +++ b/meson/test cases/common/63 array arithmetic/meson.build @@ -0,0 +1,15 @@ +project('array arithmetic', 'c') + +array1 = ['foo', 'bar'] +array2 = ['qux', 'baz'] + +if array1 + array2 != ['foo', 'bar', 'qux', 'baz'] + error('Array concatenation is broken') +endif +if array2 + array1 != ['qux', 'baz', 'foo', 'bar'] + error('Array concatenation is broken') +endif + +if array1 + array1 + array1 != ['foo', 'bar', 'foo', 'bar', 'foo', 'bar'] + error('Many-array concatenation is broken') +endif diff --git a/meson/test cases/common/64 arithmetic bidmas/meson.build b/meson/test cases/common/64 arithmetic bidmas/meson.build new file mode 100644 index 000000000..c7334b48f --- /dev/null +++ b/meson/test cases/common/64 arithmetic bidmas/meson.build @@ -0,0 +1,15 @@ +project('arithmetic bidmas', 'c') + +if 5 * 3 - 6 / 2 + 1 != 13 + error('Arithmetic bidmas broken') +endif +if 5 * (3 - 6 / 2) + 1 != 1 + error('Arithmetic bidmas with brackets broken') +endif + +if 5 * 12 / 2 * 3 != 90 + error('Sequential multiplication and division broken') +endif +if 5 * (12 / (2 * 3)) != 10 + error('Sequential multiplication and division with brackets broken') +endif diff --git a/meson/test cases/common/65 build always/main.c b/meson/test cases/common/65 build always/main.c new file mode 100644 index 000000000..a9ee55ea1 --- /dev/null +++ b/meson/test cases/common/65 build always/main.c @@ -0,0 +1,7 @@ +#include +#include"version.h" + +int main(void) { + printf("Version is %s.\n", version_string); + return 0; +} diff --git a/meson/test cases/common/65 build always/meson.build b/meson/test cases/common/65 build always/meson.build new file mode 100644 index 000000000..f720c8943 --- /dev/null +++ b/meson/test cases/common/65 build always/meson.build @@ -0,0 +1,14 @@ +project('run always', 'c') + +version = '1.0.0' + +vgen = find_program('version_gen.py') + +version_src = custom_target('Version string', +input : 'version.c.in', +output : 'version.c', +command : [vgen, '@INPUT@', 
'@OUTPUT@', version], +build_always : true, +) + +executable('versionprinter', 'main.c', version_src) diff --git a/meson/test cases/common/65 build always/version.c.in b/meson/test cases/common/65 build always/version.c.in new file mode 100644 index 000000000..619e51786 --- /dev/null +++ b/meson/test cases/common/65 build always/version.c.in @@ -0,0 +1,3 @@ +#include"version.h" + +const char *version_string = "@VERSION@"; diff --git a/meson/test cases/common/65 build always/version.h b/meson/test cases/common/65 build always/version.h new file mode 100644 index 000000000..7d433f035 --- /dev/null +++ b/meson/test cases/common/65 build always/version.h @@ -0,0 +1,3 @@ +#pragma once + +extern const char *version_string; diff --git a/meson/test cases/common/65 build always/version_gen.py b/meson/test cases/common/65 build always/version_gen.py new file mode 100755 index 000000000..fbe2df93f --- /dev/null +++ b/meson/test cases/common/65 build always/version_gen.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 + +import sys, os, subprocess + +def generate(infile, outfile, fallback): + workdir = os.path.split(infile)[0] + if workdir == '': + workdir = '.' + try: + version = subprocess.check_output(['git', 'describe'], cwd=workdir).decode().strip() + except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): + version = fallback + with open(infile) as f: + newdata = f.read().replace('@VERSION@', version) + try: + with open(outfile) as f: + olddata = f.read() + if olddata == newdata: + return + except OSError: + pass + with open(outfile, 'w') as f: + f.write(newdata) + +if __name__ == '__main__': + infile = sys.argv[1] + outfile = sys.argv[2] + fallback = sys.argv[3] + generate(infile, outfile, fallback) diff --git a/meson/test cases/common/66 vcstag/meson.build b/meson/test cases/common/66 vcstag/meson.build new file mode 100644 index 000000000..7e5983aaa --- /dev/null +++ b/meson/test cases/common/66 vcstag/meson.build @@ -0,0 +1,18 @@ +project('vcstag', 'c') + +version_src = vcs_tag(input : 'vcstag.c.in', +output : 'vcstag.c', +fallback : '1.0.0') + +version_src_custom = vcs_tag(input : 'vcstag.c.in', +output : 'vcstag-custom.c', +command : ['git', 'show-ref', '-s', 'refs/heads/master'], +fallback : '1.0.0') + +version_src_fallback = vcs_tag(input : 'vcstag.c.in', +output : 'vcstag-fallback.c') + +executable('tagprog', 'tagprog.c', version_src) +executable('tagprog-custom', 'tagprog.c', version_src_custom) +executable('tagprog-fallback', 'tagprog.c', version_src_fallback) + diff --git a/meson/test cases/common/66 vcstag/tagprog.c b/meson/test cases/common/66 vcstag/tagprog.c new file mode 100644 index 000000000..27c3cc58d --- /dev/null +++ b/meson/test cases/common/66 vcstag/tagprog.c @@ -0,0 +1,9 @@ +#include + +extern const char *vcstag; + +int main(void) { + printf("Version is %s\n", vcstag); + return 0; +} + diff --git a/meson/test cases/common/66 vcstag/vcstag.c.in b/meson/test cases/common/66 vcstag/vcstag.c.in new file mode 100644 index 000000000..09192d90d --- /dev/null +++ b/meson/test cases/common/66 vcstag/vcstag.c.in @@ -0,0 +1,2 @@ +const char *vcstag = "@VCS_TAG@"; + diff --git a/meson/test cases/common/67 modules/meson.build b/meson/test cases/common/67 modules/meson.build new file mode 100644 index 000000000..ad33ed6d4 --- /dev/null +++ b/meson/test cases/common/67 modules/meson.build @@ -0,0 +1,14 @@ +project('module test', 'c') + +modtest = import('modtest') +modtest.print_hello() +assert(modtest.found()) + +modtest = import('modtest', required : get_option('disabled')) 
+assert(not modtest.found()) + +notfound = import('not-found', required : false) +assert(not notfound.found()) + +disabled = import('not-found', required : false, disabler : true) +assert(is_disabler(disabled)) diff --git a/meson/test cases/common/67 modules/meson_options.txt b/meson/test cases/common/67 modules/meson_options.txt new file mode 100644 index 000000000..06711447d --- /dev/null +++ b/meson/test cases/common/67 modules/meson_options.txt @@ -0,0 +1,6 @@ +option( + 'disabled', + type : 'feature', + value : 'disabled', + description : 'test disabled' +) diff --git a/meson/test cases/common/68 should fail/failing.c b/meson/test cases/common/68 should fail/failing.c new file mode 100644 index 000000000..3e70e5079 --- /dev/null +++ b/meson/test cases/common/68 should fail/failing.c @@ -0,0 +1,3 @@ +int main(void) { + return 1; +} diff --git a/meson/test cases/common/68 should fail/meson.build b/meson/test cases/common/68 should fail/meson.build new file mode 100644 index 000000000..dffbbb381 --- /dev/null +++ b/meson/test cases/common/68 should fail/meson.build @@ -0,0 +1,4 @@ +project('should fail', 'c') + +exe = executable('prog', 'failing.c') +test('failing', exe, should_fail : true) diff --git a/meson/test cases/common/69 configure file in custom target/inc/confdata.in b/meson/test cases/common/69 configure file in custom target/inc/confdata.in new file mode 100644 index 000000000..e44cdea20 --- /dev/null +++ b/meson/test cases/common/69 configure file in custom target/inc/confdata.in @@ -0,0 +1 @@ +@VALUE@ diff --git a/meson/test cases/common/69 configure file in custom target/inc/meson.build b/meson/test cases/common/69 configure file in custom target/inc/meson.build new file mode 100644 index 000000000..05d2dcb8a --- /dev/null +++ b/meson/test cases/common/69 configure file in custom target/inc/meson.build @@ -0,0 +1,6 @@ +cdata = configuration_data() +cdata.set('VALUE', '42') + +cfile = configure_file(input : 'confdata.in', +output : 'confdata', +configuration : cdata) diff --git a/meson/test cases/common/69 configure file in custom target/meson.build b/meson/test cases/common/69 configure file in custom target/meson.build new file mode 100644 index 000000000..0a850a1a6 --- /dev/null +++ b/meson/test cases/common/69 configure file in custom target/meson.build @@ -0,0 +1,4 @@ +project('conf file in custom target', 'c') + +subdir('inc') +subdir('src') diff --git a/meson/test cases/common/69 configure file in custom target/src/meson.build b/meson/test cases/common/69 configure file in custom target/src/meson.build new file mode 100644 index 000000000..e0ab9ebb2 --- /dev/null +++ b/meson/test cases/common/69 configure file in custom target/src/meson.build @@ -0,0 +1,20 @@ +custom_target('thing', +output : 'final.dat', +input : cfile, +command : [find_program('mycompiler.py'), '@INPUT@', '@OUTPUT@']) + +# Test usage of a `configure_file` as part of the command list +py3 = find_program('python3', required : false) +if not py3.found() + # Maybe 'python' is Python 3 + py3 = find_program('python') +endif + +compiler = configure_file(input : 'mycompiler.py', + output : 'mycompiler2.py', + copy: true) + +custom_target('thing2', +output : 'final2.dat', +input : cfile, +command : [py3, compiler, '@INPUT@', '@OUTPUT@']) diff --git a/meson/test cases/common/69 configure file in custom target/src/mycompiler.py b/meson/test cases/common/69 configure file in custom target/src/mycompiler.py new file mode 100644 index 000000000..b00c862db --- /dev/null +++ b/meson/test cases/common/69 configure 
file in custom target/src/mycompiler.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 + +import sys + +with open(sys.argv[1]) as ifile: + if ifile.readline().strip() != '42': + print('Incorrect input') +with open(sys.argv[2], 'w') as ofile: + ofile.write('Success\n') diff --git a/meson/test cases/common/7 mixed/func.c b/meson/test cases/common/7 mixed/func.c new file mode 100644 index 000000000..c76c23dbc --- /dev/null +++ b/meson/test cases/common/7 mixed/func.c @@ -0,0 +1,4 @@ +int func(void) { + int class = 0; + return class; +} diff --git a/meson/test cases/common/7 mixed/main.cc b/meson/test cases/common/7 mixed/main.cc new file mode 100644 index 000000000..9a7a7a65c --- /dev/null +++ b/meson/test cases/common/7 mixed/main.cc @@ -0,0 +1,7 @@ +extern "C" int func(); + +class BreakPlainCCompiler; + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/7 mixed/meson.build b/meson/test cases/common/7 mixed/meson.build new file mode 100644 index 000000000..af88a1e4b --- /dev/null +++ b/meson/test cases/common/7 mixed/meson.build @@ -0,0 +1,3 @@ +project('mixed C and C++', 'c', 'cpp') +exe = executable('prog', 'main.cc', 'func.c') +test('mixtest', exe) diff --git a/meson/test cases/common/70 external test program/meson.build b/meson/test cases/common/70 external test program/meson.build new file mode 100644 index 000000000..d18ddcde5 --- /dev/null +++ b/meson/test cases/common/70 external test program/meson.build @@ -0,0 +1,3 @@ +project('test is external', 'c') + +test('external', find_program('mytest.py'), args : ['correct']) diff --git a/meson/test cases/common/70 external test program/mytest.py b/meson/test cases/common/70 external test program/mytest.py new file mode 100755 index 000000000..fee94e03f --- /dev/null +++ b/meson/test cases/common/70 external test program/mytest.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + + +import sys + +if sys.argv[1] == 'correct': + print('Argument is correct.') + sys.exit(0) +print('Argument is incorrect:', sys.argv[1]) +sys.exit(1) diff --git a/meson/test cases/common/71 ctarget dependency/gen1.py b/meson/test cases/common/71 ctarget dependency/gen1.py new file mode 100755 index 000000000..dbadb6d92 --- /dev/null +++ b/meson/test cases/common/71 ctarget dependency/gen1.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 + +import time, sys + +# Make sure other script runs first if dependency +# is missing. +time.sleep(0.5) + +with open(sys.argv[1]) as f: + contents = f.read() +with open(sys.argv[2], 'w') as f: + f.write(contents) diff --git a/meson/test cases/common/71 ctarget dependency/gen2.py b/meson/test cases/common/71 ctarget dependency/gen2.py new file mode 100755 index 000000000..dc6525b9d --- /dev/null +++ b/meson/test cases/common/71 ctarget dependency/gen2.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +import sys, os +from glob import glob + +files = glob(os.path.join(sys.argv[1], '*.tmp')) +assert(len(files) == 1) + +with open(files[0]) as ifile, open(sys.argv[2], 'w') as ofile: + ofile.write(ifile.read()) diff --git a/meson/test cases/common/71 ctarget dependency/input.dat b/meson/test cases/common/71 ctarget dependency/input.dat new file mode 100644 index 000000000..7af91e29a --- /dev/null +++ b/meson/test cases/common/71 ctarget dependency/input.dat @@ -0,0 +1 @@ +This is a piece of text. 
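In the "71 ctarget dependency" case, gen2.py discovers its input by globbing the build directory rather than through an input : argument, so nothing on its command line tells the backend that gen1.py must run first; the depends : keyword in the meson.build hunk that follows supplies that ordering edge explicitly. The small Python driver below is only an illustration of the same two-step pipeline run in the required order (the driver itself is not part of the patch; it assumes gen1.py, gen2.py and input.dat sit next to it).

#!/usr/bin/env python3
# Illustrative driver, not part of the patch: run the two generators in the
# only order that works, mirroring what 'depends : c1' guarantees in Ninja.
import os, subprocess, sys, tempfile

def run_pipeline(srcdir):
    builddir = tempfile.mkdtemp()
    # Step 1: gen1.py copies input.dat to medput.tmp in the build dir.
    subprocess.check_call([sys.executable, os.path.join(srcdir, 'gen1.py'),
                           os.path.join(srcdir, 'input.dat'),
                           os.path.join(builddir, 'medput.tmp')])
    # Step 2: gen2.py globs *.tmp in the build dir; run before step 1 it
    # would find nothing and its assert on exactly one file would fail.
    subprocess.check_call([sys.executable, os.path.join(srcdir, 'gen2.py'),
                           builddir, os.path.join(builddir, 'output.dat')])
    return os.path.join(builddir, 'output.dat')

if __name__ == '__main__':
    out = run_pipeline(os.path.dirname(os.path.abspath(__file__)))
    print(open(out).read().strip())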
diff --git a/meson/test cases/common/71 ctarget dependency/meson.build b/meson/test cases/common/71 ctarget dependency/meson.build new file mode 100644 index 000000000..cd11951b2 --- /dev/null +++ b/meson/test cases/common/71 ctarget dependency/meson.build @@ -0,0 +1,20 @@ +project('custom target dependency', 'c') + +# Sometimes custom targets do not take input files +# but instead do globbing or some similar wackiness. +# In this case we need to be able to specify a +# manual dependency between two custom targets, +# if one needs to be run before the other. + +g1 = find_program('gen1.py') +g2 = find_program('gen2.py') + +c1 = custom_target('medput', +input : 'input.dat', +output : 'medput.tmp', +command : [g1, '@INPUT@', '@OUTPUT@']) + +custom_target('output', +output : 'output.dat', +command : [g2, '@OUTDIR@', '@OUTPUT@'], +depends : c1) diff --git a/meson/test cases/common/72 shared subproject/a.c b/meson/test cases/common/72 shared subproject/a.c new file mode 100644 index 000000000..7510a1b55 --- /dev/null +++ b/meson/test cases/common/72 shared subproject/a.c @@ -0,0 +1,13 @@ +#include +char func_b(void); +char func_c(void); + +int main(void) { + if(func_b() != 'b') { + return 1; + } + if(func_c() != 'c') { + return 2; + } + return 0; +} diff --git a/meson/test cases/common/72 shared subproject/meson.build b/meson/test cases/common/72 shared subproject/meson.build new file mode 100644 index 000000000..6803d519d --- /dev/null +++ b/meson/test cases/common/72 shared subproject/meson.build @@ -0,0 +1,10 @@ +project('A', 'c') + +B = subproject('B') +b = B.get_variable('b') + +C = subproject('C') +c = C.get_variable('c') + +a = executable('a', 'a.c', link_with : [b, c]) +test('a test', a) diff --git a/meson/test cases/common/72 shared subproject/subprojects/B/b.c b/meson/test cases/common/72 shared subproject/subprojects/B/b.c new file mode 100644 index 000000000..b8c7d83a1 --- /dev/null +++ b/meson/test cases/common/72 shared subproject/subprojects/B/b.c @@ -0,0 +1,21 @@ +#include +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + + +char func_c(void); + +char DLL_PUBLIC func_b(void) { + if(func_c() != 'c') { + exit(3); + } + return 'b'; +} diff --git a/meson/test cases/common/72 shared subproject/subprojects/B/meson.build b/meson/test cases/common/72 shared subproject/subprojects/B/meson.build new file mode 100644 index 000000000..8f4cb023e --- /dev/null +++ b/meson/test cases/common/72 shared subproject/subprojects/B/meson.build @@ -0,0 +1,4 @@ +project('B', 'c') +C = subproject('C') +c = C.get_variable('c') +b = library('b', 'b.c', link_with : c) diff --git a/meson/test cases/common/72 shared subproject/subprojects/C/c.c b/meson/test cases/common/72 shared subproject/subprojects/C/c.c new file mode 100644 index 000000000..facd19943 --- /dev/null +++ b/meson/test cases/common/72 shared subproject/subprojects/C/c.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_c(void) { + return 'c'; +} diff --git a/meson/test cases/common/72 shared subproject/subprojects/C/meson.build 
b/meson/test cases/common/72 shared subproject/subprojects/C/meson.build new file mode 100644 index 000000000..5d890977e --- /dev/null +++ b/meson/test cases/common/72 shared subproject/subprojects/C/meson.build @@ -0,0 +1,2 @@ +project('C', 'c') +c = library('c', 'c.c') diff --git a/meson/test cases/common/73 shared subproject 2/a.c b/meson/test cases/common/73 shared subproject 2/a.c new file mode 100644 index 000000000..7510a1b55 --- /dev/null +++ b/meson/test cases/common/73 shared subproject 2/a.c @@ -0,0 +1,13 @@ +#include +char func_b(void); +char func_c(void); + +int main(void) { + if(func_b() != 'b') { + return 1; + } + if(func_c() != 'c') { + return 2; + } + return 0; +} diff --git a/meson/test cases/common/73 shared subproject 2/meson.build b/meson/test cases/common/73 shared subproject 2/meson.build new file mode 100644 index 000000000..064732597 --- /dev/null +++ b/meson/test cases/common/73 shared subproject 2/meson.build @@ -0,0 +1,13 @@ +project('A', 'c') + +# Same as the previous test but use C and B in +# the opposite order. + +C = subproject('C') +c = C.get_variable('c') + +B = subproject('B') +b = B.get_variable('b') + +a = executable('a', 'a.c', link_with : [b, c]) +test('a test', a) diff --git a/meson/test cases/common/73 shared subproject 2/subprojects/B/b.c b/meson/test cases/common/73 shared subproject 2/subprojects/B/b.c new file mode 100644 index 000000000..4d71c0f9d --- /dev/null +++ b/meson/test cases/common/73 shared subproject 2/subprojects/B/b.c @@ -0,0 +1,20 @@ +#include +char func_c(void); + +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_b(void) { + if(func_c() != 'c') { + exit(3); + } + return 'b'; +} diff --git a/meson/test cases/common/73 shared subproject 2/subprojects/B/meson.build b/meson/test cases/common/73 shared subproject 2/subprojects/B/meson.build new file mode 100644 index 000000000..8f4cb023e --- /dev/null +++ b/meson/test cases/common/73 shared subproject 2/subprojects/B/meson.build @@ -0,0 +1,4 @@ +project('B', 'c') +C = subproject('C') +c = C.get_variable('c') +b = library('b', 'b.c', link_with : c) diff --git a/meson/test cases/common/73 shared subproject 2/subprojects/C/c.c b/meson/test cases/common/73 shared subproject 2/subprojects/C/c.c new file mode 100644 index 000000000..facd19943 --- /dev/null +++ b/meson/test cases/common/73 shared subproject 2/subprojects/C/c.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_c(void) { + return 'c'; +} diff --git a/meson/test cases/common/73 shared subproject 2/subprojects/C/meson.build b/meson/test cases/common/73 shared subproject 2/subprojects/C/meson.build new file mode 100644 index 000000000..5d890977e --- /dev/null +++ b/meson/test cases/common/73 shared subproject 2/subprojects/C/meson.build @@ -0,0 +1,2 @@ +project('C', 'c') +c = library('c', 'c.c') diff --git a/meson/test cases/common/74 file object/lib.c b/meson/test cases/common/74 file object/lib.c new file mode 100644 index 000000000..91800303c --- /dev/null +++ b/meson/test cases/common/74 file 
object/lib.c @@ -0,0 +1,3 @@ +int func(void) { + return 0; +} diff --git a/meson/test cases/common/74 file object/meson.build b/meson/test cases/common/74 file object/meson.build new file mode 100644 index 000000000..c3ecb7be9 --- /dev/null +++ b/meson/test cases/common/74 file object/meson.build @@ -0,0 +1,9 @@ +project('file object', 'c') + +prog0 = files('prog.c') +lib0 = files('lib.c') +test('fobj', executable('fobj', prog0, lib0)) + +subdir('subdir1') +subdir('subdir2') + diff --git a/meson/test cases/common/74 file object/prog.c b/meson/test cases/common/74 file object/prog.c new file mode 100644 index 000000000..78c6acc27 --- /dev/null +++ b/meson/test cases/common/74 file object/prog.c @@ -0,0 +1,13 @@ +#include<stdio.h> + +int func(void); /* Files in different subdirs return different values. */ + +int main(void) { + if(func() == 0) { + printf("Iz success.\n"); + } else { + printf("Iz fail.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/74 file object/subdir1/lib.c b/meson/test cases/common/74 file object/subdir1/lib.c new file mode 100644 index 000000000..8c13c2126 --- /dev/null +++ b/meson/test cases/common/74 file object/subdir1/lib.c @@ -0,0 +1,3 @@ +int func(void) { + return 1; +} diff --git a/meson/test cases/common/74 file object/subdir1/meson.build b/meson/test cases/common/74 file object/subdir1/meson.build new file mode 100644 index 000000000..f5066f00f --- /dev/null +++ b/meson/test cases/common/74 file object/subdir1/meson.build @@ -0,0 +1,7 @@ +prog1 = files('prog.c') +lib1 = files('lib.c') + +test('subdir0', executable('subdir0', prog0, lib1), should_fail : true) +test('subdir1', executable('subdir1', prog1, lib0), should_fail : true) + +test('subdir2', executable('subdir2', prog1, lib1)) \ No newline at end of file diff --git a/meson/test cases/common/74 file object/subdir1/prog.c b/meson/test cases/common/74 file object/subdir1/prog.c new file mode 100644 index 000000000..38d13d242 --- /dev/null +++ b/meson/test cases/common/74 file object/subdir1/prog.c @@ -0,0 +1,13 @@ +#include<stdio.h> + +int func(void); + +int main(void) { + if(func() == 1) { + printf("Iz success.\n"); + } else { + printf("Iz fail.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/74 file object/subdir2/lib.c b/meson/test cases/common/74 file object/subdir2/lib.c new file mode 100644 index 000000000..8e2b4ebfb --- /dev/null +++ b/meson/test cases/common/74 file object/subdir2/lib.c @@ -0,0 +1,3 @@ +int func(void) { + return 2; +} diff --git a/meson/test cases/common/74 file object/subdir2/meson.build b/meson/test cases/common/74 file object/subdir2/meson.build new file mode 100644 index 000000000..588651019 --- /dev/null +++ b/meson/test cases/common/74 file object/subdir2/meson.build @@ -0,0 +1,7 @@ +prog2 = files('prog.c') +lib2 = files('lib.c') + +test('subdir3', executable('subdir3', prog1, lib2), should_fail : true) +test('subdir4', executable('subdir4', prog2, lib1), should_fail : true) + +test('subdir4', executable('subdir5', prog2, lib2)) \ No newline at end of file diff --git a/meson/test cases/common/74 file object/subdir2/prog.c b/meson/test cases/common/74 file object/subdir2/prog.c new file mode 100644 index 000000000..8a8f0d016 --- /dev/null +++ b/meson/test cases/common/74 file object/subdir2/prog.c @@ -0,0 +1,13 @@ +#include<stdio.h> + +int func(void); + +int main(void) { + if(func() == 2) { + printf("Iz success.\n"); + } else { + printf("Iz fail.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/75 custom subproject dir/a.c 
b/meson/test cases/common/75 custom subproject dir/a.c new file mode 100644 index 000000000..7510a1b55 --- /dev/null +++ b/meson/test cases/common/75 custom subproject dir/a.c @@ -0,0 +1,13 @@ +#include +char func_b(void); +char func_c(void); + +int main(void) { + if(func_b() != 'b') { + return 1; + } + if(func_c() != 'c') { + return 2; + } + return 0; +} diff --git a/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/b.c b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/b.c new file mode 100644 index 000000000..4d71c0f9d --- /dev/null +++ b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/b.c @@ -0,0 +1,20 @@ +#include +char func_c(void); + +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_b(void) { + if(func_c() != 'c') { + exit(3); + } + return 'b'; +} diff --git a/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/meson.build b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/meson.build new file mode 100644 index 000000000..280c60ce2 --- /dev/null +++ b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/B/meson.build @@ -0,0 +1,4 @@ +project('B', 'c') +C = subproject('C') +c = C.get_variable('c') +b = shared_library('b', 'b.c', link_with : c) diff --git a/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/c.c b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/c.c new file mode 100644 index 000000000..facd19943 --- /dev/null +++ b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/c.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +char DLL_PUBLIC func_c(void) { + return 'c'; +} diff --git a/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/meson.build b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/meson.build new file mode 100644 index 000000000..abf0b1e26 --- /dev/null +++ b/meson/test cases/common/75 custom subproject dir/custom_subproject_dir/C/meson.build @@ -0,0 +1,2 @@ +project('C', 'c') +c = shared_library('c', 'c.c') diff --git a/meson/test cases/common/75 custom subproject dir/meson.build b/meson/test cases/common/75 custom subproject dir/meson.build new file mode 100644 index 000000000..d9ba649b1 --- /dev/null +++ b/meson/test cases/common/75 custom subproject dir/meson.build @@ -0,0 +1,10 @@ +project('A', 'c', subproject_dir:'custom_subproject_dir') + +B = subproject('B') +b = B.get_variable('b') + +C = subproject('C') +c = C.get_variable('c') + +a = executable('a', 'a.c', link_with : [b, c]) +test('a test', a) diff --git a/meson/test cases/common/76 has type/meson.build b/meson/test cases/common/76 has type/meson.build new file mode 100644 index 000000000..de8dbc834 --- /dev/null +++ b/meson/test cases/common/76 has type/meson.build @@ -0,0 +1,13 @@ +project('has type', 'c', 'cpp') + +compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')] + +foreach cc : compilers + if not cc.has_type('time_t', 
prefix : '#include<time.h>') + error('Did not detect type that exists.') + endif + + if cc.has_type('no_time_t', prefix : '#include<time.h>') + error('Not existing type found.') + endif +endforeach diff --git a/meson/test cases/common/77 extract from nested subdir/meson.build b/meson/test cases/common/77 extract from nested subdir/meson.build new file mode 100644 index 000000000..000e1aa6a --- /dev/null +++ b/meson/test cases/common/77 extract from nested subdir/meson.build @@ -0,0 +1,8 @@ +project('Extract objects from subdirs', 'c') + +if meson.is_unity() + message('Unity build: skipping incompatible test') +else + subdir('src') + subdir('tst') +endif diff --git a/meson/test cases/common/77 extract from nested subdir/src/first/lib_first.c b/meson/test cases/common/77 extract from nested subdir/src/first/lib_first.c new file mode 100644 index 000000000..5fc32678c --- /dev/null +++ b/meson/test cases/common/77 extract from nested subdir/src/first/lib_first.c @@ -0,0 +1,3 @@ +int first(void) { + return 1001; +} diff --git a/meson/test cases/common/77 extract from nested subdir/src/first/meson.build b/meson/test cases/common/77 extract from nested subdir/src/first/meson.build new file mode 100644 index 000000000..b97aef44e --- /dev/null +++ b/meson/test cases/common/77 extract from nested subdir/src/first/meson.build @@ -0,0 +1 @@ +first_lib = shared_library('first_lib', 'lib_first.c') diff --git a/meson/test cases/common/77 extract from nested subdir/src/meson.build b/meson/test cases/common/77 extract from nested subdir/src/meson.build new file mode 100644 index 000000000..3f5ec3200 --- /dev/null +++ b/meson/test cases/common/77 extract from nested subdir/src/meson.build @@ -0,0 +1 @@ +subdir('first') diff --git a/meson/test cases/common/77 extract from nested subdir/tst/first/exe_first.c b/meson/test cases/common/77 extract from nested subdir/tst/first/exe_first.c new file mode 100644 index 000000000..e8188cd25 --- /dev/null +++ b/meson/test cases/common/77 extract from nested subdir/tst/first/exe_first.c @@ -0,0 +1,5 @@ +int first(void); + +int main(void) { + return first() - 1001; +} diff --git a/meson/test cases/common/77 extract from nested subdir/tst/first/meson.build b/meson/test cases/common/77 extract from nested subdir/tst/first/meson.build new file mode 100644 index 000000000..a6fa7da1b --- /dev/null +++ b/meson/test cases/common/77 extract from nested subdir/tst/first/meson.build @@ -0,0 +1,4 @@ +first_exe = executable('first_exe', 'exe_first.c', + objects : first_lib.extract_objects('lib_first.c')) + +test('first_test', first_exe) diff --git a/meson/test cases/common/77 extract from nested subdir/tst/meson.build b/meson/test cases/common/77 extract from nested subdir/tst/meson.build new file mode 100644 index 000000000..3f5ec3200 --- /dev/null +++ b/meson/test cases/common/77 extract from nested subdir/tst/meson.build @@ -0,0 +1 @@ +subdir('first') diff --git a/meson/test cases/common/78 internal dependency/meson.build b/meson/test cases/common/78 internal dependency/meson.build new file mode 100644 index 000000000..6faedb09e --- /dev/null +++ b/meson/test cases/common/78 internal dependency/meson.build @@ -0,0 +1,4 @@ +project('internal dependency', 'c') + +subdir('proj1') +subdir('src') diff --git a/meson/test cases/common/78 internal dependency/proj1/include/proj1.h b/meson/test cases/common/78 internal dependency/proj1/include/proj1.h new file mode 100644 index 000000000..ecb1e4b6f --- /dev/null +++ b/meson/test cases/common/78 internal dependency/proj1/include/proj1.h @@ -0,0 +1,5 @@ 
+#pragma once + +void proj1_func1(void); +void proj1_func2(void); +void proj1_func3(void); diff --git a/meson/test cases/common/78 internal dependency/proj1/meson.build b/meson/test cases/common/78 internal dependency/proj1/meson.build new file mode 100644 index 000000000..422021edb --- /dev/null +++ b/meson/test cases/common/78 internal dependency/proj1/meson.build @@ -0,0 +1,11 @@ +incdirs = include_directories('include') + +p1lib = static_library('proj1', 'proj1f1.c', + include_directories : incdirs +) + +indirect_source = files('proj1f2.c') + +proj1_dep = declare_dependency(include_directories : incdirs, + link_with : p1lib, + sources : ['proj1f3.c', indirect_source]) diff --git a/meson/test cases/common/78 internal dependency/proj1/proj1f1.c b/meson/test cases/common/78 internal dependency/proj1/proj1f1.c new file mode 100644 index 000000000..69fa823e1 --- /dev/null +++ b/meson/test cases/common/78 internal dependency/proj1/proj1f1.c @@ -0,0 +1,6 @@ +#include<stdio.h> +#include<proj1.h> + +void proj1_func1(void) { + printf("In proj1_func1.\n"); +} diff --git a/meson/test cases/common/78 internal dependency/proj1/proj1f2.c b/meson/test cases/common/78 internal dependency/proj1/proj1f2.c new file mode 100644 index 000000000..7dd621c3a --- /dev/null +++ b/meson/test cases/common/78 internal dependency/proj1/proj1f2.c @@ -0,0 +1,6 @@ +#include<stdio.h> +#include<proj1.h> + +void proj1_func2(void) { + printf("In proj1_func2.\n"); +} diff --git a/meson/test cases/common/78 internal dependency/proj1/proj1f3.c b/meson/test cases/common/78 internal dependency/proj1/proj1f3.c new file mode 100644 index 000000000..2861ddcf6 --- /dev/null +++ b/meson/test cases/common/78 internal dependency/proj1/proj1f3.c @@ -0,0 +1,6 @@ +#include<stdio.h> +#include<proj1.h> + +void proj1_func3(void) { + printf("In proj1_func3.\n"); +} diff --git a/meson/test cases/common/78 internal dependency/src/main.c b/meson/test cases/common/78 internal dependency/src/main.c new file mode 100644 index 000000000..dbb7c4a06 --- /dev/null +++ b/meson/test cases/common/78 internal dependency/src/main.c @@ -0,0 +1,10 @@ +#include<stdio.h> +#include<proj1.h> + +int main(void) { + printf("Now calling into library.\n"); + proj1_func1(); + proj1_func2(); + proj1_func3(); + return 0; +} diff --git a/meson/test cases/common/78 internal dependency/src/meson.build b/meson/test cases/common/78 internal dependency/src/meson.build new file mode 100644 index 000000000..89f99abd1 --- /dev/null +++ b/meson/test cases/common/78 internal dependency/src/meson.build @@ -0,0 +1,2 @@ +exe = executable('projtest', 'main.c', dependencies : proj1_dep) +test('projtest', exe) diff --git a/meson/test cases/common/79 same basename/exe1.c b/meson/test cases/common/79 same basename/exe1.c new file mode 100644 index 000000000..128f2bb15 --- /dev/null +++ b/meson/test cases/common/79 same basename/exe1.c @@ -0,0 +1,5 @@ +int func(void); + +int main(void) { + return func(); +} diff --git a/meson/test cases/common/79 same basename/exe2.c b/meson/test cases/common/79 same basename/exe2.c new file mode 100644 index 000000000..d2d89956d --- /dev/null +++ b/meson/test cases/common/79 same basename/exe2.c @@ -0,0 +1,5 @@ +int func(void); + +int main(void) { + return func() == 1 ? 
0 : 1; +} diff --git a/meson/test cases/common/79 same basename/lib.c b/meson/test cases/common/79 same basename/lib.c new file mode 100644 index 000000000..b3798e99e --- /dev/null +++ b/meson/test cases/common/79 same basename/lib.c @@ -0,0 +1,23 @@ +#if defined _WIN32 || defined __CYGWIN__ +#define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +#if defined SHAR +int DLL_PUBLIC func(void) { + return 1; +} +#elif defined STAT +int func(void) { + return 0; +} +#else +#error "Missing type definition." +#endif + diff --git a/meson/test cases/common/79 same basename/meson.build b/meson/test cases/common/79 same basename/meson.build new file mode 100644 index 000000000..856c536c0 --- /dev/null +++ b/meson/test cases/common/79 same basename/meson.build @@ -0,0 +1,14 @@ +project('same basename', 'c') + +subdir('sharedsub') +subdir('staticsub') + +# Use the same source file to check that each top level target +# has its own unique working directory. If they don't +# then the .o files will clobber each other. + +exe1 = executable('name', 'exe1.c', link_with : stlib) +exe2 = executable('name2', 'exe2.c', link_with : shlib) + +test('static', exe1) +test('shared', exe2) diff --git a/meson/test cases/common/79 same basename/sharedsub/meson.build b/meson/test cases/common/79 same basename/sharedsub/meson.build new file mode 100644 index 000000000..29654a94d --- /dev/null +++ b/meson/test cases/common/79 same basename/sharedsub/meson.build @@ -0,0 +1 @@ +shlib = shared_library('name', '../lib.c', c_args : '-DSHAR') diff --git a/meson/test cases/common/79 same basename/staticsub/meson.build b/meson/test cases/common/79 same basename/staticsub/meson.build new file mode 100644 index 000000000..5e5242e06 --- /dev/null +++ b/meson/test cases/common/79 same basename/staticsub/meson.build @@ -0,0 +1,3 @@ +# On Windows a static lib is now libfoo.a, so it does not conflict with foo.lib +# from the shared library above +stlib = static_library('name', '../lib.c', c_args : '-DSTAT') diff --git a/meson/test cases/common/8 install/gendir.py b/meson/test cases/common/8 install/gendir.py new file mode 100755 index 000000000..b42327db3 --- /dev/null +++ b/meson/test cases/common/8 install/gendir.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 + +import sys, os + +dirname = sys.argv[1] +fname = os.path.join(dirname, 'file.txt') +os.makedirs(dirname, exist_ok=True) +open(fname, 'w').close() diff --git a/meson/test cases/common/8 install/meson.build b/meson/test cases/common/8 install/meson.build new file mode 100644 index 000000000..6eb5ba5ef --- /dev/null +++ b/meson/test cases/common/8 install/meson.build @@ -0,0 +1,10 @@ +project('install test', 'c', default_options : ['libdir=libtest']) + +stlib = static_library('stat', 'stat.c', install : true) +exe = executable('prog', 'prog.c', install : true) + +dirtarget = custom_target('dirtarget', + output: ['dir'], + install: true, + command: [find_program('gendir.py'), '@OUTPUT@'], + install_dir: get_option('datadir')) diff --git a/meson/test cases/common/8 install/prog.c b/meson/test cases/common/8 install/prog.c new file mode 100644 index 000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/common/8 install/prog.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/common/8 install/stat.c b/meson/test cases/common/8 install/stat.c new file mode 100644 index 
000000000..4825cefd2 --- /dev/null +++ b/meson/test cases/common/8 install/stat.c @@ -0,0 +1 @@ +int func(void) { return 933; } diff --git a/meson/test cases/common/8 install/test.json b/meson/test cases/common/8 install/test.json new file mode 100644 index 000000000..b31f28782 --- /dev/null +++ b/meson/test cases/common/8 install/test.json @@ -0,0 +1,9 @@ +{ + "installed": [ + { "type": "exe", "file": "usr/bin/prog" }, + { "type": "pdb", "file": "usr/bin/prog" }, + { "type": "file", "file": "usr/share/dir/file.txt" }, + { "type": "file", "file": "usr/libtest/libstat.a" } + ], + "do_not_set_opts": ["libdir"] +} diff --git a/meson/test cases/common/80 declare dep/entity/entity.h b/meson/test cases/common/80 declare dep/entity/entity.h new file mode 100644 index 000000000..959a8c30d --- /dev/null +++ b/meson/test cases/common/80 declare dep/entity/entity.h @@ -0,0 +1,4 @@ +#pragma once + +int entity_func1(void); +int entity_func2(void); diff --git a/meson/test cases/common/80 declare dep/entity/entity1.c b/meson/test cases/common/80 declare dep/entity/entity1.c new file mode 100644 index 000000000..d124e24e8 --- /dev/null +++ b/meson/test cases/common/80 declare dep/entity/entity1.c @@ -0,0 +1,9 @@ +#include"entity.h" + +#ifdef USING_ENT +#error "Entity use flag leaked into entity compilation." +#endif + +int entity_func1(void) { + return 5; +} diff --git a/meson/test cases/common/80 declare dep/entity/entity2.c b/meson/test cases/common/80 declare dep/entity/entity2.c new file mode 100644 index 000000000..4e8bb07e1 --- /dev/null +++ b/meson/test cases/common/80 declare dep/entity/entity2.c @@ -0,0 +1,5 @@ +#include<entity.h> + +int entity_func2(void) { + return 9; +} diff --git a/meson/test cases/common/80 declare dep/entity/meson.build b/meson/test cases/common/80 declare dep/entity/meson.build new file mode 100644 index 000000000..469ecd3cb --- /dev/null +++ b/meson/test cases/common/80 declare dep/entity/meson.build @@ -0,0 +1,10 @@ +entity_lib = static_library('entity', 'entity1.c') + +entity_dep = declare_dependency(link_with : [[entity_lib]], + include_directories : [['.']], + sources : 'entity2.c', + compile_args : ['-DUSING_ENT=1'], + version : '1.2.3', + link_args : []) # No simple way of testing linker flags :(. + +assert(entity_dep.version().version_compare('==1.2.3'), 'Declare_dep has incorrect version string.') diff --git a/meson/test cases/common/80 declare dep/main.c b/meson/test cases/common/80 declare dep/main.c new file mode 100644 index 000000000..62200c943 --- /dev/null +++ b/meson/test cases/common/80 declare dep/main.c @@ -0,0 +1,18 @@ +#include<entity.h> +#include<stdio.h> + +#ifndef USING_ENT +#error "Entity use flag not used for compilation." 
+#endif + +int main(void) { + if(entity_func1() != 5) { + printf("Error in func1.\n"); + return 1; + } + if(entity_func2() != 9) { + printf("Error in func2.\n"); + return 2; + } + return 0; +} diff --git a/meson/test cases/common/80 declare dep/meson.build b/meson/test cases/common/80 declare dep/meson.build new file mode 100644 index 000000000..e427defaf --- /dev/null +++ b/meson/test cases/common/80 declare dep/meson.build @@ -0,0 +1,24 @@ +project('declare dependency', 'c') + +subdir('entity') + +exe = executable('dep_user', 'main.c', + dependencies : entity_dep) +test('dep', exe) + +# just to make sure [] works as a no-op dep here +executable('dummy', 'main.c', + dependencies : [entity_dep, []]) + +# simple case +declare_dependency(dependencies : entity_dep) + +# nested deps should be flattened +declare_dependency(dependencies : [entity_dep]) +declare_dependency(dependencies : [[entity_dep]]) + +# check that [] properly works as a no-op dep in declare_dependency() too +declare_dependency(dependencies : []) +declare_dependency(dependencies : [[]]) +declare_dependency(dependencies : [entity_dep, []]) +declare_dependency(dependencies : [[], entity_dep]) diff --git a/meson/test cases/common/81 extract all/extractor.h b/meson/test cases/common/81 extract all/extractor.h new file mode 100644 index 000000000..cfb7ff6d6 --- /dev/null +++ b/meson/test cases/common/81 extract all/extractor.h @@ -0,0 +1,6 @@ +#pragma once + +int func1(void); +int func2(void); +int func3(void); +int func4(void); diff --git a/meson/test cases/common/81 extract all/four.c b/meson/test cases/common/81 extract all/four.c new file mode 100644 index 000000000..f67a85e68 --- /dev/null +++ b/meson/test cases/common/81 extract all/four.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func4(void) { + return 4; +} diff --git a/meson/test cases/common/81 extract all/meson.build b/meson/test cases/common/81 extract all/meson.build new file mode 100644 index 000000000..4f08a4fa6 --- /dev/null +++ b/meson/test cases/common/81 extract all/meson.build @@ -0,0 +1,13 @@ +project('extract all', 'c') + +a = static_library('a', 'one.c', 'two.c') +b = static_library('b', 'three.c', 'four.c') +c = static_library('c', objects : [a.extract_all_objects(), b.extract_all_objects()]) +d = static_library('d', objects : [a.extract_all_objects(), b.extract_all_objects(), c.extract_all_objects()]) +d_recursive = static_library('d_recursive', objects : [c.extract_all_objects(recursive : true)]) + +e = executable('proggie', 'prog.c', link_with : d) +test('extall', e) + +e = executable('proggie_recursive', 'prog.c', link_with : d_recursive) +test('extall_recursive', e) diff --git a/meson/test cases/common/81 extract all/one.c b/meson/test cases/common/81 extract all/one.c new file mode 100644 index 000000000..152a1455d --- /dev/null +++ b/meson/test cases/common/81 extract all/one.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func1(void) { + return 1; +} diff --git a/meson/test cases/common/81 extract all/prog.c b/meson/test cases/common/81 extract all/prog.c new file mode 100644 index 000000000..de0cc7f8e --- /dev/null +++ b/meson/test cases/common/81 extract all/prog.c @@ -0,0 +1,10 @@ +#include"extractor.h" +#include + +int main(void) { + if((1+2+3+4) != (func1() + func2() + func3() + func4())) { + printf("Arithmetic is fail.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/81 extract all/three.c b/meson/test cases/common/81 extract all/three.c new file mode 100644 index 000000000..24604ed72 --- /dev/null +++ b/meson/test 
cases/common/81 extract all/three.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func3(void) { + return 3; +} diff --git a/meson/test cases/common/81 extract all/two.c b/meson/test cases/common/81 extract all/two.c new file mode 100644 index 000000000..800cd2dfb --- /dev/null +++ b/meson/test cases/common/81 extract all/two.c @@ -0,0 +1,5 @@ +#include"extractor.h" + +int func2(void) { + return 2; +} diff --git a/meson/test cases/common/82 add language/meson.build b/meson/test cases/common/82 add language/meson.build new file mode 100644 index 000000000..e99f33a81 --- /dev/null +++ b/meson/test cases/common/82 add language/meson.build @@ -0,0 +1,10 @@ +project('add language', 'c') + +test('C', executable('cprog', 'prog.c')) + +assert(add_languages('cpp', native: false), 'Add_languages returned false on success') +assert(not add_languages('klingon', required : false), 'Add_languages returned true on failure.') + +test('C++', executable('cppprog', 'prog.cc')) + +add_languages('c', native: false) diff --git a/meson/test cases/common/82 add language/prog.c b/meson/test cases/common/82 add language/prog.c new file mode 100644 index 000000000..9d747f00d --- /dev/null +++ b/meson/test cases/common/82 add language/prog.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I am plain C.\n"); + return 0; +} diff --git a/meson/test cases/common/82 add language/prog.cc b/meson/test cases/common/82 add language/prog.cc new file mode 100644 index 000000000..03647dde4 --- /dev/null +++ b/meson/test cases/common/82 add language/prog.cc @@ -0,0 +1,6 @@ +#include <iostream> + +int main(int, char**) { + std::cout << "I am C++.\n"; + return 0; +} diff --git a/meson/test cases/common/83 identical target name in subproject/bar.c b/meson/test cases/common/83 identical target name in subproject/bar.c new file mode 100644 index 000000000..f1bd822dc --- /dev/null +++ b/meson/test cases/common/83 identical target name in subproject/bar.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I'm a main project bar.\n"); + return 0; +} diff --git a/meson/test cases/common/83 identical target name in subproject/meson.build b/meson/test cases/common/83 identical target name in subproject/meson.build new file mode 100644 index 000000000..64b436347 --- /dev/null +++ b/meson/test cases/common/83 identical target name in subproject/meson.build @@ -0,0 +1,9 @@ +project('toplevel bar', 'c') + +subproject('foo') + +true_cmd = find_program('true.py') + +executable('bar', 'bar.c') +run_target('nop', command : [true_cmd]) +custom_target('cus', output: ['cus.c'], command : [true_cmd]) diff --git a/meson/test cases/common/83 identical target name in subproject/subprojects/foo/bar.c b/meson/test cases/common/83 identical target name in subproject/subprojects/foo/bar.c new file mode 100644 index 000000000..5e7006baf --- /dev/null +++ b/meson/test cases/common/83 identical target name in subproject/subprojects/foo/bar.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I'm a subproject bar.\n"); + return 0; +} diff --git a/meson/test cases/common/83 identical target name in subproject/subprojects/foo/meson.build b/meson/test cases/common/83 identical target name in subproject/subprojects/foo/meson.build new file mode 100644 index 000000000..94b235c14 --- /dev/null +++ b/meson/test cases/common/83 identical target name in subproject/subprojects/foo/meson.build @@ -0,0 +1,7 @@ +project('subfoo', 'c') + +true_cmd = find_program('true.py') + +executable('bar', 'bar.c') +run_target('nop', command : [true_cmd]) +custom_target('cus',
output: ['cus.c'], command : [true_cmd]) diff --git a/meson/test cases/common/83 identical target name in subproject/subprojects/foo/true.py b/meson/test cases/common/83 identical target name in subproject/subprojects/foo/true.py new file mode 100644 index 000000000..ddcac9e49 --- /dev/null +++ b/meson/test cases/common/83 identical target name in subproject/subprojects/foo/true.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python3 + +if __name__ == '__main__': + pass diff --git a/meson/test cases/common/83 identical target name in subproject/true.py b/meson/test cases/common/83 identical target name in subproject/true.py new file mode 100644 index 000000000..ddcac9e49 --- /dev/null +++ b/meson/test cases/common/83 identical target name in subproject/true.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python3 + +if __name__ == '__main__': + pass diff --git a/meson/test cases/common/84 plusassign/meson.build b/meson/test cases/common/84 plusassign/meson.build new file mode 100644 index 000000000..ac477e740 --- /dev/null +++ b/meson/test cases/common/84 plusassign/meson.build @@ -0,0 +1,70 @@ +project('plusassign', 'c') + +x = [] + +x += 'a' + +if x.length() != 1 + error('Incorrect append') +endif + +if x[0] != 'a' + error('Incorrect append 2.') +endif + +y = x + +x += 'b' + +if y.length() != 1 + error('Immutability broken.') +endif + +if y[0] != 'a' + error('Immutability broken 2.') +endif + +if x.length() != 2 + error('Incorrect append 3') +endif + +if x[0] != 'a' + error('Incorrect append 4.') +endif + +if x[1] != 'b' + error('Incorrect append 5.') +endif + +# Now with evil added: append yourself. + +x += x + +if x.length() != 4 + error('Incorrect selfappend.') +endif + +# += on strings + +bra = 'bra' +foo = 'A' +foo += bra +foo += 'cada' +foo += bra +assert (foo == 'Abracadabra', 'string += failure [@0@]'.format(foo)) +assert (bra == 'bra', 'string += modified right argument!') +foo += ' ' + foo +assert (foo == 'Abracadabra Abracadabra', 'string += failure [@0@]'.format(foo)) + +# += on ints + +foo = 5 +foo += 6 +assert (foo == 11, 'int += failure [@0@]'.format(foo)) +bar = 99 +foo += bar +assert (foo == 110, 'int += failure [@0@]'.format(foo)) +assert (bar == 99, 'int += modified right argument"') +bar += foo + 1 +assert (bar == 210, 'int += failure [@0@]'.format(bar)) +assert (foo == 110, 'int += modified right argument"') diff --git a/meson/test cases/common/85 skip subdir/meson.build b/meson/test cases/common/85 skip subdir/meson.build new file mode 100644 index 000000000..30ede0ea2 --- /dev/null +++ b/meson/test cases/common/85 skip subdir/meson.build @@ -0,0 +1,3 @@ +project('foo', 'c') + +subdir('subdir1/subdir2') diff --git a/meson/test cases/common/85 skip subdir/subdir1/meson.build b/meson/test cases/common/85 skip subdir/subdir1/meson.build new file mode 100644 index 000000000..51cb003e1 --- /dev/null +++ b/meson/test cases/common/85 skip subdir/subdir1/meson.build @@ -0,0 +1 @@ +error('This should not be called.') diff --git a/meson/test cases/common/85 skip subdir/subdir1/subdir2/meson.build b/meson/test cases/common/85 skip subdir/subdir1/subdir2/meson.build new file mode 100644 index 000000000..e37cad6b6 --- /dev/null +++ b/meson/test cases/common/85 skip subdir/subdir1/subdir2/meson.build @@ -0,0 +1 @@ +message('I\'m in subdir subdir.') diff --git a/meson/test cases/common/86 private include/meson.build b/meson/test cases/common/86 private include/meson.build new file mode 100644 index 000000000..2485fbf74 --- /dev/null +++ b/meson/test cases/common/86 private include/meson.build @@ -0,0 +1,4 
@@ +project('access private', 'c') + +subdir('stlib') +subdir('user') diff --git a/meson/test cases/common/86 private include/stlib/compiler.py b/meson/test cases/common/86 private include/stlib/compiler.py new file mode 100755 index 000000000..c8597ddc1 --- /dev/null +++ b/meson/test cases/common/86 private include/stlib/compiler.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 + +import sys, os + +assert(len(sys.argv) == 3) + +h_templ = '''#pragma once +unsigned int %s(void); +''' + +c_templ = '''#include"%s.h" + +unsigned int %s(void) { + return 0; +} +''' + +ifile = sys.argv[1] +outdir = sys.argv[2] + +base = os.path.splitext(os.path.split(ifile)[-1])[0] + +cfile = os.path.join(outdir, base + '.c') +hfile = os.path.join(outdir, base + '.h') + +c_code = c_templ % (base, base) +h_code = h_templ % base + +with open(cfile, 'w') as f: + f.write(c_code) +with open(hfile, 'w') as f: + f.write(h_code) diff --git a/meson/test cases/common/86 private include/stlib/foo1.def b/meson/test cases/common/86 private include/stlib/foo1.def new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/86 private include/stlib/foo2.def b/meson/test cases/common/86 private include/stlib/foo2.def new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/common/86 private include/stlib/meson.build b/meson/test cases/common/86 private include/stlib/meson.build new file mode 100644 index 000000000..8d70650d8 --- /dev/null +++ b/meson/test cases/common/86 private include/stlib/meson.build @@ -0,0 +1,12 @@ +genbin = find_program('compiler.py') + +gen = generator(genbin, + output : ['@BASENAME@.h', '@BASENAME@.c'], + arguments : ['@INPUT@', '@BUILD_DIR@'] + ) + +defs = ['foo1.def', 'foo2.def'] +generated = gen.process(defs) + +stlib = static_library('st', generated) +st_priv_inc = stlib.private_dir_include() diff --git a/meson/test cases/common/86 private include/user/libuser.c b/meson/test cases/common/86 private include/user/libuser.c new file mode 100644 index 000000000..c1724867b --- /dev/null +++ b/meson/test cases/common/86 private include/user/libuser.c @@ -0,0 +1,6 @@ +#include"foo1.h" +#include"foo2.h" + +int main(void) { + return foo1() + foo2(); +} diff --git a/meson/test cases/common/86 private include/user/meson.build b/meson/test cases/common/86 private include/user/meson.build new file mode 100644 index 000000000..ab88b1dbd --- /dev/null +++ b/meson/test cases/common/86 private include/user/meson.build @@ -0,0 +1,5 @@ +exe = executable('libuser', 'libuser.c', + link_with : stlib, + include_directories : st_priv_inc) + +test('libuser', exe) diff --git a/meson/test cases/common/87 default options/meson.build b/meson/test cases/common/87 default options/meson.build new file mode 100644 index 000000000..51b5cdac9 --- /dev/null +++ b/meson/test cases/common/87 default options/meson.build @@ -0,0 +1,33 @@ +project('default options', 'cpp', 'c', default_options : [ + 'prefix=/absoluteprefix', + 'buildtype=debugoptimized', + 'cpp_std=c++11', + 'cpp_eh=none', + 'warning_level=3', + 'sub1:test_option=false', + ]) + +assert(get_option('buildtype') == 'debugoptimized', 'Build type default value wrong.') + +cpp_eh = get_option('cpp_eh') +assert(cpp_eh == 'none', 'EH value is "' + cpp_eh + '" instead of "none"') +cpp_std = get_option('cpp_std') +assert(cpp_std == 'c++11', 'C++ std value is "' + cpp_std + '" instead of c++11.') + +w_level = get_option('warning_level') +assert(w_level == '3', 'warning level "' + w_level + '" instead of "3"') + +# FIXME. 
Since we no longer accept invalid options to c_std etc, +# there is no simple way to test this. Gcc does not seem to expose +# the C std used in a preprocessor token so we can't check for it. +# Think of a way to fix this. +# +# # Verify that project args are not used when told not to. +# # MSVC plain C does not have a simple arg to test so skip it. +# if cpp.get_id() != 'msvc' +# cc = meson.get_compiler('c') +# assert(not cc.compiles('int foobar;'), 'Default arg not used in test.') +# assert(cc.compiles('int foobar;', no_builtin_args : true), 'No_builtin did not disable builtins.') +# endif + +subproject('sub1') diff --git a/meson/test cases/common/87 default options/subprojects/sub1/meson.build b/meson/test cases/common/87 default options/subprojects/sub1/meson.build new file mode 100644 index 000000000..de0dc216c --- /dev/null +++ b/meson/test cases/common/87 default options/subprojects/sub1/meson.build @@ -0,0 +1,3 @@ +project('sub1') + +assert(get_option('test_option') == false) diff --git a/meson/test cases/common/87 default options/subprojects/sub1/meson_options.txt b/meson/test cases/common/87 default options/subprojects/sub1/meson_options.txt new file mode 100644 index 000000000..fc96f5e09 --- /dev/null +++ b/meson/test cases/common/87 default options/subprojects/sub1/meson_options.txt @@ -0,0 +1 @@ +option('test_option', type : 'boolean', value : true, description : 'Test option. Superproject overrides default to "false"') diff --git a/meson/test cases/common/88 dep fallback/gensrc.py b/meson/test cases/common/88 dep fallback/gensrc.py new file mode 100644 index 000000000..ff42ac359 --- /dev/null +++ b/meson/test cases/common/88 dep fallback/gensrc.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys +import shutil + +shutil.copyfile(sys.argv[1], sys.argv[2]) diff --git a/meson/test cases/common/88 dep fallback/meson.build b/meson/test cases/common/88 dep fallback/meson.build new file mode 100644 index 000000000..2e962f6c3 --- /dev/null +++ b/meson/test cases/common/88 dep fallback/meson.build @@ -0,0 +1,38 @@ +project('dep fallback', 'c') + +bob = dependency('boblib', fallback : ['boblib', 'bob_dep'], required: false, + default_options : 'warning_level=1') +if not bob.found() + error('Bob is actually needed') +endif + +# boblib subproject exists, but bobinc is not a dependency variable +sita = dependency('sitalib', fallback : ['boblib', 'bobinc'], required: false) +assert(not sita.found()) +# boblib subproject exists, but sita_dep doesn't exist +sita = dependency('sitalib', fallback : ['boblib', 'sita_dep'], required: false) +assert(not sita.found()) +# boblib has been configured so zlib cannot be searched on the system +zlib = dependency('zlib', fallback : ['boblib', 'notfound_dep'], required: false) +assert(not zlib.found()) +# boblib has been configured so zlib cannot be searched on the system. +# Not variable name provided and the subproject does not override zlib. 
+zlib = dependency('zlib', fallback : 'boblib', required: false) +assert(not zlib.found()) + +# jimmylib subproject doesn't exist +jimmy = dependency('jimmylib', fallback : ['jimmylib', 'jimmy_dep'], required: false) +# dummylib subproject fails to configure +dummy = dependency('dummylib', fallback : ['dummylib', 'dummy_dep'], required: false) + +gensrc_py = find_program('gensrc.py') +gensrc = custom_target('gensrc.c', + input : 'tester.c', + output : 'gensrc.c', + command : [gensrc_py, '@INPUT@', '@OUTPUT@']) + +exe = executable('bobtester', + [gensrc], + dependencies : bob) + +test('bobtester', exe) diff --git a/meson/test cases/common/88 dep fallback/subprojects/boblib/bob.c b/meson/test cases/common/88 dep fallback/subprojects/boblib/bob.c new file mode 100644 index 000000000..52cf4795e --- /dev/null +++ b/meson/test cases/common/88 dep fallback/subprojects/boblib/bob.c @@ -0,0 +1,8 @@ +#include"bob.h" + +#ifdef _MSC_VER +__declspec(dllexport) +#endif +const char* get_bob(void) { + return "bob"; +} diff --git a/meson/test cases/common/88 dep fallback/subprojects/boblib/bob.h b/meson/test cases/common/88 dep fallback/subprojects/boblib/bob.h new file mode 100644 index 000000000..8dd4b334d --- /dev/null +++ b/meson/test cases/common/88 dep fallback/subprojects/boblib/bob.h @@ -0,0 +1,6 @@ +#pragma once + +#ifdef _MSC_VER +__declspec(dllimport) +#endif +const char* get_bob(void); diff --git a/meson/test cases/common/88 dep fallback/subprojects/boblib/genbob.py b/meson/test cases/common/88 dep fallback/subprojects/boblib/genbob.py new file mode 100644 index 000000000..34af7790e --- /dev/null +++ b/meson/test cases/common/88 dep fallback/subprojects/boblib/genbob.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys + +with open(sys.argv[1], 'w') as f: + f.write('') diff --git a/meson/test cases/common/88 dep fallback/subprojects/boblib/meson.build b/meson/test cases/common/88 dep fallback/subprojects/boblib/meson.build new file mode 100644 index 000000000..0a72a828d --- /dev/null +++ b/meson/test cases/common/88 dep fallback/subprojects/boblib/meson.build @@ -0,0 +1,18 @@ +project('bob', 'c') + +gensrc_py = find_program('genbob.py') +genbob_h = custom_target('genbob.h', + output : 'genbob.h', + command : [gensrc_py, '@OUTPUT@']) +genbob_c = custom_target('genbob.c', + output : 'genbob.c', + command : [gensrc_py, '@OUTPUT@']) + +boblib = library('bob', ['bob.c', genbob_c]) +bobinc = include_directories('.') + +bob_dep = declare_dependency(link_with : boblib, + sources : [genbob_h], + include_directories : bobinc) + +notfound_dep = dependency('', required: false) diff --git a/meson/test cases/common/88 dep fallback/subprojects/dummylib/meson.build b/meson/test cases/common/88 dep fallback/subprojects/dummylib/meson.build new file mode 100644 index 000000000..3ad33e7b9 --- /dev/null +++ b/meson/test cases/common/88 dep fallback/subprojects/dummylib/meson.build @@ -0,0 +1,4 @@ +project('dummylib', 'c') + +dummy_dep = declare_dependency() +error('this subproject fails to configure') diff --git a/meson/test cases/common/88 dep fallback/tester.c b/meson/test cases/common/88 dep fallback/tester.c new file mode 100644 index 000000000..a46f3f629 --- /dev/null +++ b/meson/test cases/common/88 dep fallback/tester.c @@ -0,0 +1,14 @@ +#include"bob.h" +#include"genbob.h" +#include <string.h> +#include <stdio.h> + +int main(void) { + if(strcmp("bob", get_bob()) == 0) { + printf("Bob is indeed bob.\n"); + } else { + printf("ERROR: bob is not bob.\n"); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/89
default library/ef.cpp b/meson/test cases/common/89 default library/ef.cpp new file mode 100644 index 000000000..34784f88a --- /dev/null +++ b/meson/test cases/common/89 default library/ef.cpp @@ -0,0 +1,8 @@ +#include"ef.h" + +DLL_PUBLIC Ef::Ef() : x(99) { +} + +int DLL_PUBLIC Ef::get_x() const { + return x; +} diff --git a/meson/test cases/common/89 default library/ef.h b/meson/test cases/common/89 default library/ef.h new file mode 100644 index 000000000..21704b557 --- /dev/null +++ b/meson/test cases/common/89 default library/ef.h @@ -0,0 +1,22 @@ +#pragma once + +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +class Ef { +private: + int x; + +public: + + DLL_PUBLIC Ef(); + int DLL_PUBLIC get_x() const; +}; diff --git a/meson/test cases/common/89 default library/eftest.cpp b/meson/test cases/common/89 default library/eftest.cpp new file mode 100644 index 000000000..4d4412d75 --- /dev/null +++ b/meson/test cases/common/89 default library/eftest.cpp @@ -0,0 +1,14 @@ +#include"ef.h" + +#include + +int main(int, char **) { + Ef var; + if(var.get_x() == 99) { + std::cout << "All is fine.\n"; + return 0; + } else { + std::cout << "Something went wrong.\n"; + return 1; + } +} diff --git a/meson/test cases/common/89 default library/meson.build b/meson/test cases/common/89 default library/meson.build new file mode 100644 index 000000000..508f25f5b --- /dev/null +++ b/meson/test cases/common/89 default library/meson.build @@ -0,0 +1,10 @@ +project('default library', 'cpp') + +flib = library('ef', 'ef.cpp') +exe = executable('eftest', 'eftest.cpp', link_with : flib) +test('eftest', exe) + +# Same as above, but using build_target() +flib2 = build_target('ef2', 'ef.cpp', target_type: 'library') +exe2 = executable('eftest2', 'eftest.cpp', link_with : flib2) +test('eftest2', exe2) diff --git a/meson/test cases/common/9 header install/meson.build b/meson/test cases/common/9 header install/meson.build new file mode 100644 index 000000000..891cb596b --- /dev/null +++ b/meson/test cases/common/9 header install/meson.build @@ -0,0 +1,12 @@ +project('header install') + +as_array = ['subdir.h'] + +subdir('vanishing_subdir') +subdir('sub') + +h1 = install_headers('rootdir.h') +h2 = install_headers(as_array, subdir : 'subdir') +h3 = install_headers(subheader) +h4 = install_headers(disabler()) + diff --git a/meson/test cases/common/9 header install/rootdir.h b/meson/test cases/common/9 header install/rootdir.h new file mode 100644 index 000000000..72fb13220 --- /dev/null +++ b/meson/test cases/common/9 header install/rootdir.h @@ -0,0 +1,3 @@ +/* This header goes to include dir root. */ + +int root_func(); diff --git a/meson/test cases/common/9 header install/sub/fileheader.h b/meson/test cases/common/9 header install/sub/fileheader.h new file mode 100644 index 000000000..28e5c8dfe --- /dev/null +++ b/meson/test cases/common/9 header install/sub/fileheader.h @@ -0,0 +1,3 @@ +#pragma once + +#define LIFE "Is life! Na naa, naa-na na." 
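Note on the "9 header install" case above: it exercises the main install_headers() forms, namely a plain string, an array, a files() object from a subdir, and a disabler() that effectively turns the call into a no-op. As a rough illustration of where such calls place files (not part of the patch; 'api.h' and 'mylib' are made-up names and the default includedir is assumed): project('header demo', 'c') / install_headers('api.h') installs to <prefix>/include/api.h, while install_headers('api.h', subdir : 'mylib') installs to <prefix>/include/mylib/api.h, which is what the test.json manifest in this test case checks for.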
diff --git a/meson/test cases/common/9 header install/sub/meson.build b/meson/test cases/common/9 header install/sub/meson.build new file mode 100644 index 000000000..1ee0d1dd4 --- /dev/null +++ b/meson/test cases/common/9 header install/sub/meson.build @@ -0,0 +1,2 @@ +subheader = files('fileheader.h') + diff --git a/meson/test cases/common/9 header install/subdir.h b/meson/test cases/common/9 header install/subdir.h new file mode 100644 index 000000000..17f768e58 --- /dev/null +++ b/meson/test cases/common/9 header install/subdir.h @@ -0,0 +1,3 @@ +/* This file goes to subdirectory of include root. */ + +int subdir_func(); diff --git a/meson/test cases/common/9 header install/test.json b/meson/test cases/common/9 header install/test.json new file mode 100644 index 000000000..eb12cd037 --- /dev/null +++ b/meson/test cases/common/9 header install/test.json @@ -0,0 +1,8 @@ +{ + "installed": [ + { "type": "file", "file": "usr/include/rootdir.h" }, + { "type": "file", "file": "usr/include/subdir/subdir.h" }, + { "type": "file", "file": "usr/include/vanished.h" }, + { "type": "file", "file": "usr/include/fileheader.h" } + ] +} diff --git a/meson/test cases/common/9 header install/vanishing_subdir/meson.build b/meson/test cases/common/9 header install/vanishing_subdir/meson.build new file mode 100644 index 000000000..a81626c6c --- /dev/null +++ b/meson/test cases/common/9 header install/vanishing_subdir/meson.build @@ -0,0 +1 @@ +install_headers('vanished.h') diff --git a/meson/test cases/common/9 header install/vanishing_subdir/vanished.h b/meson/test cases/common/9 header install/vanishing_subdir/vanished.h new file mode 100644 index 000000000..ed7971b88 --- /dev/null +++ b/meson/test cases/common/9 header install/vanishing_subdir/vanished.h @@ -0,0 +1,5 @@ +#pragma once + +/* This is a header in a subdirectory. Make sure it installs into + * /prefix/include and not /prefix/include/vanishing_subdir. 
+ */ diff --git a/meson/test cases/common/90 gen extra/meson.build b/meson/test cases/common/90 gen extra/meson.build new file mode 100644 index 000000000..cbbdceb2e --- /dev/null +++ b/meson/test cases/common/90 gen extra/meson.build @@ -0,0 +1,40 @@ +project('extra args in gen', 'c') + +prog = find_program('srcgen.py') + +gen = generator(prog, + output : '@BASENAME@.c', + arguments : ['--input=@INPUT@', '--output=@OUTPUT@', '@EXTRA_ARGS@']) + +g1 = gen.process('name.dat') +g2 = gen.process('name.dat', extra_args: '--upper') + +test('basic', executable('basic', 'plain.c', g1)) +test('upper', executable('upper', 'upper.c', g2)) + +prog2 = find_program('srcgen2.py') +basename_gen = generator(prog2, + output : ['@BASENAME@.tab.c', '@BASENAME@.tab.h'], + arguments : ['@BUILD_DIR@', '@BASENAME@', '@INPUT@']) + +basename_src = basename_gen.process('name.l') + +test('basename', executable('basename', basename_src)) + +plainname_gen = generator(prog2, + output : ['@PLAINNAME@.tab.c', '@PLAINNAME@.tab.h'], + arguments : ['@BUILD_DIR@', '@PLAINNAME@', '@INPUT@']) + +plainname_src = plainname_gen.process('name.l') + +test('plainname', executable('plainname', plainname_src)) + +prog3 = find_program('srcgen3.py') +capture_gen = generator(prog3, + output : ['@BASENAME@.yy.c'], + arguments : ['@INPUT@'], + capture : true) + +capture_src = capture_gen.process('name.l') + +test('capture', executable('capture', capture_src)) diff --git a/meson/test cases/common/90 gen extra/name.dat b/meson/test cases/common/90 gen extra/name.dat new file mode 100644 index 000000000..caf5b1caf --- /dev/null +++ b/meson/test cases/common/90 gen extra/name.dat @@ -0,0 +1 @@ +bob_mcbob diff --git a/meson/test cases/common/90 gen extra/name.l b/meson/test cases/common/90 gen extra/name.l new file mode 100644 index 000000000..c4ba277d2 --- /dev/null +++ b/meson/test cases/common/90 gen extra/name.l @@ -0,0 +1,3 @@ +int main(void) { +return 0; +} diff --git a/meson/test cases/common/90 gen extra/plain.c b/meson/test cases/common/90 gen extra/plain.c new file mode 100644 index 000000000..c068a023e --- /dev/null +++ b/meson/test cases/common/90 gen extra/plain.c @@ -0,0 +1,5 @@ +int bob_mcbob(void); + +int main(void) { + return bob_mcbob(); +} diff --git a/meson/test cases/common/90 gen extra/srcgen.py b/meson/test cases/common/90 gen extra/srcgen.py new file mode 100755 index 000000000..c64f54000 --- /dev/null +++ b/meson/test cases/common/90 gen extra/srcgen.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 + +import sys +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument('--input', dest='input', + help='the input file') +parser.add_argument('--output', dest='output', + help='the output file') +parser.add_argument('--upper', dest='upper', action='store_true', default=False, + help='Convert to upper case.') + +c_templ = '''int %s(void) { + return 0; +} +''' + +options = parser.parse_args(sys.argv[1:]) + +with open(options.input) as f: + funcname = f.readline().strip() +if options.upper: + funcname = funcname.upper() + +with open(options.output, 'w') as f: + f.write(c_templ % funcname) diff --git a/meson/test cases/common/90 gen extra/srcgen2.py b/meson/test cases/common/90 gen extra/srcgen2.py new file mode 100644 index 000000000..9cdf12d59 --- /dev/null +++ b/meson/test cases/common/90 gen extra/srcgen2.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 + +import os +import sys +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument('target_dir', + help='the target dir') 
+parser.add_argument('stem', + help='the stem') +parser.add_argument('input', + help='the input file') + +options = parser.parse_args(sys.argv[1:]) + +with open(options.input) as f: + content = f.read() + + +output_c = os.path.join(options.target_dir, options.stem + ".tab.c") +with open(output_c, 'w') as f: + f.write(content) + + +output_h = os.path.join(options.target_dir, options.stem + ".tab.h") +h_content = '''#pragma once + +int myfun(void); +''' +with open(output_h, 'w') as f: + f.write(h_content) diff --git a/meson/test cases/common/90 gen extra/srcgen3.py b/meson/test cases/common/90 gen extra/srcgen3.py new file mode 100644 index 000000000..b7371143a --- /dev/null +++ b/meson/test cases/common/90 gen extra/srcgen3.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import sys +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument('input', + help='the input file') + +options = parser.parse_args(sys.argv[1:]) + +with open(options.input) as f: + content = f.read().strip() + +print(content) diff --git a/meson/test cases/common/90 gen extra/upper.c b/meson/test cases/common/90 gen extra/upper.c new file mode 100644 index 000000000..82c32529e --- /dev/null +++ b/meson/test cases/common/90 gen extra/upper.c @@ -0,0 +1,5 @@ +int BOB_MCBOB(void); + +int main(void) { + return BOB_MCBOB(); +} diff --git a/meson/test cases/common/91 benchmark/delayer.c b/meson/test cases/common/91 benchmark/delayer.c new file mode 100644 index 000000000..b410c4605 --- /dev/null +++ b/meson/test cases/common/91 benchmark/delayer.c @@ -0,0 +1,20 @@ +/* Simple prog that sleeps for a random time. */ + +#include +#include +#if defined(_WIN32) +#include +#endif + +int main(void) { + srand(time(NULL)); +#if !defined(_WIN32) + struct timespec t; + t.tv_sec = 0; + t.tv_nsec = 199999999.0*rand()/RAND_MAX; + nanosleep(&t, NULL); +#else + Sleep(50.0*rand()/RAND_MAX); +#endif + return 0; +} diff --git a/meson/test cases/common/91 benchmark/meson.build b/meson/test cases/common/91 benchmark/meson.build new file mode 100644 index 000000000..9d583d2e1 --- /dev/null +++ b/meson/test cases/common/91 benchmark/meson.build @@ -0,0 +1,4 @@ +project('benchmark', 'c') + +delayer = executable('delayer', 'delayer.c', c_args : '-D_GNU_SOURCE') +benchmark('delayer', delayer) diff --git a/meson/test cases/common/92 test workdir/meson.build b/meson/test cases/common/92 test workdir/meson.build new file mode 100644 index 000000000..a8290f7e0 --- /dev/null +++ b/meson/test cases/common/92 test workdir/meson.build @@ -0,0 +1,8 @@ +project('test workdir', 'c') + +exe = executable('opener', 'opener.c') + +test('basic', exe, workdir : meson.source_root()) +test('shouldfail', exe, should_fail : true) + +subdir('subdir') diff --git a/meson/test cases/common/92 test workdir/opener.c b/meson/test cases/common/92 test workdir/opener.c new file mode 100644 index 000000000..c946e18c2 --- /dev/null +++ b/meson/test cases/common/92 test workdir/opener.c @@ -0,0 +1,12 @@ +// This test only succeeds if run in the source root dir. 
+ +#include <stdio.h> + +int main(void) { + FILE *f = fopen("opener.c", "r"); + if(f) { + fclose(f); + return 0; + } + return 1; +} diff --git a/meson/test cases/common/92 test workdir/subdir/checker.py b/meson/test cases/common/92 test workdir/subdir/checker.py new file mode 100755 index 000000000..66e287d4a --- /dev/null +++ b/meson/test cases/common/92 test workdir/subdir/checker.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 + +import sys + +data = open(sys.argv[1], 'rb').read() diff --git a/meson/test cases/common/92 test workdir/subdir/meson.build b/meson/test cases/common/92 test workdir/subdir/meson.build new file mode 100644 index 000000000..687a1cf23 --- /dev/null +++ b/meson/test cases/common/92 test workdir/subdir/meson.build @@ -0,0 +1,4 @@ +exe2 = executable('dummy', '../opener.c') +test('subdir', find_program('checker.py'), + workdir : meson.source_root(), + args: [exe2]) diff --git a/meson/test cases/common/93 suites/exe1.c b/meson/test cases/common/93 suites/exe1.c new file mode 100644 index 000000000..6e3236221 --- /dev/null +++ b/meson/test cases/common/93 suites/exe1.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I am test exe1.\n"); + return 0; +} diff --git a/meson/test cases/common/93 suites/exe2.c b/meson/test cases/common/93 suites/exe2.c new file mode 100644 index 000000000..21a9dd61f --- /dev/null +++ b/meson/test cases/common/93 suites/exe2.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I am test exe2.\n"); + return 0; +} diff --git a/meson/test cases/common/93 suites/meson.build b/meson/test cases/common/93 suites/meson.build new file mode 100644 index 000000000..2346b5bc9 --- /dev/null +++ b/meson/test cases/common/93 suites/meson.build @@ -0,0 +1,9 @@ +project('multiple test suites', 'c') + +subproject('sub') + +exe1 = executable('exe1', 'exe1.c') +exe2 = executable('exe2', 'exe2.c') + +test('exe1', exe1) +test('exe2', exe2, suite : ['suite2', ['super-special']]) diff --git a/meson/test cases/common/93 suites/subprojects/sub/meson.build b/meson/test cases/common/93 suites/subprojects/sub/meson.build new file mode 100644 index 000000000..697d95f1c --- /dev/null +++ b/meson/test cases/common/93 suites/subprojects/sub/meson.build @@ -0,0 +1,7 @@ +project('subproject test suites', 'c') + +sub1 = executable('sub1', 'sub1.c') +sub2 = executable('sub2', 'sub2.c') + +test('sub1', sub1) +test('sub2', sub2, suite : 'suite2') diff --git a/meson/test cases/common/93 suites/subprojects/sub/sub1.c b/meson/test cases/common/93 suites/subprojects/sub/sub1.c new file mode 100644 index 000000000..e06762443 --- /dev/null +++ b/meson/test cases/common/93 suites/subprojects/sub/sub1.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I am test sub1.\n"); + return 0; +} diff --git a/meson/test cases/common/93 suites/subprojects/sub/sub2.c b/meson/test cases/common/93 suites/subprojects/sub/sub2.c new file mode 100644 index 000000000..0b457dc01 --- /dev/null +++ b/meson/test cases/common/93 suites/subprojects/sub/sub2.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(void) { + printf("I am test sub2.\n"); + return 0; +} diff --git a/meson/test cases/common/94 threads/meson.build b/meson/test cases/common/94 threads/meson.build new file mode 100644 index 000000000..1fbb15ac7 --- /dev/null +++ b/meson/test cases/common/94 threads/meson.build @@ -0,0 +1,16 @@ +project('threads', 'cpp', 'c', + default_options : ['cpp_std=c++11']) + +threaddep = dependency('threads') + +test('cppthreadtest', + executable('cppthreadprog', 'threadprog.cpp', + dependencies : threaddep + ) +) +
+test('cthreadtest', + executable('cthreadprog', 'threadprog.c', + dependencies : threaddep + ) +) diff --git a/meson/test cases/common/94 threads/threadprog.c b/meson/test cases/common/94 threads/threadprog.c new file mode 100644 index 000000000..7bfb7c444 --- /dev/null +++ b/meson/test cases/common/94 threads/threadprog.c @@ -0,0 +1,40 @@ +#if defined _WIN32 + +#include <windows.h> +#include <stdio.h> + +DWORD WINAPI thread_func(void) { + printf("Printing from a thread.\n"); + return 0; +} + +int main(void) { + DWORD id; + HANDLE th; + printf("Starting thread.\n"); + th = CreateThread(NULL, 0, thread_func, NULL, 0, &id); + WaitForSingleObject(th, INFINITE); + printf("Stopped thread.\n"); + return 0; +} +#else + +#include <pthread.h> +#include <stdio.h> + +void* main_func(void) { + printf("Printing from a thread.\n"); + return NULL; +} + +int main(void) { + pthread_t thread; + int rc; + printf("Starting thread.\n"); + rc = pthread_create(&thread, NULL, main_func, NULL); + rc = pthread_join(thread, NULL); + printf("Stopped thread.\n"); + return rc; +} + +#endif diff --git a/meson/test cases/common/94 threads/threadprog.cpp b/meson/test cases/common/94 threads/threadprog.cpp new file mode 100644 index 000000000..3c69dc3a8 --- /dev/null +++ b/meson/test cases/common/94 threads/threadprog.cpp @@ -0,0 +1,43 @@ +/* On Windows not all versions of VS support C++11 and + * some (most?) versions of mingw don't support std::thread, + * even though they do support c++11. Since we only care about + * threads working, do the test with raw win threads. + */ + +#if defined _WIN32 + +#include <windows.h> +#include <stdio.h> + +DWORD WINAPI thread_func(LPVOID) { + printf("Printing from a thread.\n"); + return 0; +} + +int main(void) { + printf("Starting thread.\n"); + HANDLE th; + DWORD id; + th = CreateThread(NULL, 0, thread_func, NULL, 0, &id); + WaitForSingleObject(th, INFINITE); + printf("Stopped thread.\n"); + return 0; +} +#else + +#include <thread> +#include <cstdio> + +void main_func(void) { + printf("Printing from a thread.\n"); +} + +int main(void) { + printf("Starting thread.\n"); + std::thread th(main_func); + th.join(); + printf("Stopped thread.\n"); + return 0; +} + +#endif diff --git a/meson/test cases/common/95 manygen/depuser.c b/meson/test cases/common/95 manygen/depuser.c new file mode 100644 index 000000000..1ab248748 --- /dev/null +++ b/meson/test cases/common/95 manygen/depuser.c @@ -0,0 +1,8 @@ +#include"gen_func.h" + +int main(void) { + unsigned int i = (unsigned int) gen_func_in_lib(); + unsigned int j = (unsigned int) gen_func_in_obj(); + unsigned int k = (unsigned int) gen_func_in_src(); + return (int)(i + j + k); +} diff --git a/meson/test cases/common/95 manygen/meson.build b/meson/test cases/common/95 manygen/meson.build new file mode 100644 index 000000000..e70a55a17 --- /dev/null +++ b/meson/test cases/common/95 manygen/meson.build @@ -0,0 +1,14 @@ +project('manygen', 'c') + +if meson.is_cross_build() + # FIXME error out with skip message once cross test runner + # recognizes it.
+ message('Not running this test during cross build.') +else + subdir('subdir') + + exe = executable('depuser', 'depuser.c', + generated) + + test('depuser test', exe) +endif diff --git a/meson/test cases/common/95 manygen/subdir/funcinfo.def b/meson/test cases/common/95 manygen/subdir/funcinfo.def new file mode 100644 index 000000000..b0741862e --- /dev/null +++ b/meson/test cases/common/95 manygen/subdir/funcinfo.def @@ -0,0 +1 @@ +gen_func diff --git a/meson/test cases/common/95 manygen/subdir/manygen.py b/meson/test cases/common/95 manygen/subdir/manygen.py new file mode 100755 index 000000000..931fb61de --- /dev/null +++ b/meson/test cases/common/95 manygen/subdir/manygen.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 + + +# Generates a static library, object file, source +# file and a header file. + +import sys, os +import subprocess + +with open(sys.argv[1]) as f: + funcname = f.readline().strip() +outdir = sys.argv[2] +buildtype_args = sys.argv[3] +compiler_type = sys.argv[4] +compiler = sys.argv[5:] + +if not os.path.isdir(outdir): + print('Outdir does not exist.') + sys.exit(1) + +if compiler_type == 'msvc': + libsuffix = '.lib' + is_vs = True + if any(['clang-cl' in c for c in compiler]): + linker = 'llvm-lib' + else: + linker = 'lib' +else: + libsuffix = '.a' + is_vs = False + linker = 'ar' + +objsuffix = '.o' + +outo = os.path.join(outdir, funcname + objsuffix) +outa = os.path.join(outdir, funcname + libsuffix) +outh = os.path.join(outdir, funcname + '.h') +outc = os.path.join(outdir, funcname + '.c') + +tmpc = 'diibadaaba.c' +tmpo = 'diibadaaba' + objsuffix + +with open(outc, 'w') as f: + f.write('''#include"{}.h" +int {}_in_src(void) {{ + return 0; +}} +'''.format(funcname, funcname)) + +with open(outh, 'w') as f: + f.write('''#pragma once +int {}_in_lib(void); +int {}_in_obj(void); +int {}_in_src(void); +'''.format(funcname, funcname, funcname)) + +with open(tmpc, 'w') as f: + f.write('''int %s_in_obj(void) { + return 0; +} +''' % funcname) + +if is_vs: + subprocess.check_call(compiler + ['/nologo', '/c', buildtype_args, '/Fo' + outo, tmpc]) +else: + subprocess.check_call(compiler + ['-c', '-o', outo, tmpc]) + +with open(tmpc, 'w') as f: + f.write('''int %s_in_lib() { + return 0; +} +''' % funcname) + +if is_vs: + subprocess.check_call(compiler + ['/nologo', '/c', '/Fo' + tmpo, tmpc]) + subprocess.check_call([linker, '/NOLOGO', '/OUT:' + outa, tmpo]) +else: + subprocess.check_call(compiler + ['-c', '-o', tmpo, tmpc]) + subprocess.check_call([linker, 'csr', outa, tmpo]) + +os.unlink(tmpo) +os.unlink(tmpc) diff --git a/meson/test cases/common/95 manygen/subdir/meson.build b/meson/test cases/common/95 manygen/subdir/meson.build new file mode 100644 index 000000000..56f60e6c1 --- /dev/null +++ b/meson/test cases/common/95 manygen/subdir/meson.build @@ -0,0 +1,26 @@ +gen = files('manygen.py') +py3_bin = import('python3').find_python() + +buildtype = get_option('buildtype') +buildtype_args = '-Dfooxxx' # a useless compiler argument +cc = meson.get_compiler('c') +if cc.get_argument_syntax() == 'msvc' + # We need our manually generated code to use the same CRT as the executable. + # Taken from compilers.py since build files do not have access to this. 
+ if buildtype == 'debug' + buildtype_args = '/MDd' + elif buildtype == 'debugoptimized' + buildtype_args = '/MDd' + elif buildtype == 'release' + buildtype_args = '/MD' + endif + outfiles = ['gen_func.lib', 'gen_func.c', 'gen_func.h', 'gen_func.o'] +else + outfiles = ['gen_func.a', 'gen_func.c', 'gen_func.h', 'gen_func.o'] +endif + +generated = custom_target('manygen', + output : outfiles, + input : ['funcinfo.def'], + command : [py3_bin, gen[0], '@INPUT@', '@OUTDIR@', buildtype_args, cc.get_argument_syntax(), cc.cmd_array()], +) diff --git a/meson/test cases/common/96 stringdef/meson.build b/meson/test cases/common/96 stringdef/meson.build new file mode 100644 index 000000000..3f9170e20 --- /dev/null +++ b/meson/test cases/common/96 stringdef/meson.build @@ -0,0 +1,3 @@ +project('stringdef', 'c') + +test('stringdef', executable('stringdef', 'stringdef.c', c_args : '-DFOO="bar"')) diff --git a/meson/test cases/common/96 stringdef/stringdef.c b/meson/test cases/common/96 stringdef/stringdef.c new file mode 100644 index 000000000..17e29fdc1 --- /dev/null +++ b/meson/test cases/common/96 stringdef/stringdef.c @@ -0,0 +1,10 @@ +#include <string.h> +#include <stdio.h> + +int main(void) { + if(strcmp(FOO, "bar")) { + printf("FOO is misquoted: %s\n", FOO); + return 1; + } + return 0; +} diff --git a/meson/test cases/common/97 find program path/meson.build b/meson/test cases/common/97 find program path/meson.build new file mode 100644 index 000000000..0a812499a --- /dev/null +++ b/meson/test cases/common/97 find program path/meson.build @@ -0,0 +1,22 @@ +project('find program', 'c') + +python = import('python3').find_python() + +# Source file via string +prog = find_program('program.py') +# Source file via files() +progf = files('program.py') +# Built file +py = configure_file(input : 'program.py', + output : 'builtprogram.py', + configuration : configuration_data()) + +foreach f : [prog, progf, py, find_program(py), find_program(progf)] + ret = run_command(python, f) + assert(ret.returncode() == 0, 'can\'t manually run @0@'.format(prog.path())) + assert(ret.stdout().strip() == 'Found', 'wrong output from manually-run @0@'.format(prog.path())) + + ret = run_command(f) + assert(ret.returncode() == 0, 'can\'t run @0@'.format(prog.path())) + assert(ret.stdout().strip() == 'Found', 'wrong output from @0@'.format(prog.path())) +endforeach diff --git a/meson/test cases/common/97 find program path/program.py b/meson/test cases/common/97 find program path/program.py new file mode 100755 index 000000000..2ebc56419 --- /dev/null +++ b/meson/test cases/common/97 find program path/program.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +print("Found") diff --git a/meson/test cases/common/98 subproject subdir/meson.build b/meson/test cases/common/98 subproject subdir/meson.build new file mode 100644 index 000000000..6c3811f3f --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/meson.build @@ -0,0 +1,67 @@ +project('proj', 'c') +subproject('sub') +libSub = dependency('sub', fallback: ['sub', 'libSub']) + +exe = executable('prog', 'prog.c', dependencies: libSub) +test('subproject subdir', exe) + +# Verify the subproject has placed a dependency override. +dependency('sub-1.0') + +# Verify we can now take 'sub' dependency without fallback, but only version 1.0. +dependency('sub') +d = dependency('sub', version : '>=2.0', required : false) +assert(not d.found(), 'version should not match') + +# Verify that not-found does not get cached; we can still fall back afterward.
+dependency('sub2', required : false) +d = dependency('sub2', fallback: ['sub', 'libSub']) +assert(d.found(), 'Should fallback even if a previous call returned not-found') + +# Verify we can get a fallback dependency without specifying the variable name, +# because the subproject overridden 'sub-novar'. +dependency('sub-novar', fallback : 'sub_novar') + +# Verify a subproject can force a dependency to be not-found +d = dependency('sub-notfound', fallback : 'sub_novar', required : false) +assert(not d.found(), 'Dependency should be not-found') + +# Verify that implicit fallback works because subprojects/sub_implicit directory exists +d = dependency('sub_implicit', default_options: 'opt=overriden') +assert(d.found(), 'Should implicitly fallback') + +# Verify that implicit fallback works because sub_implicit.wrap has +# `dependency_names=sub_implicit_provide1` and the subproject overrides sub_implicit_provide1. +d = dependency('sub_implicit_provide1') +assert(d.found(), 'Should implicitly fallback') + +# Verify that implicit fallback works because sub_implicit.wrap has +# `sub_implicit_provide2=sub_implicit_provide2_dep` and does not override +# sub_implicit_provide2. +d = dependency('sub_implicit_provide2') +assert(d.found(), 'Should implicitly fallback') + +# sub_implicit.wrap provides glib-2.0 and we already configured that subproject, +# so we must not return the system dependency here. Using glib-2.0 here because +# some CI runners have it installed. +d = dependency('glib-2.0', required : false) +assert(d.found()) +assert(d.type_name() == 'internal') + +# sub_implicit.wrap provides gobject-2.0 and we already configured that subproject, +# so we must not return the system dependency here. But since the subproject did +# not override that dependency and its not required, not-found should be returned. +# Using gobject-2.0 here because some CI runners have it installed. +d = dependency('gobject-2.0', required : false) +assert(not d.found()) + +# Verify that implicit fallback works because subprojects/sub_implicit/subprojects/subsub +# directory exists. +d = dependency('subsub') +assert(d.found(), 'Should be able to fallback to sub-subproject') + +# Verify that implicit fallback works because +# subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap +# file exists. 
+d = dependency('subsubsub') +assert(d.found(), 'Should be able to fallback to sub-sub-subproject') diff --git a/meson/test cases/common/98 subproject subdir/prog.c b/meson/test cases/common/98 subproject subdir/prog.c new file mode 100644 index 000000000..9035ff114 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/prog.c @@ -0,0 +1,5 @@ +#include + +int main(void) { + return sub(); +} diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/meson.build b/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/meson.build new file mode 100644 index 000000000..53233ab4e --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/meson.build @@ -0,0 +1,3 @@ +lib = static_library('sub', 'sub.c') +libSub = declare_dependency(include_directories: include_directories('.'), link_with: lib) +meson.override_dependency('sub-1.0', libSub) diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.c b/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.c new file mode 100644 index 000000000..e748ac750 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.c @@ -0,0 +1,5 @@ +#include "sub.h" + +int sub(void) { + return 0; +} diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.h b/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.h new file mode 100644 index 000000000..2b59a3a32 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub/lib/sub.h @@ -0,0 +1,6 @@ +#ifndef SUB_H +#define SUB_H + +int sub(void); + +#endif diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub/meson.build b/meson/test cases/common/98 subproject subdir/subprojects/sub/meson.build new file mode 100644 index 000000000..d8c4dce70 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub/meson.build @@ -0,0 +1,2 @@ +project('sub', 'c', version : '1.0') +subdir('lib') diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit.wrap b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit.wrap new file mode 100644 index 000000000..a809c43b1 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit.wrap @@ -0,0 +1,6 @@ +[wrap-file] + +[provide] +glib-2.0 = glib_dep +dependency_names = sub_implicit_provide1, gobject-2.0 +sub_implicit_provide2 = sub_implicit_provide2_dep diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson.build b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson.build new file mode 100644 index 000000000..9f436042f --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson.build @@ -0,0 +1,13 @@ +project('sub_implicit', 'c', version : '1.0') + +dep = declare_dependency() +meson.override_dependency('sub_implicit', dep) +meson.override_dependency('sub_implicit_provide1', dep) + +# This one is not overridden but the wrap file tells the variable name to use. +sub_implicit_provide2_dep = dep + +# This one is not overridden but the wrap file tells the variable name to use. 
+glib_dep = dep + +assert(get_option('opt') == 'overriden') \ No newline at end of file diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson_options.txt b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson_options.txt new file mode 100644 index 000000000..770178ce7 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/meson_options.txt @@ -0,0 +1 @@ +option('opt', type: 'string', value: 'default') \ No newline at end of file diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/foo.h b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/foo.h new file mode 100644 index 000000000..a8ad3daba --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/foo.h @@ -0,0 +1 @@ +#define DUMMY 42 diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build new file mode 100644 index 000000000..fdbb03fe6 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/meson.build @@ -0,0 +1,7 @@ +project('subsub') + +meson.override_dependency('subsub', declare_dependency()) + +# Regression test: Installing a header from nested sub-subproject used to raise: +# ERROR: Sandbox violation: Tried to grab file foo.h from a nested subproject. +install_headers('foo.h') diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/packagefiles/subsubsub-1.0.zip b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/packagefiles/subsubsub-1.0.zip new file mode 100644 index 000000000..dfb7576f3 Binary files /dev/null and b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/packagefiles/subsubsub-1.0.zip differ diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap new file mode 100644 index 000000000..6567ed010 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap @@ -0,0 +1,4 @@ +[wrap-file] +directory = subsubsub-1.0 +source_filename = subsubsub-1.0.zip +source_hash = c073a96b7251937e53216578f6f03d91b84816618a0f1ce3ecfb867beddf1498 diff --git a/meson/test cases/common/98 subproject subdir/subprojects/sub_novar/meson.build b/meson/test cases/common/98 subproject subdir/subprojects/sub_novar/meson.build new file mode 100644 index 000000000..6450a1032 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/subprojects/sub_novar/meson.build @@ -0,0 +1,4 @@ +project('sub-novar', 'c', version : '1.0') + +meson.override_dependency('sub-novar', declare_dependency()) +meson.override_dependency('sub-notfound', dependency('', required : false)) diff --git a/meson/test cases/common/98 subproject subdir/test.json b/meson/test cases/common/98 subproject subdir/test.json new file mode 100644 index 000000000..1921fe559 --- /dev/null +++ b/meson/test cases/common/98 subproject subdir/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + { "type": "file", "file": "usr/include/foo.h" } + 
] +} diff --git a/meson/test cases/common/99 postconf/meson.build b/meson/test cases/common/99 postconf/meson.build new file mode 100644 index 000000000..12b3c5bfe --- /dev/null +++ b/meson/test cases/common/99 postconf/meson.build @@ -0,0 +1,5 @@ +project('postconf script', 'c') + +meson.add_postconf_script('postconf.py') + +test('post', executable('prog', 'prog.c')) diff --git a/meson/test cases/common/99 postconf/postconf.py b/meson/test cases/common/99 postconf/postconf.py new file mode 100644 index 000000000..950c7064a --- /dev/null +++ b/meson/test cases/common/99 postconf/postconf.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import os + +template = '''#pragma once + +#define THE_NUMBER {} +''' + +input_file = os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat') +output_file = os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h') + +with open(input_file) as f: + data = f.readline().strip() +with open(output_file, 'w') as f: + f.write(template.format(data)) diff --git a/meson/test cases/common/99 postconf/prog.c b/meson/test cases/common/99 postconf/prog.c new file mode 100644 index 000000000..85a25a300 --- /dev/null +++ b/meson/test cases/common/99 postconf/prog.c @@ -0,0 +1,5 @@ +#include"generated.h" + +int main(void) { + return THE_NUMBER != 9; +} diff --git a/meson/test cases/common/99 postconf/raw.dat b/meson/test cases/common/99 postconf/raw.dat new file mode 100644 index 000000000..ec635144f --- /dev/null +++ b/meson/test cases/common/99 postconf/raw.dat @@ -0,0 +1 @@ +9 diff --git a/meson/test cases/csharp/1 basic/meson.build b/meson/test cases/csharp/1 basic/meson.build new file mode 100644 index 000000000..09e46c293 --- /dev/null +++ b/meson/test cases/csharp/1 basic/meson.build @@ -0,0 +1,4 @@ +project('simple c#', 'cs') + +e = executable('prog', 'prog.cs', 'text.cs', install : true) +test('basic', e) diff --git a/meson/test cases/csharp/1 basic/prog.cs b/meson/test cases/csharp/1 basic/prog.cs new file mode 100644 index 000000000..6ee47b074 --- /dev/null +++ b/meson/test cases/csharp/1 basic/prog.cs @@ -0,0 +1,8 @@ +using System; + +public class Prog { + static public void Main () { + TextGetter tg = new TextGetter(); + Console.WriteLine(tg.getText()); + } +} diff --git a/meson/test cases/csharp/1 basic/test.json b/meson/test cases/csharp/1 basic/test.json new file mode 100644 index 000000000..650a6e208 --- /dev/null +++ b/meson/test cases/csharp/1 basic/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "file", "file": "usr/bin/prog.exe"}, + {"type": "pdb", "file": "usr/bin/prog"} + ] +} diff --git a/meson/test cases/csharp/1 basic/text.cs b/meson/test cases/csharp/1 basic/text.cs new file mode 100644 index 000000000..c83c424c8 --- /dev/null +++ b/meson/test cases/csharp/1 basic/text.cs @@ -0,0 +1,7 @@ +using System; + +public class TextGetter { + public String getText() { + return "C# is working."; + } +} diff --git a/meson/test cases/csharp/2 library/helper.cs b/meson/test cases/csharp/2 library/helper.cs new file mode 100644 index 000000000..266e37969 --- /dev/null +++ b/meson/test cases/csharp/2 library/helper.cs @@ -0,0 +1,7 @@ +using System; + +public class Helper { + public void print() { + Console.WriteLine("Library class called."); + } +} diff --git a/meson/test cases/csharp/2 library/meson.build b/meson/test cases/csharp/2 library/meson.build new file mode 100644 index 000000000..6b246a4bd --- /dev/null +++ b/meson/test cases/csharp/2 library/meson.build @@ -0,0 +1,15 @@ +project('C# library', 'cs') + +python3 = import('python3').find_python() 
+generated_sources = custom_target('gen_sources', + input: 'helper.cs', + output: 'helper.cs', + command: [python3, '-c', + 'import shutil, sys; shutil.copyfile(sys.argv[1], sys.argv[2])', + '@INPUT@', '@OUTPUT@'] +) + +l = shared_library('helper', generated_sources, install : true) + +e = executable('prog', 'prog.cs', link_with : l, install : true) +test('libtest', e) diff --git a/meson/test cases/csharp/2 library/prog.cs b/meson/test cases/csharp/2 library/prog.cs new file mode 100644 index 000000000..8bf6a3136 --- /dev/null +++ b/meson/test cases/csharp/2 library/prog.cs @@ -0,0 +1,8 @@ +using System; + +public class Prog { + static public void Main () { + Helper h = new Helper(); + h.print(); + } +} diff --git a/meson/test cases/csharp/2 library/test.json b/meson/test cases/csharp/2 library/test.json new file mode 100644 index 000000000..0523f4559 --- /dev/null +++ b/meson/test cases/csharp/2 library/test.json @@ -0,0 +1,9 @@ +{ + "installed": [ + {"type": "file", "file": "usr/bin/prog.exe"}, + {"type": "pdb", "file": "usr/bin/prog"}, + {"type": "file", "platform": "msvc", "file": "usr/bin/helper.dll"}, + {"type": "pdb", "file": "usr/bin/helper"}, + {"type": "file", "platform": "gcc", "file": "usr/lib/helper.dll"} + ] +} diff --git a/meson/test cases/csharp/3 resource/TestRes.resx b/meson/test cases/csharp/3 resource/TestRes.resx new file mode 100644 index 000000000..c85f85c36 --- /dev/null +++ b/meson/test cases/csharp/3 resource/TestRes.resx @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +text/microsoft-resx1.3System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089Hello from resources! + \ No newline at end of file diff --git a/meson/test cases/csharp/3 resource/meson.build b/meson/test cases/csharp/3 resource/meson.build new file mode 100644 index 000000000..25b273d83 --- /dev/null +++ b/meson/test cases/csharp/3 resource/meson.build @@ -0,0 +1,6 @@ +project('C# resource', 'cs') + +e = executable('resprog', 'resprog.cs', +resources : 'TestRes.resx') + +test('restest', e) diff --git a/meson/test cases/csharp/3 resource/resprog.cs b/meson/test cases/csharp/3 resource/resprog.cs new file mode 100644 index 000000000..177201c08 --- /dev/null +++ b/meson/test cases/csharp/3 resource/resprog.cs @@ -0,0 +1,13 @@ +using System; +using System.Resources; + +public class Prog { + + static public void Main () { + ResourceManager res = new ResourceManager(typeof(TestRes)); + Console.WriteLine(res.GetString("message")); + } + + internal class TestRes { + } +} diff --git a/meson/test cases/csharp/4 external dep/hello.txt b/meson/test cases/csharp/4 external dep/hello.txt new file mode 100644 index 000000000..980a0d5f1 --- /dev/null +++ b/meson/test cases/csharp/4 external dep/hello.txt @@ -0,0 +1 @@ +Hello World! 
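Note on the external-dependency C# case that follows: it only runs when glib-sharp is available and otherwise tells the test runner to skip rather than fail. As a minimal sketch of that skip idiom (illustrative only; 'some-optional-dep' is a made-up dependency name, and the MESON_SKIP_TEST prefix is the convention this test suite's runner looks for): project('skip demo', 'c') / optdep = dependency('some-optional-dep', required : false) / if not optdep.found() / error('MESON_SKIP_TEST some-optional-dep not found.') / endif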
diff --git a/meson/test cases/csharp/4 external dep/meson.build b/meson/test cases/csharp/4 external dep/meson.build new file mode 100644 index 000000000..019d618b7 --- /dev/null +++ b/meson/test cases/csharp/4 external dep/meson.build @@ -0,0 +1,9 @@ +project('C# external library', 'cs') +glib_sharp_2 = dependency('glib-sharp-2.0', required : false) + +if not glib_sharp_2.found() + error('MESON_SKIP_TEST glib# not found.') +endif + +e = executable('prog', 'prog.cs', dependencies: glib_sharp_2, install : true) +test('libtest', e, args: [join_paths(meson.current_source_dir(), 'hello.txt')]) diff --git a/meson/test cases/csharp/4 external dep/prog.cs b/meson/test cases/csharp/4 external dep/prog.cs new file mode 100644 index 000000000..9393fef5d --- /dev/null +++ b/meson/test cases/csharp/4 external dep/prog.cs @@ -0,0 +1,8 @@ +using System; +using GLib; + +public class Prog { + static public void Main (string[] args) { + Console.WriteLine(GLib.FileUtils.GetFileContents(args[0])); + } +} diff --git a/meson/test cases/csharp/4 external dep/test.json b/meson/test cases/csharp/4 external dep/test.json new file mode 100644 index 000000000..a94303f3c --- /dev/null +++ b/meson/test cases/csharp/4 external dep/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"type": "file", "file": "usr/bin/prog.exe"} + ] +} diff --git a/meson/test cases/cuda/1 simple/meson.build b/meson/test cases/cuda/1 simple/meson.build new file mode 100644 index 000000000..19af734af --- /dev/null +++ b/meson/test cases/cuda/1 simple/meson.build @@ -0,0 +1,5 @@ +project('simple', 'cuda', version : '1.0.0') + +exe = executable('prog', 'prog.cu') +test('cudatest', exe) + diff --git a/meson/test cases/cuda/1 simple/prog.cu b/meson/test cases/cuda/1 simple/prog.cu new file mode 100644 index 000000000..b893bd331 --- /dev/null +++ b/meson/test cases/cuda/1 simple/prog.cu @@ -0,0 +1,30 @@ +#include + +int main(void) { + int cuda_devices = 0; + std::cout << "CUDA version: " << CUDART_VERSION << "\n"; + cudaGetDeviceCount(&cuda_devices); + if(cuda_devices == 0) { + std::cout << "No Cuda hardware found. 
Exiting.\n"; + return 0; + } + std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n"; + cudaDeviceProp props; + cudaGetDeviceProperties(&props, 0); + std::cout << "Properties of device 0.\n\n"; + + std::cout << " Name: " << props.name << "\n"; + std::cout << " Global memory: " << props.totalGlobalMem << "\n"; + std::cout << " Shared memory: " << props.sharedMemPerBlock << "\n"; + std::cout << " Constant memory: " << props.totalConstMem << "\n"; + std::cout << " Block registers: " << props.regsPerBlock << "\n"; + + std::cout << " Warp size: " << props.warpSize << "\n"; + std::cout << " Threads per block: " << props.maxThreadsPerBlock << "\n"; + std::cout << " Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1] << ", " << props.maxThreadsDim[2] << " ]" << "\n"; + std::cout << " Max grid dimensions: [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1] << ", " << props.maxGridSize[2] << " ]" << "\n"; + std::cout << "\n"; + + return 0; +} + diff --git a/meson/test cases/cuda/10 cuda dependency/c/meson.build b/meson/test cases/cuda/10 cuda dependency/c/meson.build new file mode 100644 index 000000000..921bc43a0 --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/c/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.c', dependencies: dependency('cuda')) +test('cudatest', exe) diff --git a/meson/test cases/cuda/10 cuda dependency/c/prog.c b/meson/test cases/cuda/10 cuda dependency/c/prog.c new file mode 100644 index 000000000..ed9333efb --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/c/prog.c @@ -0,0 +1,19 @@ +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + printf("No CUDA hardware found. Exiting.\n"); + return 0; + } + + printf("Found %i CUDA devices.\n", n); + return 0; +} diff --git a/meson/test cases/cuda/10 cuda dependency/cpp/meson.build b/meson/test cases/cuda/10 cuda dependency/cpp/meson.build new file mode 100644 index 000000000..a661b8875 --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/cpp/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.cc', dependencies: dependency('cuda')) +test('cudatest', exe) diff --git a/meson/test cases/cuda/10 cuda dependency/cpp/prog.cc b/meson/test cases/cuda/10 cuda dependency/cpp/prog.cc new file mode 100644 index 000000000..4832afa81 --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/cpp/prog.cc @@ -0,0 +1,19 @@ +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No CUDA hardware found. 
Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << " CUDA devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/10 cuda dependency/meson.build b/meson/test cases/cuda/10 cuda dependency/meson.build new file mode 100644 index 000000000..3e602b668 --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/meson.build @@ -0,0 +1,6 @@ +project('cuda dependency', 'c', 'cpp') + +subdir('c') +subdir('cpp') +subdir('modules') +subdir('version_reqs') diff --git a/meson/test cases/cuda/10 cuda dependency/modules/meson.build b/meson/test cases/cuda/10 cuda dependency/modules/meson.build new file mode 100644 index 000000000..0da43f29d --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/modules/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.cc', dependencies: dependency('cuda', modules: ['cublas'])) +test('cudatest', exe) diff --git a/meson/test cases/cuda/10 cuda dependency/modules/prog.cc b/meson/test cases/cuda/10 cuda dependency/modules/prog.cc new file mode 100644 index 000000000..b4af4d41f --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/modules/prog.cc @@ -0,0 +1,33 @@ +#include +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No CUDA hardware found. Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << " CUDA devices.\n"; + + cublasHandle_t handle; + if (cublasCreate(&handle) != CUBLAS_STATUS_SUCCESS) { + std::cout << "cuBLAS initialization failed. Exiting.\n"; + return -1; + } + + std::cout << "Initialized cuBLAS\n"; + if (cublasDestroy(handle) != CUBLAS_STATUS_SUCCESS) { + std::cout << "cuBLAS de-initialization failed. Exiting.\n"; + return -1; + } + + return 0; +} diff --git a/meson/test cases/cuda/10 cuda dependency/version_reqs/meson.build b/meson/test cases/cuda/10 cuda dependency/version_reqs/meson.build new file mode 100644 index 000000000..45b5daa1c --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/version_reqs/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.cc', dependencies: dependency('cuda', version: ['>=8.5', '<10'], required: false, disabler: true)) +test('cudatest', exe) diff --git a/meson/test cases/cuda/10 cuda dependency/version_reqs/prog.cc b/meson/test cases/cuda/10 cuda dependency/version_reqs/prog.cc new file mode 100644 index 000000000..56688303f --- /dev/null +++ b/meson/test cases/cuda/10 cuda dependency/version_reqs/prog.cc @@ -0,0 +1,28 @@ +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + +int main(void) { + std::cout << "Compiled against CUDA version: " << CUDART_VERSION << "\n"; + int runtime_version = 0; + cudaError_t r = cudaRuntimeGetVersion(&runtime_version); + if (r != cudaSuccess) { + std::cout << "Couldn't obtain CUDA runtime version (error " << r << "). Exiting.\n"; + return -1; + } + std::cout << "CUDA runtime version: " << runtime_version << "\n"; + + int n = cuda_devices(); + if (n == 0) { + std::cout << "No CUDA hardware found. 
Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << " CUDA devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/11 cuda dependency (nvcc)/meson.build b/meson/test cases/cuda/11 cuda dependency (nvcc)/meson.build new file mode 100644 index 000000000..67b6568c5 --- /dev/null +++ b/meson/test cases/cuda/11 cuda dependency (nvcc)/meson.build @@ -0,0 +1,4 @@ +project('cuda dependency', 'cuda') + +subdir('modules') +subdir('version_reqs') diff --git a/meson/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build b/meson/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build new file mode 100644 index 000000000..c0fed8349 --- /dev/null +++ b/meson/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.cu', dependencies: dependency('cuda', modules: ['cublas'])) +test('cudatest', exe) diff --git a/meson/test cases/cuda/11 cuda dependency (nvcc)/modules/prog.cu b/meson/test cases/cuda/11 cuda dependency (nvcc)/modules/prog.cu new file mode 100644 index 000000000..b4af4d41f --- /dev/null +++ b/meson/test cases/cuda/11 cuda dependency (nvcc)/modules/prog.cu @@ -0,0 +1,33 @@ +#include +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No CUDA hardware found. Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << " CUDA devices.\n"; + + cublasHandle_t handle; + if (cublasCreate(&handle) != CUBLAS_STATUS_SUCCESS) { + std::cout << "cuBLAS initialization failed. Exiting.\n"; + return -1; + } + + std::cout << "Initialized cuBLAS\n"; + if (cublasDestroy(handle) != CUBLAS_STATUS_SUCCESS) { + std::cout << "cuBLAS de-initialization failed. Exiting.\n"; + return -1; + } + + return 0; +} diff --git a/meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/meson.build b/meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/meson.build new file mode 100644 index 000000000..6644c9e2b --- /dev/null +++ b/meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.cu', dependencies: dependency('cuda', version: ['>=10.1'], required: false, disabler: true)) +test('cudatest', exe) diff --git a/meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/prog.cu b/meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/prog.cu new file mode 100644 index 000000000..bc90081e5 --- /dev/null +++ b/meson/test cases/cuda/11 cuda dependency (nvcc)/version_reqs/prog.cu @@ -0,0 +1,29 @@ +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + +int main(void) { + std::cout << "Compiled against CUDA version: " << CUDART_VERSION << "\n"; + + int runtime_version = 0; + switch (cudaError_t r = cudaRuntimeGetVersion(&runtime_version)) { + case cudaSuccess: + std::cout << "CUDA runtime version: " << runtime_version << "\n"; + break; + case cudaErrorNoDevice: + std::cout << "No CUDA hardware found. Exiting.\n"; + return 0; + default: + std::cout << "Couldn't obtain CUDA runtime version (error " << r << "). 
Exiting.\n"; + return -1; + } + + int n = cuda_devices(); + std::cout << "Found " << n << " CUDA devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/12 cuda dependency (mixed)/kernel.cu b/meson/test cases/cuda/12 cuda dependency (mixed)/kernel.cu new file mode 100644 index 000000000..7daaa6839 --- /dev/null +++ b/meson/test cases/cuda/12 cuda dependency (mixed)/kernel.cu @@ -0,0 +1,8 @@ +#include + +__global__ void kernel (void){ +} + +void do_cuda_stuff(void) { + kernel<<<1,1>>>(); +} diff --git a/meson/test cases/cuda/12 cuda dependency (mixed)/meson.build b/meson/test cases/cuda/12 cuda dependency (mixed)/meson.build new file mode 100644 index 000000000..5df4f846e --- /dev/null +++ b/meson/test cases/cuda/12 cuda dependency (mixed)/meson.build @@ -0,0 +1,4 @@ +project('cuda dependency', 'cpp', 'cuda') + +exe = executable('prog', 'prog.cpp', 'kernel.cu', dependencies: dependency('cuda', modules: ['cublas'])) +test('cudatest', exe) diff --git a/meson/test cases/cuda/12 cuda dependency (mixed)/prog.cpp b/meson/test cases/cuda/12 cuda dependency (mixed)/prog.cpp new file mode 100644 index 000000000..50bb156ad --- /dev/null +++ b/meson/test cases/cuda/12 cuda dependency (mixed)/prog.cpp @@ -0,0 +1,37 @@ +#include +#include +#include + +void do_cuda_stuff(void); + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No CUDA hardware found. Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << " CUDA devices.\n"; + + do_cuda_stuff(); + + cublasHandle_t handle; + if (cublasCreate(&handle) != CUBLAS_STATUS_SUCCESS) { + std::cout << "cuBLAS initialization failed. Exiting.\n"; + return -1; + } + + std::cout << "Initialized cuBLAS\n"; + if (cublasDestroy(handle) != CUBLAS_STATUS_SUCCESS) { + std::cout << "cuBLAS de-initialization failed. Exiting.\n"; + return -1; + } + + return 0; +} diff --git a/meson/test cases/cuda/13 cuda compiler setting/meson.build b/meson/test cases/cuda/13 cuda compiler setting/meson.build new file mode 100644 index 000000000..19af734af --- /dev/null +++ b/meson/test cases/cuda/13 cuda compiler setting/meson.build @@ -0,0 +1,5 @@ +project('simple', 'cuda', version : '1.0.0') + +exe = executable('prog', 'prog.cu') +test('cudatest', exe) + diff --git a/meson/test cases/cuda/13 cuda compiler setting/nativefile.ini b/meson/test cases/cuda/13 cuda compiler setting/nativefile.ini new file mode 100644 index 000000000..ffaad65fd --- /dev/null +++ b/meson/test cases/cuda/13 cuda compiler setting/nativefile.ini @@ -0,0 +1,5 @@ +[binaries] + +cuda = 'nvcc' + + diff --git a/meson/test cases/cuda/13 cuda compiler setting/prog.cu b/meson/test cases/cuda/13 cuda compiler setting/prog.cu new file mode 100644 index 000000000..b893bd331 --- /dev/null +++ b/meson/test cases/cuda/13 cuda compiler setting/prog.cu @@ -0,0 +1,30 @@ +#include + +int main(void) { + int cuda_devices = 0; + std::cout << "CUDA version: " << CUDART_VERSION << "\n"; + cudaGetDeviceCount(&cuda_devices); + if(cuda_devices == 0) { + std::cout << "No Cuda hardware found. 
Exiting.\n"; + return 0; + } + std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n"; + cudaDeviceProp props; + cudaGetDeviceProperties(&props, 0); + std::cout << "Properties of device 0.\n\n"; + + std::cout << " Name: " << props.name << "\n"; + std::cout << " Global memory: " << props.totalGlobalMem << "\n"; + std::cout << " Shared memory: " << props.sharedMemPerBlock << "\n"; + std::cout << " Constant memory: " << props.totalConstMem << "\n"; + std::cout << " Block registers: " << props.regsPerBlock << "\n"; + + std::cout << " Warp size: " << props.warpSize << "\n"; + std::cout << " Threads per block: " << props.maxThreadsPerBlock << "\n"; + std::cout << " Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1] << ", " << props.maxThreadsDim[2] << " ]" << "\n"; + std::cout << " Max grid dimensions: [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1] << ", " << props.maxGridSize[2] << " ]" << "\n"; + std::cout << "\n"; + + return 0; +} + diff --git a/meson/test cases/cuda/14 cuda has header symbol/meson.build b/meson/test cases/cuda/14 cuda has header symbol/meson.build new file mode 100644 index 000000000..b29c52e58 --- /dev/null +++ b/meson/test cases/cuda/14 cuda has header symbol/meson.build @@ -0,0 +1,27 @@ +project('cuda has header symbol', 'cuda') + +cuda = meson.get_compiler('cuda') + +# C checks +assert (cuda.has_header_symbol('stdio.h', 'int'), 'base types should always be available') +assert (cuda.has_header_symbol('stdio.h', 'printf'), 'printf function not found') +assert (cuda.has_header_symbol('stdio.h', 'FILE'), 'FILE structure not found') +assert (cuda.has_header_symbol('limits.h', 'INT_MAX'), 'INT_MAX define not found') +assert (not cuda.has_header_symbol('limits.h', 'guint64'), 'guint64 is not defined in limits.h') +assert (not cuda.has_header_symbol('stdlib.h', 'FILE'), 'FILE structure is defined in stdio.h, not stdlib.h') +assert (not cuda.has_header_symbol('stdlol.h', 'printf'), 'stdlol.h shouldn\'t exist') +assert (not cuda.has_header_symbol('stdlol.h', 'int'), 'shouldn\'t be able to find "int" with invalid header') + +# C++ checks +assert (cuda.has_header_symbol('iostream', 'std::iostream'), 'iostream not found in iostream.h') +assert (cuda.has_header_symbol('vector', 'std::vector'), 'vector not found in vector.h') +assert (not cuda.has_header_symbol('limits.h', 'std::iostream'), 'iostream should not be defined in limits.h') + +# CUDA checks +assert (cuda.has_header_symbol('cuda.h', 'CUDA_VERSION'), 'CUDA_VERSION not found in cuda.h') +assert (not cuda.has_header_symbol('cuda.h', 'cublasSaxpy'), 'cublasSaxpy is defined in cublas.h, not cuda.h') +if cuda.version().version_compare('>=4.0') + assert (cuda.has_header_symbol('thrust/device_vector.h', 'thrust::device_vector'), 'thrust::device_vector not found') + assert (not cuda.has_header_symbol('thrust/fill.h', 'thrust::sort'), 'thrust::sort should not be defined in thrust/fill.h') +endif + diff --git a/meson/test cases/cuda/15 sanitizer/meson.build b/meson/test cases/cuda/15 sanitizer/meson.build new file mode 100644 index 000000000..367a4e2f8 --- /dev/null +++ b/meson/test cases/cuda/15 sanitizer/meson.build @@ -0,0 +1,4 @@ +project('simple', 'cuda', version : '1.0.0', + default_options: ['b_sanitize=address,undefined']) + +libtests = shared_library('tests', 'prog.cu') diff --git a/meson/test cases/cuda/15 sanitizer/prog.cu b/meson/test cases/cuda/15 sanitizer/prog.cu new file mode 100644 index 000000000..340b07aea --- /dev/null +++ b/meson/test cases/cuda/15 
sanitizer/prog.cu @@ -0,0 +1,30 @@ +#include + +int run_tests(void) { + int cuda_devices = 0; + std::cout << "CUDA version: " << CUDART_VERSION << "\n"; + cudaGetDeviceCount(&cuda_devices); + if(cuda_devices == 0) { + std::cout << "No Cuda hardware found. Exiting.\n"; + return 0; + } + std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n"; + cudaDeviceProp props; + cudaGetDeviceProperties(&props, 0); + std::cout << "Properties of device 0.\n\n"; + + std::cout << " Name: " << props.name << "\n"; + std::cout << " Global memory: " << props.totalGlobalMem << "\n"; + std::cout << " Shared memory: " << props.sharedMemPerBlock << "\n"; + std::cout << " Constant memory: " << props.totalConstMem << "\n"; + std::cout << " Block registers: " << props.regsPerBlock << "\n"; + + std::cout << " Warp size: " << props.warpSize << "\n"; + std::cout << " Threads per block: " << props.maxThreadsPerBlock << "\n"; + std::cout << " Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1] << ", " << props.maxThreadsDim[2] << " ]" << "\n"; + std::cout << " Max grid dimensions: [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1] << ", " << props.maxGridSize[2] << " ]" << "\n"; + std::cout << "\n"; + + return 0; +} + diff --git a/meson/test cases/cuda/16 multistd/main.cu b/meson/test cases/cuda/16 multistd/main.cu new file mode 100644 index 000000000..a2ffba489 --- /dev/null +++ b/meson/test cases/cuda/16 multistd/main.cu @@ -0,0 +1,20 @@ +#include +#include + +auto cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No Cuda hardware found. Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << "Cuda devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/16 multistd/meson.build b/meson/test cases/cuda/16 multistd/meson.build new file mode 100644 index 000000000..4769a87dd --- /dev/null +++ b/meson/test cases/cuda/16 multistd/meson.build @@ -0,0 +1,4 @@ +project('C++-CUDA multi-std', 'cpp', 'cuda', version : '1.0.0', default_options : ['cpp_std=c++17', 'cuda_std=c++14']) + +exe = executable('prog', 'main.cu') +test('cudatest', exe) diff --git a/meson/test cases/cuda/2 split/lib.cu b/meson/test cases/cuda/2 split/lib.cu new file mode 100644 index 000000000..850d7ddfb --- /dev/null +++ b/meson/test cases/cuda/2 split/lib.cu @@ -0,0 +1,13 @@ +#include +#include + +__global__ void kernel (void){ +} + +int do_cuda_stuff(void) { + kernel<<<1,1>>>(); + + printf("Hello, World!\n"); + return 0; +} + diff --git a/meson/test cases/cuda/2 split/main.cpp b/meson/test cases/cuda/2 split/main.cpp new file mode 100644 index 000000000..ce79003e5 --- /dev/null +++ b/meson/test cases/cuda/2 split/main.cpp @@ -0,0 +1,7 @@ +#include + +int do_cuda_stuff(void); + +int main(void) { + return do_cuda_stuff(); +} diff --git a/meson/test cases/cuda/2 split/meson.build b/meson/test cases/cuda/2 split/meson.build new file mode 100644 index 000000000..51bf6ce23 --- /dev/null +++ b/meson/test cases/cuda/2 split/meson.build @@ -0,0 +1,7 @@ +project('simple', 'cuda', 'cpp') + +exe = executable('prog', 'main.cpp', 'lib.cu') +test('cudatest', exe) + +subdir('static') + diff --git a/meson/test cases/cuda/2 split/static/lib.cu b/meson/test cases/cuda/2 split/static/lib.cu new file mode 100644 index 000000000..c0471d048 --- /dev/null +++ b/meson/test cases/cuda/2 split/static/lib.cu @@ -0,0 +1,13 @@ +#include +#include + +__global__ void kernel (void){ +} + +int 
do_cuda_stuff() { + kernel<<<1,1>>>(); + + printf("Hello, World!\n"); + return 0; +} + diff --git a/meson/test cases/cuda/2 split/static/libsta.cu b/meson/test cases/cuda/2 split/static/libsta.cu new file mode 100644 index 000000000..c0471d048 --- /dev/null +++ b/meson/test cases/cuda/2 split/static/libsta.cu @@ -0,0 +1,13 @@ +#include +#include + +__global__ void kernel (void){ +} + +int do_cuda_stuff() { + kernel<<<1,1>>>(); + + printf("Hello, World!\n"); + return 0; +} + diff --git a/meson/test cases/cuda/2 split/static/main_static.cpp b/meson/test cases/cuda/2 split/static/main_static.cpp new file mode 100644 index 000000000..ce79003e5 --- /dev/null +++ b/meson/test cases/cuda/2 split/static/main_static.cpp @@ -0,0 +1,7 @@ +#include + +int do_cuda_stuff(void); + +int main(void) { + return do_cuda_stuff(); +} diff --git a/meson/test cases/cuda/2 split/static/meson.build b/meson/test cases/cuda/2 split/static/meson.build new file mode 100644 index 000000000..9078198d5 --- /dev/null +++ b/meson/test cases/cuda/2 split/static/meson.build @@ -0,0 +1,4 @@ +l = static_library('clib', 'lib.cu') +exe = executable('staexe', 'main_static.cpp', + link_with : l) +test('static Cuda test', exe) diff --git a/meson/test cases/cuda/3 cudamodule/meson.build b/meson/test cases/cuda/3 cudamodule/meson.build new file mode 100644 index 000000000..fd5e83fea --- /dev/null +++ b/meson/test cases/cuda/3 cudamodule/meson.build @@ -0,0 +1,70 @@ +project('cudamodule', 'cuda', version : '1.0.0') + +nvcc = meson.get_compiler('cuda') +cuda = import('unstable-cuda') + +arch_flags = cuda.nvcc_arch_flags(nvcc.version(), 'Auto', detected: ['6.0']) +arch_readable = cuda.nvcc_arch_readable(nvcc.version(), 'Auto', detected: ['6.0']) +driver_version = cuda.min_driver_version(nvcc.version()) + +message('NVCC version: ' + nvcc.version()) +message('NVCC flags: ' + ' '.join(arch_flags)) +message('NVCC readable: ' + ' '.join(arch_readable)) +message('Driver version: >=' + driver_version) + +exe = executable('prog', 'prog.cu', cuda_args: arch_flags) +test('cudatest', exe) + + +# +# Assert Series +# + +# Sanity test. +assert(' '.join(cuda.nvcc_arch_flags('11.1', '8.6')) == + '-gencode arch=compute_86,code=sm_86') + +# CUDA Toolkit too old, flag filtered out. +assert(' '.join(cuda.nvcc_arch_flags('11.0', '8.6')) == + '') + +# Named architectures. +assert(' '.join(cuda.nvcc_arch_flags('11.0', 'Ampere')) == + '-gencode arch=compute_80,code=sm_80') + +# Splitting & deduplication. +assert(' '.join(cuda.nvcc_arch_flags('11.0', 'Ampere;8.0,8.0')) == + '-gencode arch=compute_80,code=sm_80') + +# Same, but list supplied as list. +assert(' '.join(cuda.nvcc_arch_flags('11.0', ['Ampere', '8.0', '8.0'])) == + '-gencode arch=compute_80,code=sm_80') + +# Same, but mode set to Auto with detected set to a string with a variety of separators. +assert(' '.join(cuda.nvcc_arch_flags('11.0', 'Auto', detected: 'Ampere;8.0,8.0')) == + '-gencode arch=compute_80,code=sm_80') + +# Same, but detected set to a list. +assert(' '.join(cuda.nvcc_arch_flags('11.0', 'Auto', detected: ['Ampere', '8.0', '8.0'])) == + '-gencode arch=compute_80,code=sm_80') + +# Ask for 8.6 binary with 8.0-level PTX. +assert(' '.join(cuda.nvcc_arch_flags('11.1', '8.6(8.0)')) == + '-gencode arch=compute_80,code=sm_86') + +# Same, but keep the 8.0 PTX. 
+assert(' '.join(cuda.nvcc_arch_flags('11.1', '8.6(8.0)+PTX')) == + '-gencode arch=compute_80,code=sm_86 -gencode arch=compute_80,code=compute_80') + +# Detected Ampere RTX 3090 on CUDA 10.2, saturate to 7.5+PTX +assert(' '.join(cuda.nvcc_arch_flags('10.2', 'Auto', detected: ['8.0'])) == + '-gencode arch=compute_75,code=sm_75 -gencode arch=compute_75,code=compute_75') + +# Failed to auto-detect with CUDA 10.2, default to common GPUs (3.0;3.5;5.0;5.2;6.0;6.1;7.0;7.5+PTX) +assert(' '.join(cuda.nvcc_arch_flags('10.2', 'Auto', detected: [])) == + '-gencode arch=compute_30,code=sm_30 -gencode arch=compute_35,code=sm_35 '+ + '-gencode arch=compute_50,code=sm_50 -gencode arch=compute_52,code=sm_52 '+ + '-gencode arch=compute_60,code=sm_60 -gencode arch=compute_61,code=sm_61 '+ + '-gencode arch=compute_70,code=sm_70 -gencode arch=compute_75,code=sm_75 '+ + '-gencode arch=compute_75,code=compute_75') + diff --git a/meson/test cases/cuda/3 cudamodule/prog.cu b/meson/test cases/cuda/3 cudamodule/prog.cu new file mode 100644 index 000000000..b893bd331 --- /dev/null +++ b/meson/test cases/cuda/3 cudamodule/prog.cu @@ -0,0 +1,30 @@ +#include + +int main(void) { + int cuda_devices = 0; + std::cout << "CUDA version: " << CUDART_VERSION << "\n"; + cudaGetDeviceCount(&cuda_devices); + if(cuda_devices == 0) { + std::cout << "No Cuda hardware found. Exiting.\n"; + return 0; + } + std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n"; + cudaDeviceProp props; + cudaGetDeviceProperties(&props, 0); + std::cout << "Properties of device 0.\n\n"; + + std::cout << " Name: " << props.name << "\n"; + std::cout << " Global memory: " << props.totalGlobalMem << "\n"; + std::cout << " Shared memory: " << props.sharedMemPerBlock << "\n"; + std::cout << " Constant memory: " << props.totalConstMem << "\n"; + std::cout << " Block registers: " << props.regsPerBlock << "\n"; + + std::cout << " Warp size: " << props.warpSize << "\n"; + std::cout << " Threads per block: " << props.maxThreadsPerBlock << "\n"; + std::cout << " Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1] << ", " << props.maxThreadsDim[2] << " ]" << "\n"; + std::cout << " Max grid dimensions: [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1] << ", " << props.maxGridSize[2] << " ]" << "\n"; + std::cout << "\n"; + + return 0; +} + diff --git a/meson/test cases/cuda/4 shared/main.cu b/meson/test cases/cuda/4 shared/main.cu new file mode 100644 index 000000000..12359140e --- /dev/null +++ b/meson/test cases/cuda/4 shared/main.cu @@ -0,0 +1,20 @@ +#include +#include +#include "shared/kernels.h" + + +int main(void) { + int cuda_devices = 0; + cudaGetDeviceCount(&cuda_devices); + if(cuda_devices == 0) { + printf("No Cuda hardware found. Exiting.\n"); + return 0; + } + + if(run_tests() != 0){ + printf("CUDA tests failed! 
Exiting.\n"); + return 0; + } + + return 0; +} diff --git a/meson/test cases/cuda/4 shared/meson.build b/meson/test cases/cuda/4 shared/meson.build new file mode 100644 index 000000000..532aaebfb --- /dev/null +++ b/meson/test cases/cuda/4 shared/meson.build @@ -0,0 +1,6 @@ +project('simple', 'cuda', version : '1.0.0') + +subdir('shared') + +exe = executable('prog', 'main.cu', dependencies: libkernels) +test('cudatest', exe) diff --git a/meson/test cases/cuda/4 shared/shared/kernels.cu b/meson/test cases/cuda/4 shared/shared/kernels.cu new file mode 100644 index 000000000..41a95536f --- /dev/null +++ b/meson/test cases/cuda/4 shared/shared/kernels.cu @@ -0,0 +1,14 @@ +#include +#include +#include "kernels.h" + + +TAG_HIDDEN __global__ void kernel (void){ +} + +TAG_PUBLIC int run_tests(void) { + kernel<<<1,1>>>(); + + return (int)cudaDeviceSynchronize(); +} + diff --git a/meson/test cases/cuda/4 shared/shared/kernels.h b/meson/test cases/cuda/4 shared/shared/kernels.h new file mode 100644 index 000000000..dbcb99d10 --- /dev/null +++ b/meson/test cases/cuda/4 shared/shared/kernels.h @@ -0,0 +1,86 @@ +/* Include Guard */ +#ifndef SHARED_KERNELS_H +#define SHARED_KERNELS_H + +/** + * Includes + */ + +#include + + +/** + * Defines + */ + +/** + * When building a library, it is a good idea to expose as few as possible + * internal symbols (functions, objects, data structures). Not only does it + * prevent users from relying on private portions of the library that are + * subject to change without any notice, but it can have performance + * advantages: + * + * - It can make shared libraries link faster at dynamic-load time. + * - It can make internal function calls faster by bypassing the PLT. + * + * Thus, the compilation should by default hide all symbols, while the API + * headers will explicitly mark public the few symbols the users are permitted + * to use with a PUBLIC tag. We also define a HIDDEN tag, since it may be + * required to explicitly tag certain C++ types as visible in order for + * exceptions to function correctly. + * + * Additional complexity comes from non-POSIX-compliant systems, which + * artificially impose a requirement on knowing whether we are building or + * using a DLL. 
+ * + * The above commentary and below code is inspired from + * 'https://gcc.gnu.org/wiki/Visibility' + */ + +#if defined(_WIN32) || defined(__CYGWIN__) +# define TAG_ATTRIBUTE_EXPORT __declspec(dllexport) +# define TAG_ATTRIBUTE_IMPORT __declspec(dllimport) +# define TAG_ATTRIBUTE_HIDDEN +#elif __GNUC__ >= 4 +# define TAG_ATTRIBUTE_EXPORT __attribute__((visibility("default"))) +# define TAG_ATTRIBUTE_IMPORT __attribute__((visibility("default"))) +# define TAG_ATTRIBUTE_HIDDEN __attribute__((visibility("hidden"))) +#else +# define TAG_ATTRIBUTE_EXPORT +# define TAG_ATTRIBUTE_IMPORT +# define TAG_ATTRIBUTE_HIDDEN +#endif + +#if TAG_IS_SHARED +# if TAG_IS_BUILDING +# define TAG_PUBLIC TAG_ATTRIBUTE_EXPORT +# else +# define TAG_PUBLIC TAG_ATTRIBUTE_IMPORT +# endif +# define TAG_HIDDEN TAG_ATTRIBUTE_HIDDEN +#else +# define TAG_PUBLIC +# define TAG_HIDDEN +#endif +#define TAG_STATIC static + + + + +/* Extern "C" Guard */ +#ifdef __cplusplus +extern "C" { +#endif + + + +/* Function Prototypes */ +TAG_PUBLIC int run_tests(void); + + + +/* End Extern "C" and Include Guard */ +#ifdef __cplusplus +} +#endif +#endif diff --git a/meson/test cases/cuda/4 shared/shared/meson.build b/meson/test cases/cuda/4 shared/shared/meson.build new file mode 100644 index 000000000..59879166b --- /dev/null +++ b/meson/test cases/cuda/4 shared/shared/meson.build @@ -0,0 +1,5 @@ +libkernels = shared_library('kernels', 'kernels.cu', + cuda_args: ['-DTAG_IS_SHARED=1', '-DTAG_IS_BUILDING=1'], + gnu_symbol_visibility: 'hidden') +libkernels = declare_dependency(compile_args: ['-DTAG_IS_SHARED=1'], + link_with: libkernels) diff --git a/meson/test cases/cuda/5 threads/main.cu b/meson/test cases/cuda/5 threads/main.cu new file mode 100644 index 000000000..12359140e --- /dev/null +++ b/meson/test cases/cuda/5 threads/main.cu @@ -0,0 +1,20 @@ +#include +#include +#include "shared/kernels.h" + + +int main(void) { + int cuda_devices = 0; + cudaGetDeviceCount(&cuda_devices); + if(cuda_devices == 0) { + printf("No Cuda hardware found. Exiting.\n"); + return 0; + } + + if(run_tests() != 0){ + printf("CUDA tests failed! 
Exiting.\n"); + return 0; + } + + return 0; +} diff --git a/meson/test cases/cuda/5 threads/meson.build b/meson/test cases/cuda/5 threads/meson.build new file mode 100644 index 000000000..2a804a34f --- /dev/null +++ b/meson/test cases/cuda/5 threads/meson.build @@ -0,0 +1,7 @@ +project('simple', 'cuda', version : '1.0.0') + +subdir('shared') + +thread_dep = dependency('threads') +exe = executable('prog', 'main.cu', dependencies: [libkernels, thread_dep]) +test('cudatest', exe) diff --git a/meson/test cases/cuda/5 threads/shared/kernels.cu b/meson/test cases/cuda/5 threads/shared/kernels.cu new file mode 100644 index 000000000..41a95536f --- /dev/null +++ b/meson/test cases/cuda/5 threads/shared/kernels.cu @@ -0,0 +1,14 @@ +#include +#include +#include "kernels.h" + + +TAG_HIDDEN __global__ void kernel (void){ +} + +TAG_PUBLIC int run_tests(void) { + kernel<<<1,1>>>(); + + return (int)cudaDeviceSynchronize(); +} + diff --git a/meson/test cases/cuda/5 threads/shared/kernels.h b/meson/test cases/cuda/5 threads/shared/kernels.h new file mode 100644 index 000000000..dbcb99d10 --- /dev/null +++ b/meson/test cases/cuda/5 threads/shared/kernels.h @@ -0,0 +1,86 @@ +/* Include Guard */ +#ifndef SHARED_KERNELS_H +#define SHARED_KERNELS_H + +/** + * Includes + */ + +#include + + +/** + * Defines + */ + +/** + * When building a library, it is a good idea to expose as few as possible + * internal symbols (functions, objects, data structures). Not only does it + * prevent users from relying on private portions of the library that are + * subject to change without any notice, but it can have performance + * advantages: + * + * - It can make shared libraries link faster at dynamic-load time. + * - It can make internal function calls faster by bypassing the PLT. + * + * Thus, the compilation should by default hide all symbols, while the API + * headers will explicitly mark public the few symbols the users are permitted + * to use with a PUBLIC tag. We also define a HIDDEN tag, since it may be + * required to explicitly tag certain C++ types as visible in order for + * exceptions to function correctly. + * + * Additional complexity comes from non-POSIX-compliant systems, which + * artificially impose a requirement on knowing whether we are building or + * using a DLL. 
+ * + * The above commentary and below code is inspired from + * 'https://gcc.gnu.org/wiki/Visibility' + */ + +#if defined(_WIN32) || defined(__CYGWIN__) +# define TAG_ATTRIBUTE_EXPORT __declspec(dllexport) +# define TAG_ATTRIBUTE_IMPORT __declspec(dllimport) +# define TAG_ATTRIBUTE_HIDDEN +#elif __GNUC__ >= 4 +# define TAG_ATTRIBUTE_EXPORT __attribute__((visibility("default"))) +# define TAG_ATTRIBUTE_IMPORT __attribute__((visibility("default"))) +# define TAG_ATTRIBUTE_HIDDEN __attribute__((visibility("hidden"))) +#else +# define TAG_ATTRIBUTE_EXPORT +# define TAG_ATTRIBUTE_IMPORT +# define TAG_ATTRIBUTE_HIDDEN +#endif + +#if TAG_IS_SHARED +# if TAG_IS_BUILDING +# define TAG_PUBLIC TAG_ATTRIBUTE_EXPORT +# else +# define TAG_PUBLIC TAG_ATTRIBUTE_IMPORT +# endif +# define TAG_HIDDEN TAG_ATTRIBUTE_HIDDEN +#else +# define TAG_PUBLIC +# define TAG_HIDDEN +#endif +#define TAG_STATIC static + + + + +/* Extern "C" Guard */ +#ifdef __cplusplus +extern "C" { +#endif + + + +/* Function Prototypes */ +TAG_PUBLIC int run_tests(void); + + + +/* End Extern "C" and Include Guard */ +#ifdef __cplusplus +} +#endif +#endif diff --git a/meson/test cases/cuda/5 threads/shared/meson.build b/meson/test cases/cuda/5 threads/shared/meson.build new file mode 100644 index 000000000..59879166b --- /dev/null +++ b/meson/test cases/cuda/5 threads/shared/meson.build @@ -0,0 +1,5 @@ +libkernels = shared_library('kernels', 'kernels.cu', + cuda_args: ['-DTAG_IS_SHARED=1', '-DTAG_IS_BUILDING=1'], + gnu_symbol_visibility: 'hidden') +libkernels = declare_dependency(compile_args: ['-DTAG_IS_SHARED=1'], + link_with: libkernels) diff --git a/meson/test cases/cuda/6 std/main.cu b/meson/test cases/cuda/6 std/main.cu new file mode 100644 index 000000000..a2ffba489 --- /dev/null +++ b/meson/test cases/cuda/6 std/main.cu @@ -0,0 +1,20 @@ +#include +#include + +auto cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No Cuda hardware found. Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << "Cuda devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/6 std/meson.build b/meson/test cases/cuda/6 std/meson.build new file mode 100644 index 000000000..69a6868ca --- /dev/null +++ b/meson/test cases/cuda/6 std/meson.build @@ -0,0 +1,4 @@ +project('C++ std', 'cuda', version : '1.0.0', default_options : ['cuda_std=c++14']) + +exe = executable('prog', 'main.cu') +test('cudatest', exe) diff --git a/meson/test cases/cuda/7 static vs runtime/main.cu b/meson/test cases/cuda/7 static vs runtime/main.cu new file mode 100644 index 000000000..bd1dbc4a0 --- /dev/null +++ b/meson/test cases/cuda/7 static vs runtime/main.cu @@ -0,0 +1,20 @@ +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No Cuda hardware found. 
Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << "Cuda devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/7 static vs runtime/meson.build b/meson/test cases/cuda/7 static vs runtime/meson.build new file mode 100644 index 000000000..ab133044d --- /dev/null +++ b/meson/test cases/cuda/7 static vs runtime/meson.build @@ -0,0 +1,4 @@ +project('static msvc runtime', 'cuda', version : '1.0.0', default_options : ['b_vscrt=mtd']) + +exe = executable('prog', 'main.cu') +test('cudatest', exe) diff --git a/meson/test cases/cuda/8 release/main.cu b/meson/test cases/cuda/8 release/main.cu new file mode 100644 index 000000000..bd1dbc4a0 --- /dev/null +++ b/meson/test cases/cuda/8 release/main.cu @@ -0,0 +1,20 @@ +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No Cuda hardware found. Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << "Cuda devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/8 release/meson.build b/meson/test cases/cuda/8 release/meson.build new file mode 100644 index 000000000..bdb311d96 --- /dev/null +++ b/meson/test cases/cuda/8 release/meson.build @@ -0,0 +1,4 @@ +project('release', 'cuda', version : '1.0.0', default_options : ['buildtype=release']) + +exe = executable('prog', 'main.cu') +test('cudatest', exe) diff --git a/meson/test cases/cuda/9 optimize for space/main.cu b/meson/test cases/cuda/9 optimize for space/main.cu new file mode 100644 index 000000000..bd1dbc4a0 --- /dev/null +++ b/meson/test cases/cuda/9 optimize for space/main.cu @@ -0,0 +1,20 @@ +#include +#include + +int cuda_devices(void) { + int result = 0; + cudaGetDeviceCount(&result); + return result; +} + + +int main(void) { + int n = cuda_devices(); + if (n == 0) { + std::cout << "No Cuda hardware found. 
Exiting.\n"; + return 0; + } + + std::cout << "Found " << n << "Cuda devices.\n"; + return 0; +} diff --git a/meson/test cases/cuda/9 optimize for space/meson.build b/meson/test cases/cuda/9 optimize for space/meson.build new file mode 100644 index 000000000..cd6ac055d --- /dev/null +++ b/meson/test cases/cuda/9 optimize for space/meson.build @@ -0,0 +1,4 @@ +project('optimize for space', 'cuda', version : '1.0.0', default_options : ['optimization=s']) + +exe = executable('prog', 'main.cu') +test('cudatest', exe) diff --git a/meson/test cases/cython/1 basic/cytest.py b/meson/test cases/cython/1 basic/cytest.py new file mode 100755 index 000000000..c08ffeed3 --- /dev/null +++ b/meson/test cases/cython/1 basic/cytest.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 + +from storer import Storer + +s = Storer() + +if s.get_value() != 0: + raise SystemExit('Initial value incorrect.') + +s.set_value(42) + +if s.get_value() != 42: + raise SystemExit('Setting value failed.') + +try: + s.set_value('not a number') + raise SystemExit('Using wrong argument type did not fail.') +except TypeError: + pass diff --git a/meson/test cases/cython/1 basic/libdir/cstorer.pxd b/meson/test cases/cython/1 basic/libdir/cstorer.pxd new file mode 100644 index 000000000..7b730fc75 --- /dev/null +++ b/meson/test cases/cython/1 basic/libdir/cstorer.pxd @@ -0,0 +1,9 @@ + +cdef extern from "storer.h": + ctypedef struct Storer: + pass + + Storer* storer_new(); + void storer_destroy(Storer *s); + int storer_get_value(Storer *s); + void storer_set_value(Storer *s, int v); diff --git a/meson/test cases/cython/1 basic/libdir/meson.build b/meson/test cases/cython/1 basic/libdir/meson.build new file mode 100644 index 000000000..144bb1f14 --- /dev/null +++ b/meson/test cases/cython/1 basic/libdir/meson.build @@ -0,0 +1,8 @@ +slib = py3.extension_module( + 'storer', + 'storer.pyx', + 'storer.c', + dependencies : py3_dep +) + +pydir = meson.current_build_dir() diff --git a/meson/test cases/cython/1 basic/libdir/storer.c b/meson/test cases/cython/1 basic/libdir/storer.c new file mode 100644 index 000000000..0199bb850 --- /dev/null +++ b/meson/test cases/cython/1 basic/libdir/storer.c @@ -0,0 +1,24 @@ +#include"storer.h" +#include + +struct _Storer { + int value; +}; + +Storer* storer_new() { + Storer *s = malloc(sizeof(struct _Storer)); + s->value = 0; + return s; +} + +void storer_destroy(Storer *s) { + free(s); +} + +int storer_get_value(Storer *s) { + return s->value; +} + +void storer_set_value(Storer *s, int v) { + s->value = v; +} diff --git a/meson/test cases/cython/1 basic/libdir/storer.h b/meson/test cases/cython/1 basic/libdir/storer.h new file mode 100644 index 000000000..4f7191711 --- /dev/null +++ b/meson/test cases/cython/1 basic/libdir/storer.h @@ -0,0 +1,8 @@ +#pragma once + +typedef struct _Storer Storer; + +Storer* storer_new(); +void storer_destroy(Storer *s); +int storer_get_value(Storer *s); +void storer_set_value(Storer *s, int v); diff --git a/meson/test cases/cython/1 basic/libdir/storer.pyx b/meson/test cases/cython/1 basic/libdir/storer.pyx new file mode 100644 index 000000000..ed551dc5f --- /dev/null +++ b/meson/test cases/cython/1 basic/libdir/storer.pyx @@ -0,0 +1,16 @@ +cimport cstorer + +cdef class Storer: + cdef cstorer.Storer* _c_storer + + def __cinit__(self): + self._c_storer = cstorer.storer_new() + + def __dealloc__(self): + cstorer.storer_destroy(self._c_storer) + + cpdef int get_value(self): + return cstorer.storer_get_value(self._c_storer) + + cpdef set_value(self, int value): + 
cstorer.storer_set_value(self._c_storer, value) diff --git a/meson/test cases/cython/1 basic/meson.build b/meson/test cases/cython/1 basic/meson.build new file mode 100644 index 000000000..8c24e2312 --- /dev/null +++ b/meson/test cases/cython/1 basic/meson.build @@ -0,0 +1,20 @@ +project( + 'basic cython project', + ['cython', 'c'], + default_options : ['warning_level=3'] +) + +py_mod = import('python') +py3 = py_mod.find_installation() +py3_dep = py3.dependency(required : false) +if not py3_dep.found() + error('MESON_SKIP_TEST: Python library not found.') +endif + +subdir('libdir') + +test('cython tester', + py3, + args : files('cytest.py'), + env : ['PYTHONPATH=' + pydir] +) diff --git a/meson/test cases/cython/2 generated sources/configure.pyx.in b/meson/test cases/cython/2 generated sources/configure.pyx.in new file mode 100644 index 000000000..1c44f6d42 --- /dev/null +++ b/meson/test cases/cython/2 generated sources/configure.pyx.in @@ -0,0 +1,2 @@ +cpdef func(): + return "Hello, World!" diff --git a/meson/test cases/cython/2 generated sources/g.in b/meson/test cases/cython/2 generated sources/g.in new file mode 100644 index 000000000..1c44f6d42 --- /dev/null +++ b/meson/test cases/cython/2 generated sources/g.in @@ -0,0 +1,2 @@ +cpdef func(): + return "Hello, World!" diff --git a/meson/test cases/cython/2 generated sources/gen.py b/meson/test cases/cython/2 generated sources/gen.py new file mode 100644 index 000000000..5c0a82d56 --- /dev/null +++ b/meson/test cases/cython/2 generated sources/gen.py @@ -0,0 +1,14 @@ +# SPDX-License-Identifier: Apache-2.0 + +import argparse +import textwrap + +parser = argparse.ArgumentParser() +parser.add_argument('output') +args = parser.parse_args() + +with open(args.output, 'w') as f: + f.write(textwrap.dedent('''\ + cpdef func(): + return "Hello, World!" + ''')) diff --git a/meson/test cases/cython/2 generated sources/generator.py b/meson/test cases/cython/2 generated sources/generator.py new file mode 100755 index 000000000..77de85569 --- /dev/null +++ b/meson/test cases/cython/2 generated sources/generator.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 + +import argparse + +parser = argparse.ArgumentParser() +parser.add_argument('input') +parser.add_argument('output') +args = parser.parse_args() + +with open(args.input, 'r') as i, open(args.output, 'w') as o: + o.write(i.read()) diff --git a/meson/test cases/cython/2 generated sources/libdir/gen.py b/meson/test cases/cython/2 generated sources/libdir/gen.py new file mode 100644 index 000000000..5c0a82d56 --- /dev/null +++ b/meson/test cases/cython/2 generated sources/libdir/gen.py @@ -0,0 +1,14 @@ +# SPDX-License-Identifier: Apache-2.0 + +import argparse +import textwrap + +parser = argparse.ArgumentParser() +parser.add_argument('output') +args = parser.parse_args() + +with open(args.output, 'w') as f: + f.write(textwrap.dedent('''\ + cpdef func(): + return "Hello, World!" 
+ ''')) diff --git a/meson/test cases/cython/2 generated sources/libdir/meson.build b/meson/test cases/cython/2 generated sources/libdir/meson.build new file mode 100644 index 000000000..e9259bdad --- /dev/null +++ b/meson/test cases/cython/2 generated sources/libdir/meson.build @@ -0,0 +1,10 @@ +ct2 = custom_target( + 'ct2', + input : 'gen.py', + output : 'ct2.pyx', + command : [py3, '@INPUT@', '@OUTPUT@'], +) + +ct2_ext = py3.extension_module('ct2', ct2, dependencies : py3_dep) + +pydir = meson.current_build_dir() diff --git a/meson/test cases/cython/2 generated sources/meson.build b/meson/test cases/cython/2 generated sources/meson.build new file mode 100644 index 000000000..cfe62602d --- /dev/null +++ b/meson/test cases/cython/2 generated sources/meson.build @@ -0,0 +1,80 @@ +project( + 'generated cython sources', + ['cython'], +) + +py_mod = import('python') +py3 = py_mod.find_installation('python3') +py3_dep = py3.dependency(required : false) +if not py3_dep.found() + error('MESON_SKIP_TEST: Python library not found.') +endif + +ct = custom_target( + 'ct', + input : 'gen.py', + output : 'ct.pyx', + command : [py3, '@INPUT@', '@OUTPUT@'], +) + +ct_ext = py3.extension_module('ct', ct, dependencies : py3_dep) + +test( + 'custom target', + py3, + args : [files('test.py'), 'ct'], + env : ['PYTHONPATH=' + meson.current_build_dir()] +) + +# Test a CustomTargetIndex +cti = custom_target( + 'cti', + input : 'gen.py', + output : 'cti.pyx', + command : [py3, '@INPUT@', '@OUTPUT@'], +) + +cti_ext = py3.extension_module('cti', cti[0], dependencies : py3_dep) + +cf = configure_file( + input : 'configure.pyx.in', + output : 'cf.pyx', + copy : true, +) + +cf_ext = py3.extension_module('cf', cf, dependencies : py3_dep) + +test( + 'configure file', + py3, + args : [files('test.py'), 'cf'], + env : ['PYTHONPATH=' + meson.current_build_dir()] +) + +gen = generator( + find_program('generator.py'), + arguments : ['@INPUT@', '@OUTPUT@'], + output : '@BASENAME@.pyx', +) + +g_ext = py3.extension_module( + 'g', + gen.process('g.in'), + dependencies : py3_dep, +) + +test( + 'generator', + py3, + args : [files('test.py'), 'g'], + env : ['PYTHONPATH=' + meson.current_build_dir()] +) + +subdir('libdir') + +test( + 'custom target in subdir', + py3, + args : [files('test.py'), 'ct2'], + env : ['PYTHONPATH=' + pydir] +) diff --git a/meson/test cases/cython/2 generated sources/test.py b/meson/test cases/cython/2 generated sources/test.py new file mode 100644 index 000000000..307283f26 --- /dev/null +++ b/meson/test cases/cython/2 generated sources/test.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 + +import argparse +import importlib + +parser = argparse.ArgumentParser() +parser.add_argument('mod') +args = parser.parse_args() + +mod = importlib.import_module(args.mod) + +assert mod.func() == 'Hello, World!' 
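For reference, the Cython 'generated sources' checks above all follow the same pattern: a .pyx file is produced at build time (via custom_target, configure_file, or a generator), compiled into an extension module, and then test.py imports that module by name with PYTHONPATH pointing at meson.current_build_dir() (or the pydir captured in the subdir). A minimal sketch of that lookup done from inside Python follows; the build directory path is a hypothetical placeholder that Meson normally supplies through the test environment.

#!/usr/bin/env python3
# Illustrative sketch only: emulates what test.py relies on when the test()
# definitions export PYTHONPATH=<build dir>. The path below is hypothetical.
import importlib
import sys

build_dir = '/path/to/builddir'  # hypothetical; normally provided via PYTHONPATH
sys.path.insert(0, build_dir)    # equivalent to setting PYTHONPATH for the test

mod = importlib.import_module('ct')  # e.g. the extension built from ct.pyx
assert mod.func() == 'Hello, World!'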
diff --git a/meson/test cases/d/1 simple/app.d b/meson/test cases/d/1 simple/app.d new file mode 100644 index 000000000..0be1d2cc7 --- /dev/null +++ b/meson/test cases/d/1 simple/app.d @@ -0,0 +1,8 @@ + +import std.stdio; +import utils; + +void main () +{ + printGreeting ("a Meson D test"); +} diff --git a/meson/test cases/d/1 simple/meson.build b/meson/test cases/d/1 simple/meson.build new file mode 100644 index 000000000..a10b67b10 --- /dev/null +++ b/meson/test cases/d/1 simple/meson.build @@ -0,0 +1,4 @@ +project('D Simple Test', 'd') + +e = executable('dsimpleapp', ['app.d', 'utils.d'], install : true) +test('apptest', e) diff --git a/meson/test cases/d/1 simple/test.json b/meson/test cases/d/1 simple/test.json new file mode 100644 index 000000000..62f907a4c --- /dev/null +++ b/meson/test cases/d/1 simple/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/dsimpleapp"}, + {"type": "pdb", "file": "usr/bin/dsimpleapp", "language": "d"} + ] +} diff --git a/meson/test cases/d/1 simple/utils.d b/meson/test cases/d/1 simple/utils.d new file mode 100644 index 000000000..8645548c5 --- /dev/null +++ b/meson/test cases/d/1 simple/utils.d @@ -0,0 +1,8 @@ + +import std.stdio; +import std.string : format; + +void printGreeting (string name) +{ + writeln ("Hello, I am %s.".format (name)); +} diff --git a/meson/test cases/d/10 d cpp/cppmain.cpp b/meson/test cases/d/10 d cpp/cppmain.cpp new file mode 100644 index 000000000..ff90e3f41 --- /dev/null +++ b/meson/test cases/d/10 d cpp/cppmain.cpp @@ -0,0 +1,18 @@ +extern "C" int rt_init(); +extern "C" int rt_term(); +extern void print_hello(int i); + +int main(int, char**) { + // initialize D runtime + if (!rt_init()) + return 1; + + print_hello(1); + + // terminate D runtime, each initialize call + // must be paired with a terminate call. + if (!rt_term()) + return 1; + + return 0; +} diff --git a/meson/test cases/d/10 d cpp/dmain.d b/meson/test cases/d/10 d cpp/dmain.d new file mode 100644 index 000000000..bece25f6c --- /dev/null +++ b/meson/test cases/d/10 d cpp/dmain.d @@ -0,0 +1,5 @@ +extern (C++) void print_hello(int i); + +void main() { + print_hello(1); +} diff --git a/meson/test cases/d/10 d cpp/libfile.cpp b/meson/test cases/d/10 d cpp/libfile.cpp new file mode 100644 index 000000000..2ea67fc92 --- /dev/null +++ b/meson/test cases/d/10 d cpp/libfile.cpp @@ -0,0 +1,5 @@ +#include + +void print_hello(int i) { + std::cout << "Hello. Here is a number printed with C++: " << i << ".\n"; +} diff --git a/meson/test cases/d/10 d cpp/libfile.d b/meson/test cases/d/10 d cpp/libfile.d new file mode 100644 index 000000000..88cb53e9b --- /dev/null +++ b/meson/test cases/d/10 d cpp/libfile.d @@ -0,0 +1,5 @@ +import std.stdio; + +extern (C++) void print_hello(int i) { + writefln("Hello. 
Here is a number printed with D: %d", i); +} diff --git a/meson/test cases/d/10 d cpp/meson.build b/meson/test cases/d/10 d cpp/meson.build new file mode 100644 index 000000000..eecb151f4 --- /dev/null +++ b/meson/test cases/d/10 d cpp/meson.build @@ -0,0 +1,13 @@ +project('d and c++', 'd', 'cpp') + +cpp = meson.get_compiler('cpp') + +if cpp.get_id() == 'clang' + error('MESON_SKIP_TEST combining Clang C++ with GDC produces broken executables.') +endif + +e1 = executable('dcpp', 'dmain.d', 'libfile.cpp') +test('dcpp', e1) + +e2 = executable('cppd', 'cppmain.cpp', 'libfile.d') +test('cppd', e2) diff --git a/meson/test cases/d/11 dub/meson.build b/meson/test cases/d/11 dub/meson.build new file mode 100644 index 000000000..d852ca0c2 --- /dev/null +++ b/meson/test cases/d/11 dub/meson.build @@ -0,0 +1,23 @@ +project('dub-example', 'd') + +dub_exe = find_program('dub', required : false) +if not dub_exe.found() + error('MESON_SKIP_TEST: Dub not found') +endif + +urld_dep = dependency('urld', method: 'dub') + +test_exe = executable('test-urld', 'test.d', dependencies: urld_dep) +test('test urld', test_exe) + +# If you want meson to generate/update a dub.json file +dlang = import('dlang') +dlang.generate_dub_file(meson.project_name().to_lower(), meson.source_root(), + authors: 'Meson Team', + description: 'Test executable', + copyright: 'Copyright © 2018, Meson Team', + license: 'MIT', + sourceFiles: 'test.d', + targetType: 'executable', + dependencies: urld_dep +) \ No newline at end of file diff --git a/meson/test cases/d/11 dub/test.d b/meson/test cases/d/11 dub/test.d new file mode 100644 index 000000000..7cf7a1d58 --- /dev/null +++ b/meson/test cases/d/11 dub/test.d @@ -0,0 +1,14 @@ +import std.stdio; +import url; + +void main() { + URL url; + with (url) { + scheme = "soap.beep"; + host = "beep.example.net"; + port = 1772; + path = "/serverinfo/info"; + queryParams.add("token", "my-api-token"); + } + writeln(url); +} \ No newline at end of file diff --git a/meson/test cases/d/2 static library/app.d b/meson/test cases/d/2 static library/app.d new file mode 100644 index 000000000..5d84a6942 --- /dev/null +++ b/meson/test cases/d/2 static library/app.d @@ -0,0 +1,8 @@ + +import libstuff; + +void main () +{ + immutable ret = printLibraryString ("foo"); + assert (ret == 4); +} diff --git a/meson/test cases/d/2 static library/libstuff.d b/meson/test cases/d/2 static library/libstuff.d new file mode 100644 index 000000000..fd3b4d0b4 --- /dev/null +++ b/meson/test cases/d/2 static library/libstuff.d @@ -0,0 +1,9 @@ + +import std.stdio; +import std.string : format; + +int printLibraryString (string str) +{ + writeln ("Static Library says: %s".format (str)); + return 4; +} diff --git a/meson/test cases/d/2 static library/meson.build b/meson/test cases/d/2 static library/meson.build new file mode 100644 index 000000000..88ed2cb8f --- /dev/null +++ b/meson/test cases/d/2 static library/meson.build @@ -0,0 +1,5 @@ +project('D Static Library', 'd') + +lstatic = static_library('stuff', 'libstuff.d', install : true) +es = executable('app_s', 'app.d', link_with : lstatic, install : true) +test('linktest_static', es) diff --git a/meson/test cases/d/2 static library/test.json b/meson/test cases/d/2 static library/test.json new file mode 100644 index 000000000..6abb93409 --- /dev/null +++ b/meson/test cases/d/2 static library/test.json @@ -0,0 +1,7 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/app_s"}, + {"type": "pdb", "file": "usr/bin/app_s", "language": "d"}, + {"type": "file", "file": 
"usr/lib/libstuff.a"} + ] +} diff --git a/meson/test cases/d/3 shared library/app.d b/meson/test cases/d/3 shared library/app.d new file mode 100644 index 000000000..5d84a6942 --- /dev/null +++ b/meson/test cases/d/3 shared library/app.d @@ -0,0 +1,8 @@ + +import libstuff; + +void main () +{ + immutable ret = printLibraryString ("foo"); + assert (ret == 4); +} diff --git a/meson/test cases/d/3 shared library/libstuff.d b/meson/test cases/d/3 shared library/libstuff.d new file mode 100644 index 000000000..8205490f7 --- /dev/null +++ b/meson/test cases/d/3 shared library/libstuff.d @@ -0,0 +1,14 @@ +import std.stdio; +import std.string : format; + +export int printLibraryString (string str) +{ + writeln ("Library says: %s".format (str)); + return 4; +} + +version (Windows) +{ + import core.sys.windows.dll; + mixin SimpleDllMain; +} diff --git a/meson/test cases/d/3 shared library/libstuff.di b/meson/test cases/d/3 shared library/libstuff.di new file mode 100644 index 000000000..b6454b180 --- /dev/null +++ b/meson/test cases/d/3 shared library/libstuff.di @@ -0,0 +1,3 @@ +module libstuff; + +int printLibraryString (string str); diff --git a/meson/test cases/d/3 shared library/lld-test.py b/meson/test cases/d/3 shared library/lld-test.py new file mode 100644 index 000000000..3f32f59fb --- /dev/null +++ b/meson/test cases/d/3 shared library/lld-test.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +import argparse +import subprocess + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('ldd') + parser.add_argument('bin') + args = parser.parse_args() + + p, o, _ = subprocess.run([args.ldd, args.bin], stdout=subprocess.PIPE) + assert p == 0 + o = o.decode() + assert 'libstuff.so =>' in o, 'libstuff so not in linker path.' + assert 'libstuff.so => not found' not in o, 'libstuff.so not found correctly' + + +if __name__ == '__main__': + main() diff --git a/meson/test cases/d/3 shared library/meson.build b/meson/test cases/d/3 shared library/meson.build new file mode 100644 index 000000000..fa417794c --- /dev/null +++ b/meson/test cases/d/3 shared library/meson.build @@ -0,0 +1,26 @@ +project('D Shared Library', 'd') + +dc = meson.get_compiler('d') +if dc.get_id() == 'gcc' + if dc.version().version_compare('< 8') + error('MESON_SKIP_TEST: GDC < 8.0 can not build shared libraries') + endif +endif + +subdir('sub') +ed = executable('app_d', 'app.d', link_with : ldyn, install : true) +test('linktest_dyn', ed) + +# test D attributes for pkg-config +pkgc = import('pkgconfig') +pkgc.generate(name: 'test', + libraries: ldyn, + subdirs: 'd/stuff', + description: 'A test of D attributes to pkgconfig.generate.', + d_module_versions: ['Use_Static'] +) + +ldd = find_program('ldd', required : false) +if ldd.found() + test('ldd-test.py', ed) +endif diff --git a/meson/test cases/d/3 shared library/sub/libstuff.d b/meson/test cases/d/3 shared library/sub/libstuff.d new file mode 100644 index 000000000..8205490f7 --- /dev/null +++ b/meson/test cases/d/3 shared library/sub/libstuff.d @@ -0,0 +1,14 @@ +import std.stdio; +import std.string : format; + +export int printLibraryString (string str) +{ + writeln ("Library says: %s".format (str)); + return 4; +} + +version (Windows) +{ + import core.sys.windows.dll; + mixin SimpleDllMain; +} diff --git a/meson/test cases/d/3 shared library/sub/meson.build b/meson/test cases/d/3 shared library/sub/meson.build new file mode 100644 index 000000000..fb4b99661 --- /dev/null +++ b/meson/test cases/d/3 shared library/sub/meson.build @@ -0,0 +1,2 @@ +ldyn = 
shared_library('stuff', 'libstuff.d', install : true) + diff --git a/meson/test cases/d/3 shared library/test.json b/meson/test cases/d/3 shared library/test.json new file mode 100644 index 000000000..50eb9cbdd --- /dev/null +++ b/meson/test cases/d/3 shared library/test.json @@ -0,0 +1,11 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/app_d"}, + {"type": "pdb", "file": "usr/bin/app_d", "language": "d"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/stuff"}, + {"type": "pdb", "file": "usr/bin/stuff", "language": "d"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/stuff"}, + {"type": "file", "platform": "msvc", "file": "usr/lib/stuff.lib"}, + {"type": "file", "file": "usr/lib/pkgconfig/test.pc"} + ] +} diff --git a/meson/test cases/d/4 library versions/lib.d b/meson/test cases/d/4 library versions/lib.d new file mode 100644 index 000000000..f1e177d99 --- /dev/null +++ b/meson/test cases/d/4 library versions/lib.d @@ -0,0 +1,16 @@ + +import std.stdio; +import std.string : format; + +@safe +export int printLibraryString (string str) +{ + writeln ("Library says: %s".format (str)); + return 4; +} + +version (Windows) +{ + import core.sys.windows.dll; + mixin SimpleDllMain; +} diff --git a/meson/test cases/d/4 library versions/meson.build b/meson/test cases/d/4 library versions/meson.build new file mode 100644 index 000000000..c745b9227 --- /dev/null +++ b/meson/test cases/d/4 library versions/meson.build @@ -0,0 +1,25 @@ +project('D library versions', 'd') + +dc = meson.get_compiler('d') +if dc.get_id() == 'gcc' + if dc.version().version_compare('< 8') + error('MESON_SKIP_TEST: GDC < 8.0 can not build shared libraries') + endif +endif + +shared_library('some', 'lib.d', + version : '1.2.3', + soversion : '0', + install : true) + +shared_library('noversion', 'lib.d', + install : true) + +shared_library('onlyversion', 'lib.d', + version : '1.4.5', + install : true) + +shared_library('onlysoversion', 'lib.d', + # Also test that int soversion is acceptable + soversion : 5, + install : true) diff --git a/meson/test cases/d/4 library versions/test.json b/meson/test cases/d/4 library versions/test.json new file mode 100644 index 000000000..23c95dd52 --- /dev/null +++ b/meson/test cases/d/4 library versions/test.json @@ -0,0 +1,25 @@ +{ + "installed": [ + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/some"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/some", "version": "0"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/some", "version": "1.2.3"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/noversion"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlyversion"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlyversion", "version": "1"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlyversion", "version": "1.4.5"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlysoversion"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/onlysoversion", "version": "5"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/noversion"}, + {"type": "pdb", "file": "usr/bin/noversion", "language": "d"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/onlysoversion", "version": "5"}, + {"type": "pdb", "file": "usr/bin/onlysoversion", "version": "5", "language": "d"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/onlyversion", "version": "1"}, + {"type": "pdb", "file": "usr/bin/onlyversion", "version": "1", 
"language": "d"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/some", "version": "0"}, + {"type": "pdb", "file": "usr/bin/some", "version": "0", "language": "d"}, + {"type": "implib", "file": "usr/lib/noversion"}, + {"type": "implib", "file": "usr/lib/onlysoversion"}, + {"type": "implib", "file": "usr/lib/onlyversion"}, + {"type": "implib", "file": "usr/lib/some"} + ] +} diff --git a/meson/test cases/d/5 mixed/app.d b/meson/test cases/d/5 mixed/app.d new file mode 100644 index 000000000..6ab5d97d6 --- /dev/null +++ b/meson/test cases/d/5 mixed/app.d @@ -0,0 +1,8 @@ + +extern(C) int printLibraryString(const char *str); + +void main () +{ + immutable ret = printLibraryString ("C foo"); + assert (ret == 3); +} diff --git a/meson/test cases/d/5 mixed/libstuff.c b/meson/test cases/d/5 mixed/libstuff.c new file mode 100644 index 000000000..92d6600ce --- /dev/null +++ b/meson/test cases/d/5 mixed/libstuff.c @@ -0,0 +1,18 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +#include + +int DLL_PUBLIC printLibraryString(const char *str) +{ + printf("C library says: %s", str); + return 3; +} diff --git a/meson/test cases/d/5 mixed/meson.build b/meson/test cases/d/5 mixed/meson.build new file mode 100644 index 000000000..3dad66d0c --- /dev/null +++ b/meson/test cases/d/5 mixed/meson.build @@ -0,0 +1,9 @@ +project('Mixing C and D', 'd', 'c') + +ldyn = shared_library('stuff', 'libstuff.c', install : true) +ed = executable('appdc_d', 'app.d', link_with : ldyn, install : true) +test('linktest_cdyn', ed) + +lstatic = static_library('stuff', 'libstuff.c', install : true) +es = executable('appdc_s', 'app.d', link_with : lstatic, install : true) +test('linktest_cstatic', es) diff --git a/meson/test cases/d/5 mixed/test.json b/meson/test cases/d/5 mixed/test.json new file mode 100644 index 000000000..c95d0ca0c --- /dev/null +++ b/meson/test cases/d/5 mixed/test.json @@ -0,0 +1,13 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/appdc_d"}, + {"type": "pdb", "file": "usr/bin/appdc_d", "language": "d"}, + {"type": "exe", "file": "usr/bin/appdc_s"}, + {"type": "pdb", "file": "usr/bin/appdc_s", "language": "d"}, + {"type": "file", "file": "usr/lib/libstuff.a"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/stuff"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/stuff"}, + {"type": "pdb", "file": "usr/bin/stuff", "language": "c"}, + {"type": "file", "platform": "msvc", "file": "usr/lib/stuff.lib"} + ] +} diff --git a/meson/test cases/d/6 unittest/app.d b/meson/test cases/d/6 unittest/app.d new file mode 100644 index 000000000..71c641415 --- /dev/null +++ b/meson/test cases/d/6 unittest/app.d @@ -0,0 +1,38 @@ + +import std.stdio; + +uint getFour () +{ + auto getTwo () + { + return 1 + 1; + } + + return getTwo () + getTwo (); +} + +void main () +{ + import core.stdc.stdlib : exit; + + writeln ("Four: ", getFour ()); + exit (4); +} + +unittest +{ + writeln ("TEST"); + import core.stdc.stdlib : exit; + import second_unit; + + assert (getFour () > 2); + assert (getFour () == 4); + + // this is a regression test for https://github.com/mesonbuild/meson/issues/3337 + secondModuleTestFunc (); + + // we explicitly terminate here to give the unittest program a different exit + // code than the main application has. 
+ // (this prevents the regular main() from being executed) + exit (0); +} diff --git a/meson/test cases/d/6 unittest/meson.build b/meson/test cases/d/6 unittest/meson.build new file mode 100644 index 000000000..49a070047 --- /dev/null +++ b/meson/test cases/d/6 unittest/meson.build @@ -0,0 +1,8 @@ +project('D Unittests', 'd') + +e = executable('dapp', ['app.d', 'second_unit.d'], install : true) +test('dapp_run', e, should_fail: true) + +e_test = executable('dapp_test', ['app.d', 'second_unit.d'], + d_unittest: true) +test('dapp_test', e_test) diff --git a/meson/test cases/d/6 unittest/second_unit.d b/meson/test cases/d/6 unittest/second_unit.d new file mode 100644 index 000000000..fdb62a918 --- /dev/null +++ b/meson/test cases/d/6 unittest/second_unit.d @@ -0,0 +1,10 @@ + +void secondModuleTestFunc () +{ + import std.stdio : writeln; + + version (unittest) + writeln ("Hello!"); + else + assert (0); +} diff --git a/meson/test cases/d/6 unittest/test.json b/meson/test cases/d/6 unittest/test.json new file mode 100644 index 000000000..adc4d75a1 --- /dev/null +++ b/meson/test cases/d/6 unittest/test.json @@ -0,0 +1,6 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/dapp"}, + {"type": "pdb", "file": "usr/bin/dapp", "language": "d"} + ] +} diff --git a/meson/test cases/d/7 multilib/app.d b/meson/test cases/d/7 multilib/app.d new file mode 100644 index 000000000..892596ad9 --- /dev/null +++ b/meson/test cases/d/7 multilib/app.d @@ -0,0 +1,9 @@ + +import say1; +import say2; + +void main () +{ + assert (sayHello1 ("Dave") == 4); + assert (sayHello2 ("HAL 9000") == 8); +} diff --git a/meson/test cases/d/7 multilib/meson.build b/meson/test cases/d/7 multilib/meson.build new file mode 100644 index 000000000..1879c083f --- /dev/null +++ b/meson/test cases/d/7 multilib/meson.build @@ -0,0 +1,24 @@ +project('D Multiple Versioned Shared Libraries', 'd') + +dc = meson.get_compiler('d') +if dc.get_id() == 'gcc' + if dc.version().version_compare('< 8') + error('MESON_SKIP_TEST: GDC < 8.0 can not build shared libraries') + endif +endif + +ldyn1 = shared_library('say1', + 'say1.d', + install: true, + version : '1.2.3', + soversion : '0' +) +ldyn2 = shared_library('say2', + 'say2.d', + install: true, + version : '1.2.4', + soversion : '1' +) + +ed = executable('app_d', 'app.d', link_with: [ldyn1, ldyn2], install: true) +test('multilink_test', ed) diff --git a/meson/test cases/d/7 multilib/say1.d b/meson/test cases/d/7 multilib/say1.d new file mode 100644 index 000000000..605fd2355 --- /dev/null +++ b/meson/test cases/d/7 multilib/say1.d @@ -0,0 +1,15 @@ + +import std.stdio; +import std.string : format; + +export int sayHello1 (string str) +{ + writeln ("Hello %s from library 1.".format (str)); + return 4; +} + +version (Windows) +{ + import core.sys.windows.dll; + mixin SimpleDllMain; +} diff --git a/meson/test cases/d/7 multilib/say1.di b/meson/test cases/d/7 multilib/say1.di new file mode 100644 index 000000000..8a9ff02da --- /dev/null +++ b/meson/test cases/d/7 multilib/say1.di @@ -0,0 +1 @@ +int sayHello1 (string str); diff --git a/meson/test cases/d/7 multilib/say2.d b/meson/test cases/d/7 multilib/say2.d new file mode 100644 index 000000000..7270ebd22 --- /dev/null +++ b/meson/test cases/d/7 multilib/say2.d @@ -0,0 +1,15 @@ + +import std.stdio; +import std.string : format; + +export int sayHello2 (string str) +{ + writeln ("Hello %s from library 2.".format (str)); + return 8; +} + +version (Windows) +{ + import core.sys.windows.dll; + mixin SimpleDllMain; +} diff --git a/meson/test cases/d/7 
multilib/say2.di b/meson/test cases/d/7 multilib/say2.di new file mode 100644 index 000000000..da712f0de --- /dev/null +++ b/meson/test cases/d/7 multilib/say2.di @@ -0,0 +1 @@ +int sayHello2 (string str); diff --git a/meson/test cases/d/7 multilib/test.json b/meson/test cases/d/7 multilib/test.json new file mode 100644 index 000000000..5944ae0e1 --- /dev/null +++ b/meson/test cases/d/7 multilib/test.json @@ -0,0 +1,18 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/app_d"}, + {"type": "pdb", "file": "usr/bin/app_d", "language": "d"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say1"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say1", "version": "0"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say1", "version": "1.2.3"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say2"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say2", "version": "1"}, + {"type": "shared_lib", "platform": "gcc", "file": "usr/lib/say2", "version": "1.2.4"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/say1", "version": "0"}, + {"type": "pdb", "file": "usr/bin/say1", "version": "0", "language": "d"}, + {"type": "shared_lib", "platform": "msvc", "file": "usr/bin/say2", "version": "1"}, + {"type": "pdb", "file": "usr/bin/say2", "version": "1", "language": "d"}, + {"type": "implib", "file": "usr/lib/say1"}, + {"type": "implib", "file": "usr/lib/say2"} + ] +} diff --git a/meson/test cases/d/8 has multi arguments/meson.build b/meson/test cases/d/8 has multi arguments/meson.build new file mode 100644 index 000000000..08970572c --- /dev/null +++ b/meson/test cases/d/8 has multi arguments/meson.build @@ -0,0 +1,8 @@ +project('D has arguments test', 'd') + +compiler = meson.get_compiler('d') + +assert(compiler.compiles('int i;'), 'Basic code test does not compile: ' + compiler.get_id()) +assert(compiler.has_multi_arguments(['-I.', '-J.']), 'Multi argument test does not work: ' + compiler.get_id()) +assert(compiler.has_argument('-I.'), 'Basic argument test does not work: ' + compiler.get_id()) +assert(compiler.has_argument('-flag_a_d_compiler_definitely_does_not_have') == false, 'Basic argument test does not work: ' + compiler.get_id()) diff --git a/meson/test cases/d/9 features/app.d b/meson/test cases/d/9 features/app.d new file mode 100644 index 000000000..05c56ca5f --- /dev/null +++ b/meson/test cases/d/9 features/app.d @@ -0,0 +1,82 @@ + +import std.stdio; +import std.array : split; +import std.string : strip; + +import extra; + +auto getMenu () +{ + auto foods = import ("food.txt").strip.split ("\n"); + return foods; +} + +auto getPeople () +{ + return import ("people.txt").strip.split ("\n"); +} + +void main (string[] args) +{ + import std.array : join; + import core.stdc.stdlib : exit; + + immutable request = args[1]; + if (request == "menu") { + version (No_Menu) { + } else { + writeln ("On the menu: ", getMenu.join (", ")); + exit (0); + } + } + + version (With_People) { + if (request == "people") { + writeln ("People: ", getPeople.join (", ")); + + // only exit successfully if the second module also had its module version set. 
+ // this checks for issue https://github.com/mesonbuild/meson/issues/3337 + if (secondModulePeopleVersionSet ()) + exit (0); + exit (1); + } + } + + version (With_VersionInteger) + version(3) exit(0); + + version (With_Debug) + debug exit(0); + + version (With_DebugInteger) + debug(3) exit(0); + + version (With_DebugIdentifier) + debug(DebugIdentifier) exit(0); + + version (With_DebugAll) { + int dbg = 0; + debug dbg++; + debug(2) dbg++; + debug(3) dbg++; + debug(4) dbg++; + debug(DebugIdentifier) dbg++; + + if (dbg == 5) + exit(0); + } + + // we fail here + exit (1); +} + +unittest +{ + writeln ("TEST"); + import core.stdc.stdlib : exit; + + writeln(getMenu); + assert (getMenu () == ["Spam", "Eggs", "Spam", "Baked Beans", "Spam", "Spam"]); + + exit (0); +} diff --git a/meson/test cases/d/9 features/data/food.txt b/meson/test cases/d/9 features/data/food.txt new file mode 100644 index 000000000..8275dd02c --- /dev/null +++ b/meson/test cases/d/9 features/data/food.txt @@ -0,0 +1,6 @@ +Spam +Eggs +Spam +Baked Beans +Spam +Spam diff --git a/meson/test cases/d/9 features/data/people.txt b/meson/test cases/d/9 features/data/people.txt new file mode 100644 index 000000000..abbae060b --- /dev/null +++ b/meson/test cases/d/9 features/data/people.txt @@ -0,0 +1,5 @@ +Rick +Morty +Summer +Beth +Jerry diff --git a/meson/test cases/d/9 features/extra.d b/meson/test cases/d/9 features/extra.d new file mode 100644 index 000000000..832b29207 --- /dev/null +++ b/meson/test cases/d/9 features/extra.d @@ -0,0 +1,9 @@ + +auto secondModulePeopleVersionSet () +{ + version (With_People) { + return true; + } else { + return false; + } +} diff --git a/meson/test cases/d/9 features/meson.build b/meson/test cases/d/9 features/meson.build new file mode 100644 index 000000000..06f03414d --- /dev/null +++ b/meson/test cases/d/9 features/meson.build @@ -0,0 +1,106 @@ +project('D Features', 'd', default_options : ['debug=false']) + +# ONLY FOR BACKWARDS COMPATIBILITY. +# DO NOT DO THIS IN NEW CODE! +# USE include_directories() INSTEAD OF BUILDING +# STRINGS TO PATHS MANUALLY! +data_dir = join_paths(meson.current_source_dir(), 'data') + +test_src = ['app.d', 'extra.d'] + +e_plain_bcompat = executable('dapp_menu_bcompat', + test_src, + d_import_dirs: [data_dir] +) +test('dapp_menu_t_fail_bcompat', e_plain_bcompat, should_fail: true) +test('dapp_menu_t_bcompat', e_plain_bcompat, args: ['menu']) + +# directory for data +# This is the correct way to do this. 
+data_dir = include_directories('data') + +e_plain = executable('dapp_menu', + test_src, + d_import_dirs: [data_dir] +) +test('dapp_menu_t_fail', e_plain, should_fail: true) +test('dapp_menu_t', e_plain, args: ['menu']) + + +# test feature versions and string imports +e_versions = executable('dapp_versions', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['No_Menu', 'With_People'] +) +test('dapp_versions_t_fail', e_versions, args: ['menu'], should_fail: true) +test('dapp_versions_t', e_versions, args: ['people']) + +# test everything and unittests +e_test = executable('dapp_test', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['No_Menu', 'With_People'], + d_unittest: true +) +test('dapp_test', e_test) + +# test version level +e_version_int = executable('dapp_version_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_VersionInteger', 3], +) +test('dapp_version_int_t', e_version_int, args: ['debug']) + +# test version level failure +e_version_int_fail = executable('dapp_version_int_fail', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_VersionInteger', 2], +) +test('dapp_version_int_t_fail', e_version_int_fail, args: ['debug'], should_fail: true) + +# test debug conditions: disabled +e_no_debug = executable('dapp_no_debug', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_Debug'], +) +test('dapp_no_debug_t_fail', e_no_debug, args: ['debug'], should_fail: true) + +# test debug conditions: enabled +e_debug = executable('dapp_debug', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_Debug'], + d_debug: 1, +) +test('dapp_debug_t', e_debug, args: ['debug']) + +# test debug conditions: integer +e_debug_int = executable('dapp_debug_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugInteger'], + d_debug: 3, +) +test('dapp_debug_int_t', e_debug_int, args: ['debug']) + +# test debug conditions: identifier +e_debug_ident = executable('dapp_debug_ident', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugIdentifier'], + d_debug: 'DebugIdentifier', +) +test('dapp_debug_ident_t', e_debug_ident, args: ['debug']) + +# test with all debug conditions at once, and with redundant values +e_debug_all = executable('dapp_debug_all', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugAll'], + d_debug: ['4', 'DebugIdentifier', 2, 'DebugIdentifierUnused'], +) +test('dapp_debug_all_t', e_debug_all, args: ['debug']) diff --git a/meson/test cases/failing build/1 vala c werror/meson.build b/meson/test cases/failing build/1 vala c werror/meson.build new file mode 100644 index 000000000..736d7aa43 --- /dev/null +++ b/meson/test cases/failing build/1 vala c werror/meson.build @@ -0,0 +1,10 @@ +project('valatest', 'c', default_options : 'werror=true') + +if find_program('valac', required : false).found() + add_languages('vala') + valadeps = [dependency('glib-2.0'), dependency('gobject-2.0')] + # Must fail due to -Werror and unused variable in C file + executable('valaprog', 'prog.vala', 'unused-var.c', dependencies : valadeps) +else + executable('failprog', 'unused-var.c') +endif diff --git a/meson/test cases/failing build/1 vala c werror/prog.vala b/meson/test cases/failing build/1 vala c werror/prog.vala new file mode 100644 index 000000000..638e77660 --- /dev/null +++ b/meson/test cases/failing build/1 vala c werror/prog.vala @@ -0,0 +1,7 @@ +class MainProg : GLib.Object { + + public static int main(string[] args) { + 
stdout.printf("Vala is working.\n"); + return 0; + } +} diff --git a/meson/test cases/failing build/1 vala c werror/unused-var.c b/meson/test cases/failing build/1 vala c werror/unused-var.c new file mode 100644 index 000000000..6b85078c9 --- /dev/null +++ b/meson/test cases/failing build/1 vala c werror/unused-var.c @@ -0,0 +1,8 @@ +#warning "something" + +int +somelib(void) +{ + int unused_var; + return 33; +} diff --git a/meson/test cases/failing build/2 hidden symbol/bob.c b/meson/test cases/failing build/2 hidden symbol/bob.c new file mode 100644 index 000000000..9a3325a81 --- /dev/null +++ b/meson/test cases/failing build/2 hidden symbol/bob.c @@ -0,0 +1,5 @@ +#include"bob.h" + +int hidden_function() { + return 7; +} diff --git a/meson/test cases/failing build/2 hidden symbol/bob.h b/meson/test cases/failing build/2 hidden symbol/bob.h new file mode 100644 index 000000000..947f6eec4 --- /dev/null +++ b/meson/test cases/failing build/2 hidden symbol/bob.h @@ -0,0 +1,3 @@ +#pragma once + +int hidden_function(); diff --git a/meson/test cases/failing build/2 hidden symbol/bobuser.c b/meson/test cases/failing build/2 hidden symbol/bobuser.c new file mode 100644 index 000000000..89272ed6c --- /dev/null +++ b/meson/test cases/failing build/2 hidden symbol/bobuser.c @@ -0,0 +1,5 @@ +#include"bob.h" + +int main(int argc, char **argv) { + return hidden_function(); +} diff --git a/meson/test cases/failing build/2 hidden symbol/meson.build b/meson/test cases/failing build/2 hidden symbol/meson.build new file mode 100644 index 000000000..f7c38e353 --- /dev/null +++ b/meson/test cases/failing build/2 hidden symbol/meson.build @@ -0,0 +1,11 @@ +project('hidden symbol', 'c') + +if host_machine.system() == 'windows' or host_machine.system() == 'cygwin' + error('MESON_SKIP_TEST -fvisibility=hidden does not work for PE files.') +endif + +l = shared_library('bob', 'bob.c', + gnu_symbol_visibility: 'hidden') + +executable('bobuser', 'bobuser.c', + link_with: l) diff --git a/meson/test cases/failing build/3 pch disabled/c/meson.build b/meson/test cases/failing build/3 pch disabled/c/meson.build new file mode 100644 index 000000000..1739126df --- /dev/null +++ b/meson/test cases/failing build/3 pch disabled/c/meson.build @@ -0,0 +1,2 @@ +exe = executable('prog', 'prog.c', +c_pch : ['pch/prog_pch.c', 'pch/prog.h']) diff --git a/meson/test cases/failing build/3 pch disabled/c/pch/prog.h b/meson/test cases/failing build/3 pch disabled/c/pch/prog.h new file mode 100644 index 000000000..354499acd --- /dev/null +++ b/meson/test cases/failing build/3 pch disabled/c/pch/prog.h @@ -0,0 +1 @@ +#include diff --git a/meson/test cases/failing build/3 pch disabled/c/pch/prog_pch.c b/meson/test cases/failing build/3 pch disabled/c/pch/prog_pch.c new file mode 100644 index 000000000..49605057b --- /dev/null +++ b/meson/test cases/failing build/3 pch disabled/c/pch/prog_pch.c @@ -0,0 +1,5 @@ +#if !defined(_MSC_VER) +#error "This file is only for use with MSVC." 
+#endif + +#include "prog.h" diff --git a/meson/test cases/failing build/3 pch disabled/c/prog.c b/meson/test cases/failing build/3 pch disabled/c/prog.c new file mode 100644 index 000000000..0ce3d0a24 --- /dev/null +++ b/meson/test cases/failing build/3 pch disabled/c/prog.c @@ -0,0 +1,10 @@ +// No includes here, they need to come from the PCH + +void func() { + fprintf(stdout, "This is a function that fails if stdio is not #included.\n"); +} + +int main(int argc, char **argv) { + return 0; +} + diff --git a/meson/test cases/failing build/3 pch disabled/meson.build b/meson/test cases/failing build/3 pch disabled/meson.build new file mode 100644 index 000000000..0a8fa6793 --- /dev/null +++ b/meson/test cases/failing build/3 pch disabled/meson.build @@ -0,0 +1,5 @@ +# Disable PCH usage to make sure backends respect this setting. +# Since the .c file requires PCH usage (it does not include necessary +# headers itself), the build should fail. +project('pch test', 'c', default_options: ['b_pch=false']) +subdir('c') diff --git a/meson/test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp b/meson/test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp new file mode 100644 index 000000000..a5f09bed1 --- /dev/null +++ b/meson/test cases/failing build/4 cmake subproject isolation/incDir/fileA.hpp @@ -0,0 +1,3 @@ +#pragma once + +#define SOME_DEFINE " World" diff --git a/meson/test cases/failing build/4 cmake subproject isolation/main.cpp b/meson/test cases/failing build/4 cmake subproject isolation/main.cpp new file mode 100644 index 000000000..95079615a --- /dev/null +++ b/meson/test cases/failing build/4 cmake subproject isolation/main.cpp @@ -0,0 +1,10 @@ +#include +#include + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + return 0; +} diff --git a/meson/test cases/failing build/4 cmake subproject isolation/meson.build b/meson/test cases/failing build/4 cmake subproject isolation/meson.build new file mode 100644 index 000000000..e60633595 --- /dev/null +++ b/meson/test cases/failing build/4 cmake subproject isolation/meson.build @@ -0,0 +1,17 @@ +project('subproject isolation', ['c', 'cpp']) + +if not find_program('cmake', required: false).found() + error('MESON_SKIP_TEST CMake is not installed') +endif + +incdir = meson.source_root() / 'incDir' + +cm = import('cmake') + +# This should generate a warning and the include dir should be skipped. +sub_pro = cm.subproject('cmMod', cmake_options : [ '-DMESON_INC_DIR=' + incdir ]) +sub_dep = sub_pro.dependency('cmModLib++') + +# Since the include dir is skipped, the compilation of this project should fail. 
+exe1 = executable('main', ['main.cpp'], dependencies: [sub_dep])
+test('test1', exe1)
diff --git a/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt b/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
new file mode 100644
index 000000000..852dd09a8
--- /dev/null
+++ b/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/CMakeLists.txt
@@ -0,0 +1,10 @@
+cmake_minimum_required(VERSION 3.5)
+
+project(cmMod)
+set (CMAKE_CXX_STANDARD 14)
+
+include_directories(${CMAKE_CURRENT_BINARY_DIR} ${MESON_INC_DIR})
+
+add_library(cmModLib++ SHARED cmMod.cpp)
+include(GenerateExportHeader)
+generate_export_header(cmModLib++)
diff --git a/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp b/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp
new file mode 100644
index 000000000..a668203d1
--- /dev/null
+++ b/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.cpp
@@ -0,0 +1,12 @@
+#include "cmMod.hpp"
+#include "fileA.hpp"
+
+using namespace std;
+
+cmModClass::cmModClass(string foo) {
+  str = foo + SOME_DEFINE;
+}
+
+string cmModClass::getStr() const {
+  return str;
+}
diff --git a/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp b/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp
new file mode 100644
index 000000000..0e6dc0484
--- /dev/null
+++ b/meson/test cases/failing build/4 cmake subproject isolation/subprojects/cmMod/cmMod.hpp
@@ -0,0 +1,14 @@
+#pragma once
+
+#include "cmmodlib++_export.h"
+#include <string>
+
+class CMMODLIB___EXPORT cmModClass {
+private:
+  std::string str;
+
+public:
+  cmModClass(std::string foo);
+
+  std::string getStr() const;
+};
diff --git a/meson/test cases/failing build/5 failed pickled/false.py b/meson/test cases/failing build/5 failed pickled/false.py
new file mode 100644
index 000000000..865aeb967
--- /dev/null
+++ b/meson/test cases/failing build/5 failed pickled/false.py
@@ -0,0 +1,4 @@
+#!/usr/bin/env python3
+
+import sys
+sys.exit(1)
diff --git a/meson/test cases/failing build/5 failed pickled/meson.build b/meson/test cases/failing build/5 failed pickled/meson.build
new file mode 100644
index 000000000..924502066
--- /dev/null
+++ b/meson/test cases/failing build/5 failed pickled/meson.build
@@ -0,0 +1,7 @@
+project('failed pickled command')
+
+custom_target('failure',
+  command: [find_program('false.py'), '\n'],
+  output: 'output.txt',
+  build_by_default: true,
+)
diff --git a/meson/test cases/failing test/1 trivial/main.c b/meson/test cases/failing test/1 trivial/main.c
new file mode 100644
index 000000000..3e70e5079
--- /dev/null
+++ b/meson/test cases/failing test/1 trivial/main.c
@@ -0,0 +1,3 @@
+int main(void) {
+    return 1;
+}
diff --git a/meson/test cases/failing test/1 trivial/meson.build b/meson/test cases/failing test/1 trivial/meson.build
new file mode 100644
index 000000000..ed5a3d2b3
--- /dev/null
+++ b/meson/test cases/failing test/1 trivial/meson.build
@@ -0,0 +1,3 @@
+project('trivial', 'c')
+
+test('My Test', executable('main', 'main.c'))
diff --git a/meson/test cases/failing test/2 signal/main.c b/meson/test cases/failing test/2 signal/main.c
new file mode 100644
index 000000000..2ee1d8075
--- /dev/null
+++ b/meson/test cases/failing test/2 signal/main.c
@@ -0,0 +1,6 @@
+#include <signal.h>
+#include <unistd.h>
+
+int main(void) {
+    kill(getpid(), SIGSEGV);
+}
diff --git a/meson/test 
cases/failing test/2 signal/meson.build b/meson/test cases/failing test/2 signal/meson.build new file mode 100644 index 000000000..9d84c263e --- /dev/null +++ b/meson/test cases/failing test/2 signal/meson.build @@ -0,0 +1,7 @@ +project('signal', 'c') + +if build_machine.system() == 'windows' + error('MESON_SKIP_TEST test is not compatible with MS Windows.') +else + test('My Signal Test', executable('main', 'main.c')) +endif diff --git a/meson/test cases/failing test/3 ambiguous/main.c b/meson/test cases/failing test/3 ambiguous/main.c new file mode 100644 index 000000000..2ee1d8075 --- /dev/null +++ b/meson/test cases/failing test/3 ambiguous/main.c @@ -0,0 +1,6 @@ +#include +#include + +int main(void) { + kill(getpid(), SIGSEGV); +} diff --git a/meson/test cases/failing test/3 ambiguous/meson.build b/meson/test cases/failing test/3 ambiguous/meson.build new file mode 100644 index 000000000..58f0de071 --- /dev/null +++ b/meson/test cases/failing test/3 ambiguous/meson.build @@ -0,0 +1,10 @@ +project('ambiguous', 'c') + +if build_machine.system() == 'windows' + error('MESON_SKIP_TEST test is not compatible with MS Windows.') +else + exe = executable('main', 'main.c') + test_runner = find_program('test_runner.sh') + + test('My Ambiguous Status Test', test_runner, args : [exe.full_path()]) +endif diff --git a/meson/test cases/failing test/3 ambiguous/test_runner.sh b/meson/test cases/failing test/3 ambiguous/test_runner.sh new file mode 100755 index 000000000..08873ce26 --- /dev/null +++ b/meson/test cases/failing test/3 ambiguous/test_runner.sh @@ -0,0 +1,7 @@ +#!/bin/sh +# +# This tests that using a shell as an intermediary between Meson and the +# actual unit test which dies due to a signal is still recorded correctly. +# +# The quotes are because the path may contain spaces. 
+"$1" diff --git a/meson/test cases/failing test/4 hard error/main.c b/meson/test cases/failing test/4 hard error/main.c new file mode 100644 index 000000000..a1e705ade --- /dev/null +++ b/meson/test cases/failing test/4 hard error/main.c @@ -0,0 +1,3 @@ +int main(void) { + return 99; +} diff --git a/meson/test cases/failing test/4 hard error/meson.build b/meson/test cases/failing test/4 hard error/meson.build new file mode 100644 index 000000000..6979b0416 --- /dev/null +++ b/meson/test cases/failing test/4 hard error/meson.build @@ -0,0 +1,4 @@ +project('trivial', 'c') + +# Exit code 99 even overrides should_fail +test('My Test', executable('main', 'main.c'), should_fail: true) diff --git a/meson/test cases/failing test/5 tap tests/meson.build b/meson/test cases/failing test/5 tap tests/meson.build new file mode 100644 index 000000000..664ac34d2 --- /dev/null +++ b/meson/test cases/failing test/5 tap tests/meson.build @@ -0,0 +1,9 @@ +project('test features', 'c') + +tester = executable('tester', 'tester.c') +test_with_status = executable('test-with-status', 'tester_with_status.c') +test('nonzero return code no tests', tester, args : [], protocol: 'tap') +test('nonzero return code with tests', test_with_status, protocol: 'tap') +test('missing test', tester, args : ['1..1'], protocol: 'tap') +test('incorrect skip', tester, args : ['1..1 # skip\nok 1'], protocol: 'tap') +test('partially skipped', tester, args : ['not ok 1\nok 2 # skip'], protocol: 'tap') diff --git a/meson/test cases/failing test/5 tap tests/tester.c b/meson/test cases/failing test/5 tap tests/tester.c new file mode 100644 index 000000000..ac582e7c0 --- /dev/null +++ b/meson/test cases/failing test/5 tap tests/tester.c @@ -0,0 +1,10 @@ +#include + +int main(int argc, char **argv) { + if (argc != 2) { + fprintf(stderr, "Incorrect number of arguments, got %i\n", argc); + return 1; + } + puts(argv[1]); + return 0; +} diff --git a/meson/test cases/failing test/5 tap tests/tester_with_status.c b/meson/test cases/failing test/5 tap tests/tester_with_status.c new file mode 100644 index 000000000..7613afefc --- /dev/null +++ b/meson/test cases/failing test/5 tap tests/tester_with_status.c @@ -0,0 +1,8 @@ +#include +#include + +int main(int argc, char **argv) { + puts("1..1"); + puts("not ok 1 - some test"); + return 2; +} diff --git a/meson/test cases/failing test/6 xpass/meson.build b/meson/test cases/failing test/6 xpass/meson.build new file mode 100644 index 000000000..7649dde44 --- /dev/null +++ b/meson/test cases/failing test/6 xpass/meson.build @@ -0,0 +1,4 @@ +project('unexpected pass', 'c') + +test('should_fail_but_does_not', executable('xpass', 'xpass.c'), + should_fail: true) diff --git a/meson/test cases/failing test/6 xpass/xpass.c b/meson/test cases/failing test/6 xpass/xpass.c new file mode 100644 index 000000000..0314ff17b --- /dev/null +++ b/meson/test cases/failing test/6 xpass/xpass.c @@ -0,0 +1 @@ +int main(int argc, char **argv) { return 0; } diff --git a/meson/test cases/failing/1 project not first/meson.build b/meson/test cases/failing/1 project not first/meson.build new file mode 100644 index 000000000..f30e155bf --- /dev/null +++ b/meson/test cases/failing/1 project not first/meson.build @@ -0,0 +1,4 @@ +var = 'assignment before project() call' +project('no worky', 'c') + +test('not run', executable('prog', 'prog.c')) diff --git a/meson/test cases/failing/1 project not first/prog.c b/meson/test cases/failing/1 project not first/prog.c new file mode 100644 index 000000000..0314ff17b --- /dev/null +++ 
b/meson/test cases/failing/1 project not first/prog.c @@ -0,0 +1 @@ +int main(int argc, char **argv) { return 0; } diff --git a/meson/test cases/failing/1 project not first/test.json b/meson/test cases/failing/1 project not first/test.json new file mode 100644 index 000000000..70f3c41ac --- /dev/null +++ b/meson/test cases/failing/1 project not first/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "ERROR: First statement must be a call to project" + } + ] +} diff --git a/meson/test cases/failing/10 out of bounds/meson.build b/meson/test cases/failing/10 out of bounds/meson.build new file mode 100644 index 000000000..f79167578 --- /dev/null +++ b/meson/test cases/failing/10 out of bounds/meson.build @@ -0,0 +1,4 @@ +project('out of bounds', 'c') + +x = [] +y = x[0] diff --git a/meson/test cases/failing/10 out of bounds/test.json b/meson/test cases/failing/10 out of bounds/test.json new file mode 100644 index 000000000..e27d99080 --- /dev/null +++ b/meson/test cases/failing/10 out of bounds/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/10 out of bounds/meson.build:4:0: ERROR: Index 0 out of bounds of array of size 0." + } + ] +} diff --git a/meson/test cases/failing/100 no lang/main.c b/meson/test cases/failing/100 no lang/main.c new file mode 100644 index 000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/failing/100 no lang/main.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/failing/100 no lang/meson.build b/meson/test cases/failing/100 no lang/meson.build new file mode 100644 index 000000000..85c5db8e5 --- /dev/null +++ b/meson/test cases/failing/100 no lang/meson.build @@ -0,0 +1,2 @@ +project('target without lang') +executable('main', 'main.c') diff --git a/meson/test cases/failing/100 no lang/test.json b/meson/test cases/failing/100 no lang/test.json new file mode 100644 index 000000000..58dc1ac2e --- /dev/null +++ b/meson/test cases/failing/100 no lang/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/100 no lang/meson.build:2:0: ERROR: No host machine compiler for \"main.c\"" + } + ] +} diff --git a/meson/test cases/failing/101 no glib-compile-resources/meson.build b/meson/test cases/failing/101 no glib-compile-resources/meson.build new file mode 100644 index 000000000..aae0569da --- /dev/null +++ b/meson/test cases/failing/101 no glib-compile-resources/meson.build @@ -0,0 +1,8 @@ +project('no glib-compile-resources') + +if find_program('glib-compile-resources', required: false).found() + error('MESON_SKIP_TEST test only applicable when glib-compile-resources is missing.') +endif + +gnome = import('gnome') +res = gnome.compile_resources('resources', 'trivial.gresource.xml') diff --git a/meson/test cases/failing/101 no glib-compile-resources/test.json b/meson/test cases/failing/101 no glib-compile-resources/test.json new file mode 100644 index 000000000..d81b0c0db --- /dev/null +++ b/meson/test cases/failing/101 no glib-compile-resources/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/101 no glib-compile-resources/meson.build:8:0: ERROR: Program 'glib-compile-resources' not found" + } + ] +} diff --git a/meson/test cases/failing/101 no glib-compile-resources/trivial.gresource.xml b/meson/test cases/failing/101 no glib-compile-resources/trivial.gresource.xml new file mode 100644 index 000000000..1447b984d --- /dev/null +++ b/meson/test cases/failing/101 no glib-compile-resources/trivial.gresource.xml @@ -0,0 +1,3 @@ + + + diff --git a/meson/test 
cases/failing/102 number in combo/meson.build b/meson/test cases/failing/102 number in combo/meson.build new file mode 100644 index 000000000..1a647df1a --- /dev/null +++ b/meson/test cases/failing/102 number in combo/meson.build @@ -0,0 +1 @@ +project('number in combo') diff --git a/meson/test cases/failing/102 number in combo/nativefile.ini b/meson/test cases/failing/102 number in combo/nativefile.ini new file mode 100644 index 000000000..55f10fc20 --- /dev/null +++ b/meson/test cases/failing/102 number in combo/nativefile.ini @@ -0,0 +1,2 @@ +[built-in options] +optimization = 1 diff --git a/meson/test cases/failing/102 number in combo/test.json b/meson/test cases/failing/102 number in combo/test.json new file mode 100644 index 000000000..8e70a6014 --- /dev/null +++ b/meson/test cases/failing/102 number in combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/102 number in combo/meson.build:1:0: ERROR: Value \"1\" (of type \"number\") for combo option \"Optimization level\" is not one of the choices. Possible choices are (as string): \"0\", \"g\", \"1\", \"2\", \"3\", \"s\"." } + ] +} diff --git a/meson/test cases/failing/103 bool in combo/meson.build b/meson/test cases/failing/103 bool in combo/meson.build new file mode 100644 index 000000000..c5efd67ea --- /dev/null +++ b/meson/test cases/failing/103 bool in combo/meson.build @@ -0,0 +1 @@ +project('bool in combo') diff --git a/meson/test cases/failing/103 bool in combo/meson_options.txt b/meson/test cases/failing/103 bool in combo/meson_options.txt new file mode 100644 index 000000000..0c8f5de00 --- /dev/null +++ b/meson/test cases/failing/103 bool in combo/meson_options.txt @@ -0,0 +1,5 @@ +option( + 'opt', + type : 'combo', + choices : ['true', 'false'] +) diff --git a/meson/test cases/failing/103 bool in combo/nativefile.ini b/meson/test cases/failing/103 bool in combo/nativefile.ini new file mode 100644 index 000000000..b423957cf --- /dev/null +++ b/meson/test cases/failing/103 bool in combo/nativefile.ini @@ -0,0 +1,2 @@ +[project options] +opt = true diff --git a/meson/test cases/failing/103 bool in combo/test.json b/meson/test cases/failing/103 bool in combo/test.json new file mode 100644 index 000000000..48d4cc39a --- /dev/null +++ b/meson/test cases/failing/103 bool in combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/103 bool in combo/meson.build:1:0: ERROR: Value \"True\" (of type \"boolean\") for combo option \"opt\" is not one of the choices. Possible choices are (as string): \"true\", \"false\"." } + ] +} diff --git a/meson/test cases/failing/104 compiler no lang/meson.build b/meson/test cases/failing/104 compiler no lang/meson.build new file mode 100644 index 000000000..366bbdd5d --- /dev/null +++ b/meson/test cases/failing/104 compiler no lang/meson.build @@ -0,0 +1,2 @@ +project('compiler without lang') +meson.get_compiler('c') diff --git a/meson/test cases/failing/104 compiler no lang/test.json b/meson/test cases/failing/104 compiler no lang/test.json new file mode 100644 index 000000000..d8cc96eef --- /dev/null +++ b/meson/test cases/failing/104 compiler no lang/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/104 compiler no lang/meson.build:2:6: ERROR: Tried to access compiler for language \"c\", not specified for host machine." 
+ } + ] +} diff --git a/meson/test cases/failing/105 no fallback/meson.build b/meson/test cases/failing/105 no fallback/meson.build new file mode 100644 index 000000000..0101bb84e --- /dev/null +++ b/meson/test cases/failing/105 no fallback/meson.build @@ -0,0 +1,2 @@ +project('no fallback', 'c') +foob_dep = dependency('foob', allow_fallback: false, required: true) diff --git a/meson/test cases/failing/105 no fallback/subprojects/foob/meson.build b/meson/test cases/failing/105 no fallback/subprojects/foob/meson.build new file mode 100644 index 000000000..b2c4814e2 --- /dev/null +++ b/meson/test cases/failing/105 no fallback/subprojects/foob/meson.build @@ -0,0 +1,2 @@ +project('foob', 'c') +meson.override_dependency('foob', declare_dependency()) diff --git a/meson/test cases/failing/105 no fallback/test.json b/meson/test cases/failing/105 no fallback/test.json new file mode 100644 index 000000000..e0340616c --- /dev/null +++ b/meson/test cases/failing/105 no fallback/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": ".*/meson\\.build:2:0: ERROR: (Pkg-config binary for machine MachineChoice\\.HOST not found\\. Giving up\\.|Dependency \"foob\" not found, tried .*)" + } + ] +} diff --git a/meson/test cases/failing/106 feature require/meson.build b/meson/test cases/failing/106 feature require/meson.build new file mode 100644 index 000000000..d976ae8d6 --- /dev/null +++ b/meson/test cases/failing/106 feature require/meson.build @@ -0,0 +1,2 @@ +project('no fallback', 'c') +foo = get_option('reqfeature').require(false, error_message: 'frobnicator not available') diff --git a/meson/test cases/failing/106 feature require/meson_options.txt b/meson/test cases/failing/106 feature require/meson_options.txt new file mode 100644 index 000000000..d6f2ce656 --- /dev/null +++ b/meson/test cases/failing/106 feature require/meson_options.txt @@ -0,0 +1,2 @@ +option('reqfeature', type : 'feature', value : 'enabled', description : 'A required feature') + diff --git a/meson/test cases/failing/106 feature require/test.json b/meson/test cases/failing/106 feature require/test.json new file mode 100644 index 000000000..7c4640de0 --- /dev/null +++ b/meson/test cases/failing/106 feature require/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": ".*/meson\\.build:2:0: ERROR: Feature reqfeature cannot be enabled: frobnicator not available" + } + ] +} diff --git a/meson/test cases/failing/107 no build get_external_property/meson.build b/meson/test cases/failing/107 no build get_external_property/meson.build new file mode 100644 index 000000000..8a4215c9f --- /dev/null +++ b/meson/test cases/failing/107 no build get_external_property/meson.build @@ -0,0 +1,3 @@ +project('missing property') + +message(meson.get_external_property('nonexisting', native : true)) diff --git a/meson/test cases/failing/107 no build get_external_property/test.json b/meson/test cases/failing/107 no build get_external_property/test.json new file mode 100644 index 000000000..b95427efd --- /dev/null +++ b/meson/test cases/failing/107 no build get_external_property/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/107 no build get_external_property/meson.build:3:0: ERROR: Unknown property for build machine: nonexisting" + } + ] +} diff --git a/meson/test cases/failing/108 enter subdir twice/meson.build b/meson/test cases/failing/108 enter subdir twice/meson.build new file mode 100644 index 000000000..9343233d5 --- /dev/null +++ b/meson/test cases/failing/108 enter subdir 
twice/meson.build @@ -0,0 +1,3 @@ +project('subdir2', 'c') +subdir('sub') +subdir('sub') diff --git a/meson/test cases/failing/108 enter subdir twice/sub/meson.build b/meson/test cases/failing/108 enter subdir twice/sub/meson.build new file mode 100644 index 000000000..d036a3fe5 --- /dev/null +++ b/meson/test cases/failing/108 enter subdir twice/sub/meson.build @@ -0,0 +1 @@ +message('Now in subdir') diff --git a/meson/test cases/failing/108 enter subdir twice/test.json b/meson/test cases/failing/108 enter subdir twice/test.json new file mode 100644 index 000000000..0a8e12769 --- /dev/null +++ b/meson/test cases/failing/108 enter subdir twice/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/108 enter subdir twice/meson.build:3:0: ERROR: Tried to enter directory \"sub\", which has already been visited." + } + ] +} diff --git a/meson/test cases/failing/109 invalid fstring/meson.build b/meson/test cases/failing/109 invalid fstring/meson.build new file mode 100644 index 000000000..dd22f56b2 --- /dev/null +++ b/meson/test cases/failing/109 invalid fstring/meson.build @@ -0,0 +1,4 @@ +project('invalid-fstring', 'c') + +dict = {'key': true} +s = f'invalid fstring: @dict@' diff --git a/meson/test cases/failing/109 invalid fstring/test.json b/meson/test cases/failing/109 invalid fstring/test.json new file mode 100644 index 000000000..71d8f5927 --- /dev/null +++ b/meson/test cases/failing/109 invalid fstring/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/109 invalid fstring/meson.build:4:0: ERROR: Identifier \"dict\" does not name a formattable variable (has to be an integer, a string, a floating point number or a boolean)." + } + ] +} diff --git a/meson/test cases/failing/11 object arithmetic/meson.build b/meson/test cases/failing/11 object arithmetic/meson.build new file mode 100644 index 000000000..9a7a6565c --- /dev/null +++ b/meson/test cases/failing/11 object arithmetic/meson.build @@ -0,0 +1,3 @@ +project('object arithmetic', 'c') + +foo = '5' + meson diff --git a/meson/test cases/failing/11 object arithmetic/test.json b/meson/test cases/failing/11 object arithmetic/test.json new file mode 100644 index 000000000..5339facda --- /dev/null +++ b/meson/test cases/failing/11 object arithmetic/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/11 object arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*" + } + ] +} diff --git a/meson/test cases/failing/110 invalid fstring/meson.build b/meson/test cases/failing/110 invalid fstring/meson.build new file mode 100644 index 000000000..973df3098 --- /dev/null +++ b/meson/test cases/failing/110 invalid fstring/meson.build @@ -0,0 +1,3 @@ +project('invalid-fstring', 'c') + +z = f'invalid fstring: @foo@' diff --git a/meson/test cases/failing/110 invalid fstring/test.json b/meson/test cases/failing/110 invalid fstring/test.json new file mode 100644 index 000000000..bfd0e2d42 --- /dev/null +++ b/meson/test cases/failing/110 invalid fstring/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/110 invalid fstring/meson.build:3:0: ERROR: Identifier \"foo\" does not name a variable." 
+ } + ] +} diff --git a/meson/test cases/failing/111 compiler argument checking/meson.build b/meson/test cases/failing/111 compiler argument checking/meson.build new file mode 100644 index 000000000..bb1f44768 --- /dev/null +++ b/meson/test cases/failing/111 compiler argument checking/meson.build @@ -0,0 +1,4 @@ +project('compiler argument checking test', 'c') + +cc = meson.get_compiler('c') +add_project_arguments(cc.get_supported_arguments('-meson-goober-arg-for-testing', checked : 'require'), language : 'c') diff --git a/meson/test cases/failing/111 compiler argument checking/test.json b/meson/test cases/failing/111 compiler argument checking/test.json new file mode 100644 index 000000000..f41f2d2cf --- /dev/null +++ b/meson/test cases/failing/111 compiler argument checking/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/111 compiler argument checking/meson.build:4:0: ERROR: Compiler for C does not support \"-meson-goober-arg-for-testing\"" + } + ] +} diff --git a/meson/test cases/failing/112 empty fallback/meson.build b/meson/test cases/failing/112 empty fallback/meson.build new file mode 100644 index 000000000..f4eb5feb9 --- /dev/null +++ b/meson/test cases/failing/112 empty fallback/meson.build @@ -0,0 +1,6 @@ +project('empty fallback') + +# There is a subproject named 'foo' that overrides that dependency, +# but `fallback: []` should not allow to use it. Same behaviour than with +# `allow_fallback: false` +dependency('foo', fallback: []) diff --git a/meson/test cases/failing/112 empty fallback/subprojects/foo/meson.build b/meson/test cases/failing/112 empty fallback/subprojects/foo/meson.build new file mode 100644 index 000000000..c9e134b5e --- /dev/null +++ b/meson/test cases/failing/112 empty fallback/subprojects/foo/meson.build @@ -0,0 +1,3 @@ +project('foo') + +meson.override_dependency('foo', declare_dependency()) diff --git a/meson/test cases/failing/112 empty fallback/test.json b/meson/test cases/failing/112 empty fallback/test.json new file mode 100644 index 000000000..89520efc3 --- /dev/null +++ b/meson/test cases/failing/112 empty fallback/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/112 empty fallback/meson.build:6:0: ERROR: Dependency \"foo\" not found, tried pkgconfig and cmake" + } + ] +} diff --git a/meson/test cases/failing/113 cmake executable dependency/meson.build b/meson/test cases/failing/113 cmake executable dependency/meson.build new file mode 100644 index 000000000..bfb03ef20 --- /dev/null +++ b/meson/test cases/failing/113 cmake executable dependency/meson.build @@ -0,0 +1,9 @@ +project('cmake-executable-dependency', ['c', 'cpp']) + +if not find_program('cmake', required: false).found() + error('MESON_SKIP_TEST CMake is not installed') +endif + +cmake = import('cmake') +cmlib = cmake.subproject('cmlib') +maind = cmlib.dependency('main') diff --git a/meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/CMakeLists.txt b/meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/CMakeLists.txt new file mode 100644 index 000000000..006787986 --- /dev/null +++ b/meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/CMakeLists.txt @@ -0,0 +1,5 @@ +cmake_minimum_required(VERSION 3.5) + +project(cmlib) + +add_executable(main main.c) diff --git a/meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/main.c b/meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/main.c new file mode 100644 index 
000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/failing/113 cmake executable dependency/subprojects/cmlib/main.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/failing/113 cmake executable dependency/test.json b/meson/test cases/failing/113 cmake executable dependency/test.json new file mode 100644 index 000000000..1cb4a0f0a --- /dev/null +++ b/meson/test cases/failing/113 cmake executable dependency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/113 cmake executable dependency/meson.build:9:0: ERROR: main is an executable and does not support the dependency() method. Use target() instead." + } + ] +} diff --git a/meson/test cases/failing/114 allow_fallback with fallback/meson.build b/meson/test cases/failing/114 allow_fallback with fallback/meson.build new file mode 100644 index 000000000..2874e424c --- /dev/null +++ b/meson/test cases/failing/114 allow_fallback with fallback/meson.build @@ -0,0 +1,3 @@ +project('fallback and allow_fallback') + +dependency('foo', fallback: 'foo', allow_fallback: false) diff --git a/meson/test cases/failing/114 allow_fallback with fallback/test.json b/meson/test cases/failing/114 allow_fallback with fallback/test.json new file mode 100644 index 000000000..1e5712e78 --- /dev/null +++ b/meson/test cases/failing/114 allow_fallback with fallback/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "line": "test cases/failing/114 allow_fallback with fallback/meson.build:3:0: ERROR: \"fallback\" and \"allow_fallback\" arguments are mutually exclusive" + } + ] +} + diff --git a/meson/test cases/failing/12 string arithmetic/meson.build b/meson/test cases/failing/12 string arithmetic/meson.build new file mode 100644 index 000000000..c02a865a4 --- /dev/null +++ b/meson/test cases/failing/12 string arithmetic/meson.build @@ -0,0 +1,3 @@ +project('string arithmetic', 'c') + +foo = 'a' + 3 diff --git a/meson/test cases/failing/12 string arithmetic/test.json b/meson/test cases/failing/12 string arithmetic/test.json new file mode 100644 index 000000000..476f9bb20 --- /dev/null +++ b/meson/test cases/failing/12 string arithmetic/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/12 string arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*" + } + ] +} diff --git a/meson/test cases/failing/13 array arithmetic/meson.build b/meson/test cases/failing/13 array arithmetic/meson.build new file mode 100644 index 000000000..3ddf06092 --- /dev/null +++ b/meson/test cases/failing/13 array arithmetic/meson.build @@ -0,0 +1,3 @@ +project('array arithmetic', 'c') + +foo = ['a', 'b'] * 3 diff --git a/meson/test cases/failing/13 array arithmetic/test.json b/meson/test cases/failing/13 array arithmetic/test.json new file mode 100644 index 000000000..55056ce6d --- /dev/null +++ b/meson/test cases/failing/13 array arithmetic/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/13 array arithmetic/meson.build:3:0: ERROR: Multiplication works only with integers." 
+ } + ] +} diff --git a/meson/test cases/failing/14 invalid option name/meson.build b/meson/test cases/failing/14 invalid option name/meson.build new file mode 100644 index 000000000..b99fd21ad --- /dev/null +++ b/meson/test cases/failing/14 invalid option name/meson.build @@ -0,0 +1 @@ +project('foo', 'c') diff --git a/meson/test cases/failing/14 invalid option name/meson_options.txt b/meson/test cases/failing/14 invalid option name/meson_options.txt new file mode 100644 index 000000000..aab6ae8de --- /dev/null +++ b/meson/test cases/failing/14 invalid option name/meson_options.txt @@ -0,0 +1 @@ +option('invalid:name', type : 'boolean', value : false) diff --git a/meson/test cases/failing/14 invalid option name/test.json b/meson/test cases/failing/14 invalid option name/test.json new file mode 100644 index 000000000..71e685da4 --- /dev/null +++ b/meson/test cases/failing/14 invalid option name/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/14 invalid option name/meson_options.txt:1:0: ERROR: Option names can only contain letters, numbers or dashes." + } + ] +} diff --git a/meson/test cases/failing/15 kwarg before arg/meson.build b/meson/test cases/failing/15 kwarg before arg/meson.build new file mode 100644 index 000000000..f07d950bd --- /dev/null +++ b/meson/test cases/failing/15 kwarg before arg/meson.build @@ -0,0 +1,3 @@ +project('kwarg before arg', 'c') + +executable(sources : 'prog.c', 'prog') diff --git a/meson/test cases/failing/15 kwarg before arg/prog.c b/meson/test cases/failing/15 kwarg before arg/prog.c new file mode 100644 index 000000000..0314ff17b --- /dev/null +++ b/meson/test cases/failing/15 kwarg before arg/prog.c @@ -0,0 +1 @@ +int main(int argc, char **argv) { return 0; } diff --git a/meson/test cases/failing/15 kwarg before arg/test.json b/meson/test cases/failing/15 kwarg before arg/test.json new file mode 100644 index 000000000..c7f72c35e --- /dev/null +++ b/meson/test cases/failing/15 kwarg before arg/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/15 kwarg before arg/meson.build:3:0: ERROR: All keyword arguments must be after positional arguments." 
+ } + ] +} diff --git a/meson/test cases/failing/16 extract from subproject/main.c b/meson/test cases/failing/16 extract from subproject/main.c new file mode 100644 index 000000000..6c8ecaea5 --- /dev/null +++ b/meson/test cases/failing/16 extract from subproject/main.c @@ -0,0 +1,5 @@ +int sub_lib_method(void); + +int main(void) { + return 1337 - sub_lib_method(); +} diff --git a/meson/test cases/failing/16 extract from subproject/meson.build b/meson/test cases/failing/16 extract from subproject/meson.build new file mode 100644 index 000000000..286aaa191 --- /dev/null +++ b/meson/test cases/failing/16 extract from subproject/meson.build @@ -0,0 +1,9 @@ +project('extract subproject object', 'c') + +sub = subproject('sub_project') +lib = sub.get_variable('lib') + +exe = executable('exe', 'main.c', + objects : lib.extract_objects('sub_lib.c')) + +test('extraction test', exe) diff --git a/meson/test cases/failing/16 extract from subproject/subprojects/sub_project/meson.build b/meson/test cases/failing/16 extract from subproject/subprojects/sub_project/meson.build new file mode 100644 index 000000000..0810df503 --- /dev/null +++ b/meson/test cases/failing/16 extract from subproject/subprojects/sub_project/meson.build @@ -0,0 +1,3 @@ +project('extract subproject object -- subproject', 'c') + +lib = library('sub_lib', 'sub_lib.c') diff --git a/meson/test cases/failing/16 extract from subproject/subprojects/sub_project/sub_lib.c b/meson/test cases/failing/16 extract from subproject/subprojects/sub_project/sub_lib.c new file mode 100644 index 000000000..be3c9aae2 --- /dev/null +++ b/meson/test cases/failing/16 extract from subproject/subprojects/sub_project/sub_lib.c @@ -0,0 +1,3 @@ +int sub_lib_method() { + return 1337; +} diff --git a/meson/test cases/failing/16 extract from subproject/test.json b/meson/test cases/failing/16 extract from subproject/test.json new file mode 100644 index 000000000..2e32904dd --- /dev/null +++ b/meson/test cases/failing/16 extract from subproject/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/16 extract from subproject/meson.build:6:0: ERROR: Tried to extract objects from a different subproject." + } + ] +} diff --git a/meson/test cases/failing/17 same target/file.c b/meson/test cases/failing/17 same target/file.c new file mode 100644 index 000000000..741237235 --- /dev/null +++ b/meson/test cases/failing/17 same target/file.c @@ -0,0 +1 @@ +int func() { return 0; } diff --git a/meson/test cases/failing/17 same target/meson.build b/meson/test cases/failing/17 same target/meson.build new file mode 100644 index 000000000..ee586d0f4 --- /dev/null +++ b/meson/test cases/failing/17 same target/meson.build @@ -0,0 +1,4 @@ +project('same target', 'c') + +static_library('foo', 'file.c') +static_library('foo', 'file.c') diff --git a/meson/test cases/failing/17 same target/test.json b/meson/test cases/failing/17 same target/test.json new file mode 100644 index 000000000..0005ba4a8 --- /dev/null +++ b/meson/test cases/failing/17 same target/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/17 same target/meson.build:4:0: ERROR: Tried to create target \"foo\", but a target of that name already exists." 
+ } + ] +} diff --git a/meson/test cases/failing/18 wrong plusassign/meson.build b/meson/test cases/failing/18 wrong plusassign/meson.build new file mode 100644 index 000000000..dfb9e6d53 --- /dev/null +++ b/meson/test cases/failing/18 wrong plusassign/meson.build @@ -0,0 +1,3 @@ +project('false plusassign', 'c') + +3 += 4 diff --git a/meson/test cases/failing/18 wrong plusassign/test.json b/meson/test cases/failing/18 wrong plusassign/test.json new file mode 100644 index 000000000..c698f85b9 --- /dev/null +++ b/meson/test cases/failing/18 wrong plusassign/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/18 wrong plusassign/meson.build:3:0: ERROR: Plusassignment target must be an id." + } + ] +} diff --git a/meson/test cases/failing/19 target clash/clash.c b/meson/test cases/failing/19 target clash/clash.c new file mode 100644 index 000000000..2daa06cf6 --- /dev/null +++ b/meson/test cases/failing/19 target clash/clash.c @@ -0,0 +1,6 @@ +#include<stdio.h> + +int main(int argc, char **argv) { + printf("Clash 2.\n"); + return 0; +} diff --git a/meson/test cases/failing/19 target clash/meson.build b/meson/test cases/failing/19 target clash/meson.build new file mode 100644 index 000000000..4fd09348e --- /dev/null +++ b/meson/test cases/failing/19 target clash/meson.build @@ -0,0 +1,15 @@ +project('clash', 'c') + +# This setup causes a namespace clash when two Meson targets would +# produce a Ninja targets with the same name. It only works on +# unix, because on Windows the target has a '.exe' suffix. +# +# This test might fail to work on different backends or when +# output location is redirected. + +if host_machine.system() == 'windows' or host_machine.system() == 'cygwin' + error('MESON_SKIP_TEST test only works on platforms where executables have no suffix.') +endif + +executable('clash', 'clash.c') +run_target('clash', command: ['echo', 'clash 1']) diff --git a/meson/test cases/failing/19 target clash/test.json b/meson/test cases/failing/19 target clash/test.json new file mode 100644 index 000000000..d22b894c4 --- /dev/null +++ b/meson/test cases/failing/19 target clash/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "ERROR: Multiple producers for Ninja target \"clash\". Please rename your targets." + } + ] +} diff --git a/meson/test cases/failing/2 missing file/meson.build b/meson/test cases/failing/2 missing file/meson.build new file mode 100644 index 000000000..6b9133d62 --- /dev/null +++ b/meson/test cases/failing/2 missing file/meson.build @@ -0,0 +1,3 @@ +project('missing file', 'c') + +executable('prog', 'missing.c') diff --git a/meson/test cases/failing/2 missing file/test.json b/meson/test cases/failing/2 missing file/test.json new file mode 100644 index 000000000..b95b8b082 --- /dev/null +++ b/meson/test cases/failing/2 missing file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/2 missing file/meson.build:3:0: ERROR: File missing.c does not exist."
+ } + ] +} diff --git a/meson/test cases/failing/20 version/meson.build b/meson/test cases/failing/20 version/meson.build new file mode 100644 index 000000000..9a3a851cb --- /dev/null +++ b/meson/test cases/failing/20 version/meson.build @@ -0,0 +1 @@ +project('version mismatch', 'c', meson_version : '>100.0.0') diff --git a/meson/test cases/failing/20 version/test.json b/meson/test cases/failing/20 version/test.json new file mode 100644 index 000000000..f3306246b --- /dev/null +++ b/meson/test cases/failing/20 version/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/20 version/meson\\.build:1:0: ERROR: Meson version is .* but project requires >100\\.0\\.0" + } + ] +} diff --git a/meson/test cases/failing/21 subver/meson.build b/meson/test cases/failing/21 subver/meson.build new file mode 100644 index 000000000..854f13ce0 --- /dev/null +++ b/meson/test cases/failing/21 subver/meson.build @@ -0,0 +1,3 @@ +project('master', 'c') + +x = subproject('foo', version : '>1.0.0') diff --git a/meson/test cases/failing/21 subver/subprojects/foo/meson.build b/meson/test cases/failing/21 subver/subprojects/foo/meson.build new file mode 100644 index 000000000..f4ff53548 --- /dev/null +++ b/meson/test cases/failing/21 subver/subprojects/foo/meson.build @@ -0,0 +1 @@ +project('foo', 'c', version : '1.0.0') diff --git a/meson/test cases/failing/21 subver/test.json b/meson/test cases/failing/21 subver/test.json new file mode 100644 index 000000000..f8cfd3a1f --- /dev/null +++ b/meson/test cases/failing/21 subver/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/21 subver/meson.build:3:0: ERROR: Subproject foo version is 1.0.0 but >1.0.0 required." + } + ] +} diff --git a/meson/test cases/failing/22 assert/meson.build b/meson/test cases/failing/22 assert/meson.build new file mode 100644 index 000000000..ae3a19ca1 --- /dev/null +++ b/meson/test cases/failing/22 assert/meson.build @@ -0,0 +1,3 @@ +project('failing assert', 'c') + +assert(false, 'I am fail.') diff --git a/meson/test cases/failing/22 assert/test.json b/meson/test cases/failing/22 assert/test.json new file mode 100644 index 000000000..edae9996e --- /dev/null +++ b/meson/test cases/failing/22 assert/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/22 assert/meson.build:3:0: ERROR: Assert failed: I am fail." 
+ } + ] +} diff --git a/meson/test cases/failing/23 rel testdir/meson.build b/meson/test cases/failing/23 rel testdir/meson.build new file mode 100644 index 000000000..c10558b34 --- /dev/null +++ b/meson/test cases/failing/23 rel testdir/meson.build @@ -0,0 +1,4 @@ +project('nonabs workdir', 'c') + +exe = executable('simple', 'simple.c') +test('simple', exe, workdir : '.') diff --git a/meson/test cases/failing/23 rel testdir/simple.c b/meson/test cases/failing/23 rel testdir/simple.c new file mode 100644 index 000000000..11b7fad8e --- /dev/null +++ b/meson/test cases/failing/23 rel testdir/simple.c @@ -0,0 +1,3 @@ +int main(int argc, char **argv) { + return 0; +} diff --git a/meson/test cases/failing/23 rel testdir/test.json b/meson/test cases/failing/23 rel testdir/test.json new file mode 100644 index 000000000..79ab48a43 --- /dev/null +++ b/meson/test cases/failing/23 rel testdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/23 rel testdir/meson.build:4:0: ERROR: benchmark keyword argument \"workdir\" must be an absolute path" + } + ] +} diff --git a/meson/test cases/failing/24 int conversion/meson.build b/meson/test cases/failing/24 int conversion/meson.build new file mode 100644 index 000000000..51f6c7e16 --- /dev/null +++ b/meson/test cases/failing/24 int conversion/meson.build @@ -0,0 +1,3 @@ +project('int conversion', 'c') + +'notanumber'.to_int() diff --git a/meson/test cases/failing/24 int conversion/test.json b/meson/test cases/failing/24 int conversion/test.json new file mode 100644 index 000000000..e749928fa --- /dev/null +++ b/meson/test cases/failing/24 int conversion/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/24 int conversion/meson.build:3:13: ERROR: String 'notanumber' cannot be converted to int" + } + ] +} diff --git a/meson/test cases/failing/25 badlang/meson.build b/meson/test cases/failing/25 badlang/meson.build new file mode 100644 index 000000000..f6bf0cca0 --- /dev/null +++ b/meson/test cases/failing/25 badlang/meson.build @@ -0,0 +1,3 @@ +project('badlang', 'c') + +add_languages('nonexisting') diff --git a/meson/test cases/failing/25 badlang/test.json b/meson/test cases/failing/25 badlang/test.json new file mode 100644 index 000000000..0b23fd7e5 --- /dev/null +++ b/meson/test cases/failing/25 badlang/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/25 badlang/meson.build:3:0: ERROR: Tried to use unknown language \"nonexisting\"." + } + ] +} diff --git a/meson/test cases/failing/26 output subdir/foo.in b/meson/test cases/failing/26 output subdir/foo.in new file mode 100644 index 000000000..3d1bf19eb --- /dev/null +++ b/meson/test cases/failing/26 output subdir/foo.in @@ -0,0 +1 @@ +Nothing here. diff --git a/meson/test cases/failing/26 output subdir/meson.build b/meson/test cases/failing/26 output subdir/meson.build new file mode 100644 index 000000000..4eb422ce4 --- /dev/null +++ b/meson/test cases/failing/26 output subdir/meson.build @@ -0,0 +1,5 @@ +project('outdir path', 'c') + +configure_file(input : 'foo.in', + output : 'subdir/foo', + copy: true) diff --git a/meson/test cases/failing/26 output subdir/subdir/dummy.txt b/meson/test cases/failing/26 output subdir/subdir/dummy.txt new file mode 100644 index 000000000..f10acf3e5 --- /dev/null +++ b/meson/test cases/failing/26 output subdir/subdir/dummy.txt @@ -0,0 +1,2 @@ +I'm only here because Git is stupid about empty dirs. 
+ diff --git a/meson/test cases/failing/26 output subdir/test.json b/meson/test cases/failing/26 output subdir/test.json new file mode 100644 index 000000000..796468db1 --- /dev/null +++ b/meson/test cases/failing/26 output subdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/26 output subdir/meson.build:3:0: ERROR: Output file name must not contain a subdirectory." + } + ] +} diff --git a/meson/test cases/failing/27 noprog use/meson.build b/meson/test cases/failing/27 noprog use/meson.build new file mode 100644 index 000000000..e4de42fbd --- /dev/null +++ b/meson/test cases/failing/27 noprog use/meson.build @@ -0,0 +1,9 @@ +project('using not found exe', 'c') + +nope = find_program('nonexisting', required : false) + +custom_target( 'aa', + input: 'meson.build', + output: 'foobar', + command: [nope, '@INPUT@', '@OUTPUT@'] +) diff --git a/meson/test cases/failing/27 noprog use/test.json b/meson/test cases/failing/27 noprog use/test.json new file mode 100644 index 000000000..b84562e2b --- /dev/null +++ b/meson/test cases/failing/27 noprog use/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/27 noprog use/meson.build:5:0: ERROR: Tried to use not-found external program in \"command\"" + } + ] +} diff --git a/meson/test cases/failing/28 no crossprop/meson.build b/meson/test cases/failing/28 no crossprop/meson.build new file mode 100644 index 000000000..bd3a743d6 --- /dev/null +++ b/meson/test cases/failing/28 no crossprop/meson.build @@ -0,0 +1,3 @@ +project('no crossprop', 'c') + +message(meson.get_cross_property('nonexisting')) diff --git a/meson/test cases/failing/28 no crossprop/test.json b/meson/test cases/failing/28 no crossprop/test.json new file mode 100644 index 000000000..6fb9dce66 --- /dev/null +++ b/meson/test cases/failing/28 no crossprop/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/28 no crossprop/meson.build:3:0: ERROR: Unknown property for host machine: nonexisting" + } + ] +} diff --git a/meson/test cases/failing/29 nested ternary/meson.build b/meson/test cases/failing/29 nested ternary/meson.build new file mode 100644 index 000000000..f9c2e5f8a --- /dev/null +++ b/meson/test cases/failing/29 nested ternary/meson.build @@ -0,0 +1,3 @@ +project('nested ternary', 'c') + +x = true ? (false ? 1 : 0) : 2 diff --git a/meson/test cases/failing/29 nested ternary/test.json b/meson/test cases/failing/29 nested ternary/test.json new file mode 100644 index 000000000..ba0501372 --- /dev/null +++ b/meson/test cases/failing/29 nested ternary/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/29 nested ternary/meson.build:3:12: ERROR: Nested ternary operators are not allowed." 
+ } + ] +} diff --git a/meson/test cases/failing/3 missing subdir/meson.build b/meson/test cases/failing/3 missing subdir/meson.build new file mode 100644 index 000000000..fef8c4bca --- /dev/null +++ b/meson/test cases/failing/3 missing subdir/meson.build @@ -0,0 +1,3 @@ +project('subdir', 'c') + +subdir('missing') diff --git a/meson/test cases/failing/3 missing subdir/test.json b/meson/test cases/failing/3 missing subdir/test.json new file mode 100644 index 000000000..562de2545 --- /dev/null +++ b/meson/test cases/failing/3 missing subdir/test.json @@ -0,0 +1,9 @@ +{ + "stdout": [ + { + "comment": "'missing/meson.build' gets transformed with os.path.sep separators", + "match": "re", + "line": "test cases/failing/3 missing subdir/meson\\.build:3:0: ERROR: Non\\-existent build file 'missing[\\\\/]meson\\.build'" + } + ] +} diff --git a/meson/test cases/failing/30 invalid man extension/foo.a1 b/meson/test cases/failing/30 invalid man extension/foo.a1 new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/30 invalid man extension/meson.build b/meson/test cases/failing/30 invalid man extension/meson.build new file mode 100644 index 000000000..45eddca40 --- /dev/null +++ b/meson/test cases/failing/30 invalid man extension/meson.build @@ -0,0 +1,2 @@ +project('man install', 'c') +m1 = install_man('foo.a1') diff --git a/meson/test cases/failing/30 invalid man extension/test.json b/meson/test cases/failing/30 invalid man extension/test.json new file mode 100644 index 000000000..3e5f45de5 --- /dev/null +++ b/meson/test cases/failing/30 invalid man extension/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/30 invalid man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 9" + } + ] +} diff --git a/meson/test cases/failing/31 no man extension/foo b/meson/test cases/failing/31 no man extension/foo new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/31 no man extension/meson.build b/meson/test cases/failing/31 no man extension/meson.build new file mode 100644 index 000000000..bf835713f --- /dev/null +++ b/meson/test cases/failing/31 no man extension/meson.build @@ -0,0 +1,2 @@ +project('man install', 'c') +m1 = install_man('foo') diff --git a/meson/test cases/failing/31 no man extension/test.json b/meson/test cases/failing/31 no man extension/test.json new file mode 100644 index 000000000..0972da1f3 --- /dev/null +++ b/meson/test cases/failing/31 no man extension/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/31 no man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 9" + } + ] +} diff --git a/meson/test cases/failing/32 exe static shared/meson.build b/meson/test cases/failing/32 exe static shared/meson.build new file mode 100644 index 000000000..2ae512583 --- /dev/null +++ b/meson/test cases/failing/32 exe static shared/meson.build @@ -0,0 +1,11 @@ +project('statchain', 'c') + +host_system = host_machine.system() +if host_system == 'windows' or host_system == 'darwin' + error('MESON_SKIP_TEST test only fails on Linux and BSD') +endif + +statlib = static_library('stat', 'stat.c', pic : false) +shlib2 = shared_library('shr2', 'shlib2.c', link_with : statlib) +exe = executable('prog', 'prog.c', link_with : shlib2) +test('runtest', exe) diff --git a/meson/test cases/failing/32 exe static shared/prog.c b/meson/test cases/failing/32 exe static shared/prog.c new file mode 100644 index 
000000000..26603b694 --- /dev/null +++ b/meson/test cases/failing/32 exe static shared/prog.c @@ -0,0 +1,10 @@ +int shlibfunc2(); +int statlibfunc(); + +int main(int argc, char **argv) { + if (statlibfunc() != 42) + return 1; + if (shlibfunc2() != 24) + return 1; + return 0; +} diff --git a/meson/test cases/failing/32 exe static shared/shlib2.c b/meson/test cases/failing/32 exe static shared/shlib2.c new file mode 100644 index 000000000..5b68843dc --- /dev/null +++ b/meson/test cases/failing/32 exe static shared/shlib2.c @@ -0,0 +1,16 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int statlibfunc(void); + +int DLL_PUBLIC shlibfunc2(void) { + return 24; +} diff --git a/meson/test cases/failing/32 exe static shared/stat.c b/meson/test cases/failing/32 exe static shared/stat.c new file mode 100644 index 000000000..56ec66c67 --- /dev/null +++ b/meson/test cases/failing/32 exe static shared/stat.c @@ -0,0 +1,3 @@ +int statlibfunc() { + return 42; +} diff --git a/meson/test cases/failing/32 exe static shared/test.json b/meson/test cases/failing/32 exe static shared/test.json new file mode 100644 index 000000000..51d38046b --- /dev/null +++ b/meson/test cases/failing/32 exe static shared/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/32 exe static shared/meson.build:9:0: ERROR: Can't link non-PIC static library 'stat' into shared library 'shr2'. Use the 'pic' option to static_library to build with PIC." + } + ] +} diff --git a/meson/test cases/failing/33 non-root subproject/meson.build b/meson/test cases/failing/33 non-root subproject/meson.build new file mode 100644 index 000000000..c84dce7f9 --- /dev/null +++ b/meson/test cases/failing/33 non-root subproject/meson.build @@ -0,0 +1,3 @@ +project('non-root subproject', 'c') + +subdir('some') diff --git a/meson/test cases/failing/33 non-root subproject/some/meson.build b/meson/test cases/failing/33 non-root subproject/some/meson.build new file mode 100644 index 000000000..d82f45123 --- /dev/null +++ b/meson/test cases/failing/33 non-root subproject/some/meson.build @@ -0,0 +1 @@ +dependency('definitely-doesnt-exist', fallback : ['someproj', 'some_dep']) diff --git a/meson/test cases/failing/33 non-root subproject/test.json b/meson/test cases/failing/33 non-root subproject/test.json new file mode 100644 index 000000000..52baf6a65 --- /dev/null +++ b/meson/test cases/failing/33 non-root subproject/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/33 non-root subproject/some/meson.build:1:0: ERROR: Neither a subproject directory nor a someproj.wrap file was found." 
+ } + ] +} diff --git a/meson/test cases/failing/34 dependency not-required then required/meson.build b/meson/test cases/failing/34 dependency not-required then required/meson.build new file mode 100644 index 000000000..1796699e4 --- /dev/null +++ b/meson/test cases/failing/34 dependency not-required then required/meson.build @@ -0,0 +1,4 @@ +project('dep-test', 'c', version : '1.0') + +foo_dep = dependency('foo-bar-xyz-12.3', required : false) +bar_dep = dependency('foo-bar-xyz-12.3') diff --git a/meson/test cases/failing/34 dependency not-required then required/test.json b/meson/test cases/failing/34 dependency not-required then required/test.json new file mode 100644 index 000000000..3cf35f5f4 --- /dev/null +++ b/meson/test cases/failing/34 dependency not-required then required/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": ".*/meson\\.build:4:0: ERROR: (Pkg-config binary for machine MachineChoice\\.HOST not found\\. Giving up\\.|Dependency \"foo\\-bar\\-xyz\\-12\\.3\" not found, tried .*)" + } + ] +} diff --git a/meson/test cases/failing/35 project argument after target/exe.c b/meson/test cases/failing/35 project argument after target/exe.c new file mode 100644 index 000000000..11b7fad8e --- /dev/null +++ b/meson/test cases/failing/35 project argument after target/exe.c @@ -0,0 +1,3 @@ +int main(int argc, char **argv) { + return 0; +} diff --git a/meson/test cases/failing/35 project argument after target/meson.build b/meson/test cases/failing/35 project argument after target/meson.build new file mode 100644 index 000000000..5402c67fc --- /dev/null +++ b/meson/test cases/failing/35 project argument after target/meson.build @@ -0,0 +1,7 @@ +project('project argument after target failing', 'c', + version : '2.3.4', + license : 'mylicense') + +add_project_arguments('-DPROJECT_OPTION', language: 'c') +e = executable('exe', 'exe.c') +add_project_arguments('-DPROJECT_OPTION1', language: 'c') diff --git a/meson/test cases/failing/35 project argument after target/test.json b/meson/test cases/failing/35 project argument after target/test.json new file mode 100644 index 000000000..f5efd9bd8 --- /dev/null +++ b/meson/test cases/failing/35 project argument after target/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/35 project argument after target/meson.build:7:0: ERROR: Tried to use 'add_project_arguments' after a build target has been declared." + } + ] +} diff --git a/meson/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build b/meson/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build new file mode 100644 index 000000000..874b58177 --- /dev/null +++ b/meson/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build @@ -0,0 +1,7 @@ +project('impossible-dep-test', 'c', version : '1.0') + +if not dependency('zlib', required: false).found() + error('MESON_SKIP_TEST test requires zlib') +endif + +dependency('zlib', version : ['>=1.0', '<1.0']) diff --git a/meson/test cases/failing/36 pkgconfig dependency impossible conditions/test.json b/meson/test cases/failing/36 pkgconfig dependency impossible conditions/test.json new file mode 100644 index 000000000..6deddbe3a --- /dev/null +++ b/meson/test cases/failing/36 pkgconfig dependency impossible conditions/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/36 pkgconfig dependency impossible conditions/meson.build:7:0: ERROR: Dependency 'zlib' is required but not found." 
+ } + ] +} diff --git a/meson/test cases/failing/37 has function external dependency/meson.build b/meson/test cases/failing/37 has function external dependency/meson.build new file mode 100644 index 000000000..45a3bc246 --- /dev/null +++ b/meson/test cases/failing/37 has function external dependency/meson.build @@ -0,0 +1,8 @@ +project('has function ext dep', 'c') + +cc = meson.get_compiler('c') + +mylib = shared_library('mylib', 'mylib.c') +mylib_dep = declare_dependency(link_with : mylib) +# Only external dependencies can work here +cc.has_function('malloc', dependencies : mylib_dep) diff --git a/meson/test cases/failing/37 has function external dependency/mylib.c b/meson/test cases/failing/37 has function external dependency/mylib.c new file mode 100644 index 000000000..d9fbd342b --- /dev/null +++ b/meson/test cases/failing/37 has function external dependency/mylib.c @@ -0,0 +1 @@ +int testfunc(void) { return 0; } diff --git a/meson/test cases/failing/37 has function external dependency/test.json b/meson/test cases/failing/37 has function external dependency/test.json new file mode 100644 index 000000000..81d6f918c --- /dev/null +++ b/meson/test cases/failing/37 has function external dependency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/37 has function external dependency/meson.build:8:3: ERROR: Dependencies must be external dependencies" + } + ] +} diff --git a/meson/test cases/failing/38 libdir must be inside prefix/meson.build b/meson/test cases/failing/38 libdir must be inside prefix/meson.build new file mode 100644 index 000000000..4cce7f81c --- /dev/null +++ b/meson/test cases/failing/38 libdir must be inside prefix/meson.build @@ -0,0 +1,6 @@ +project('libdir prefix', 'c', + default_options : ['libdir=/opt/lib']) + +if host_machine.system() == 'windows' + error('MESON_SKIP_TEST: this test does not work on Windows since /foo is not absolute') +endif \ No newline at end of file diff --git a/meson/test cases/failing/38 libdir must be inside prefix/test.json b/meson/test cases/failing/38 libdir must be inside prefix/test.json new file mode 100644 index 000000000..d9256d1a2 --- /dev/null +++ b/meson/test cases/failing/38 libdir must be inside prefix/test.json @@ -0,0 +1,10 @@ +{ + "do_not_set_opts": [ + "libdir" + ], + "stdout": [ + { + "line": "test cases/failing/38 libdir must be inside prefix/meson.build:1:0: ERROR: The value of the 'libdir' option is '/opt/lib' which must be a subdir of the prefix '/usr'." 
+ } + ] +} diff --git a/meson/test cases/failing/39 prefix absolute/meson.build b/meson/test cases/failing/39 prefix absolute/meson.build new file mode 100644 index 000000000..e2863e79c --- /dev/null +++ b/meson/test cases/failing/39 prefix absolute/meson.build @@ -0,0 +1,2 @@ +project('prefix-abs', 'c', + default_options : ['prefix=some/path/notabs']) diff --git a/meson/test cases/failing/39 prefix absolute/test.json b/meson/test cases/failing/39 prefix absolute/test.json new file mode 100644 index 000000000..2770243ee --- /dev/null +++ b/meson/test cases/failing/39 prefix absolute/test.json @@ -0,0 +1,11 @@ +{ + "do_not_set_opts": [ + "prefix" + ], + "stdout": [ + { + "comment": "literal 'some/path/notabs' appears in output, irrespective of os.path.sep, as that's the prefix", + "line": "test cases/failing/39 prefix absolute/meson.build:1:0: ERROR: prefix value 'some/path/notabs' must be an absolute path" + } + ] +} diff --git a/meson/test cases/failing/4 missing meson.build/meson.build b/meson/test cases/failing/4 missing meson.build/meson.build new file mode 100644 index 000000000..18654be2b --- /dev/null +++ b/meson/test cases/failing/4 missing meson.build/meson.build @@ -0,0 +1,3 @@ +project('missing meson.build', 'c') + +subdir('subdir') diff --git a/meson/test cases/failing/4 missing meson.build/subdir/dummy.txt b/meson/test cases/failing/4 missing meson.build/subdir/dummy.txt new file mode 100644 index 000000000..03327bdee --- /dev/null +++ b/meson/test cases/failing/4 missing meson.build/subdir/dummy.txt @@ -0,0 +1 @@ +This needs to be here because Git can't handle empty dirs. diff --git a/meson/test cases/failing/4 missing meson.build/test.json b/meson/test cases/failing/4 missing meson.build/test.json new file mode 100644 index 000000000..3857090b1 --- /dev/null +++ b/meson/test cases/failing/4 missing meson.build/test.json @@ -0,0 +1,9 @@ +{ + "stdout": [ + { + "match": "re", + "comment": "'subdir/meson.build' gets transformed with os.path.sep separators", + "line": "test cases/failing/4 missing meson\\.build/meson\\.build:3:0: ERROR: Non\\-existent build file 'subdir[\\\\/]meson\\.build'" + } + ] +} diff --git a/meson/test cases/failing/40 kwarg assign/dummy.c b/meson/test cases/failing/40 kwarg assign/dummy.c new file mode 100644 index 000000000..16fcdd9f4 --- /dev/null +++ b/meson/test cases/failing/40 kwarg assign/dummy.c @@ -0,0 +1,3 @@ +const char* dummy() { + return "I do nothing."; +} diff --git a/meson/test cases/failing/40 kwarg assign/meson.build b/meson/test cases/failing/40 kwarg assign/meson.build new file mode 100644 index 000000000..c86786fd1 --- /dev/null +++ b/meson/test cases/failing/40 kwarg assign/meson.build @@ -0,0 +1,4 @@ +project('assign in kwarg', 'c') + +executable('prog', 'dummy.c', args = 'prog.c') + diff --git a/meson/test cases/failing/40 kwarg assign/prog.c b/meson/test cases/failing/40 kwarg assign/prog.c new file mode 100644 index 000000000..11b7fad8e --- /dev/null +++ b/meson/test cases/failing/40 kwarg assign/prog.c @@ -0,0 +1,3 @@ +int main(int argc, char **argv) { + return 0; +} diff --git a/meson/test cases/failing/40 kwarg assign/test.json b/meson/test cases/failing/40 kwarg assign/test.json new file mode 100644 index 000000000..671eb3fdb --- /dev/null +++ b/meson/test cases/failing/40 kwarg assign/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/40 kwarg assign/meson.build:3:0: ERROR: Tried to assign values inside an argument list." 
+ } + ] +} diff --git a/meson/test cases/failing/41 custom target plainname many inputs/1.txt b/meson/test cases/failing/41 custom target plainname many inputs/1.txt new file mode 100644 index 000000000..d00491fd7 --- /dev/null +++ b/meson/test cases/failing/41 custom target plainname many inputs/1.txt @@ -0,0 +1 @@ +1 diff --git a/meson/test cases/failing/41 custom target plainname many inputs/2.txt b/meson/test cases/failing/41 custom target plainname many inputs/2.txt new file mode 100644 index 000000000..0cfbf0888 --- /dev/null +++ b/meson/test cases/failing/41 custom target plainname many inputs/2.txt @@ -0,0 +1 @@ +2 diff --git a/meson/test cases/failing/41 custom target plainname many inputs/catfiles.py b/meson/test cases/failing/41 custom target plainname many inputs/catfiles.py new file mode 100644 index 000000000..1c53e24e7 --- /dev/null +++ b/meson/test cases/failing/41 custom target plainname many inputs/catfiles.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 + +import sys + +out = sys.argv[-1] +with open(out, 'wb') as o: + for infile in sys.argv[1:-1]: + with open(infile, 'rb') as f: + o.write(f.read()) diff --git a/meson/test cases/failing/41 custom target plainname many inputs/meson.build b/meson/test cases/failing/41 custom target plainname many inputs/meson.build new file mode 100644 index 000000000..1bcfc0672 --- /dev/null +++ b/meson/test cases/failing/41 custom target plainname many inputs/meson.build @@ -0,0 +1,8 @@ +project('plain name many inputs', 'c') + +catfiles = find_program('catfiles.py') + +custom_target('plainname-inputs', + input : ['1.txt', '2.txt'], + output : '@PLAINNAME@.dat', + command : [catfiles, '@INPUT@', '@OUTPUT@']) diff --git a/meson/test cases/failing/41 custom target plainname many inputs/test.json b/meson/test cases/failing/41 custom target plainname many inputs/test.json new file mode 100644 index 000000000..8c15cda5a --- /dev/null +++ b/meson/test cases/failing/41 custom target plainname many inputs/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/41 custom target plainname many inputs/meson.build:5:0: ERROR: Output cannot contain @PLAINNAME@ or @BASENAME@ when there is more than one input (we can't know which to use)" + } + ] +} diff --git a/meson/test cases/failing/42 custom target outputs not matching install_dirs/generator.py b/meson/test cases/failing/42 custom target outputs not matching install_dirs/generator.py new file mode 100755 index 000000000..4ac61795b --- /dev/null +++ b/meson/test cases/failing/42 custom target outputs not matching install_dirs/generator.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import sys, os + +if len(sys.argv) != 3: + print(sys.argv[0], '', '') + +name = sys.argv[1] +odir = sys.argv[2] + +with open(os.path.join(odir, name + '.h'), 'w') as f: + f.write('int func();\n') +with open(os.path.join(odir, name + '.c'), 'w') as f: + f.write('int main(int argc, char *argv[]) { return 0; }') +with open(os.path.join(odir, name + '.sh'), 'w') as f: + f.write('#!/bin/bash') diff --git a/meson/test cases/failing/42 custom target outputs not matching install_dirs/meson.build b/meson/test cases/failing/42 custom target outputs not matching install_dirs/meson.build new file mode 100644 index 000000000..765e23764 --- /dev/null +++ b/meson/test cases/failing/42 custom target outputs not matching install_dirs/meson.build @@ -0,0 +1,13 @@ +project('outputs not matching install_dirs', 'c') + +gen = find_program('generator.py') + +if meson.backend() != 'ninja' + error('MESON_SKIP_TEST test is only for 
the ninja backend') +endif + +custom_target('too-few-install-dirs', + output : ['toofew.h', 'toofew.c', 'toofew.sh'], + command : [gen, 'toofew', '@OUTDIR@'], + install : true, + install_dir : [join_paths(get_option('prefix'), get_option('includedir')), false]) diff --git a/meson/test cases/failing/42 custom target outputs not matching install_dirs/test.json b/meson/test cases/failing/42 custom target outputs not matching install_dirs/test.json new file mode 100644 index 000000000..f9e2ba781 --- /dev/null +++ b/meson/test cases/failing/42 custom target outputs not matching install_dirs/test.json @@ -0,0 +1,33 @@ +{ + "installed": [ + { + "type": "file", + "file": "usr/include/diff.h" + }, + { + "type": "file", + "file": "usr/include/first.h" + }, + { + "type": "file", + "file": "usr/bin/diff.sh" + }, + { + "type": "file", + "file": "usr/bin/second.sh" + }, + { + "type": "file", + "file": "opt/same.h" + }, + { + "type": "file", + "file": "opt/same.sh" + } + ], + "stdout": [ + { + "line": "ERROR: Target 'too-few-install-dirs' has 3 outputs: ['toofew.h', 'toofew.c', 'toofew.sh'], but only 2 \"install_dir\"s were found." + } + ] +} diff --git a/meson/test cases/failing/43 project name colon/meson.build b/meson/test cases/failing/43 project name colon/meson.build new file mode 100644 index 000000000..53e947ef2 --- /dev/null +++ b/meson/test cases/failing/43 project name colon/meson.build @@ -0,0 +1 @@ +project('name with :') diff --git a/meson/test cases/failing/43 project name colon/test.json b/meson/test cases/failing/43 project name colon/test.json new file mode 100644 index 000000000..7a5557473 --- /dev/null +++ b/meson/test cases/failing/43 project name colon/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/43 project name colon/meson.build:1:0: ERROR: Project name 'name with :' must not contain ':'" + } + ] +} diff --git a/meson/test cases/failing/44 abs subdir/bob/meson.build b/meson/test cases/failing/44 abs subdir/bob/meson.build new file mode 100644 index 000000000..7bbf4b284 --- /dev/null +++ b/meson/test cases/failing/44 abs subdir/bob/meson.build @@ -0,0 +1,2 @@ +# This file is never reached. +x = 3 diff --git a/meson/test cases/failing/44 abs subdir/meson.build b/meson/test cases/failing/44 abs subdir/meson.build new file mode 100644 index 000000000..8c23224a4 --- /dev/null +++ b/meson/test cases/failing/44 abs subdir/meson.build @@ -0,0 +1,6 @@ +project('abs subdir', 'c') + +# For some reason people insist on doing this, probably +# because Make has taught them to never rely on anything. +subdir(join_paths(meson.source_root(), 'bob')) + diff --git a/meson/test cases/failing/44 abs subdir/test.json b/meson/test cases/failing/44 abs subdir/test.json new file mode 100644 index 000000000..0aa56f692 --- /dev/null +++ b/meson/test cases/failing/44 abs subdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/44 abs subdir/meson.build:5:0: ERROR: Subdir argument must be a relative path." 
+ } + ] +} diff --git a/meson/test cases/failing/45 abspath to srcdir/meson.build b/meson/test cases/failing/45 abspath to srcdir/meson.build new file mode 100644 index 000000000..964a19b56 --- /dev/null +++ b/meson/test cases/failing/45 abspath to srcdir/meson.build @@ -0,0 +1,3 @@ +project('meson', 'c') + +include_directories(meson.current_source_dir()) diff --git a/meson/test cases/failing/45 abspath to srcdir/test.json b/meson/test cases/failing/45 abspath to srcdir/test.json new file mode 100644 index 000000000..177bac1bc --- /dev/null +++ b/meson/test cases/failing/45 abspath to srcdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/45 abspath to srcdir/meson.build:3:0: ERROR: Tried to form an absolute path to a source dir." + } + ] +} diff --git a/meson/test cases/failing/46 pkgconfig variables reserved/meson.build b/meson/test cases/failing/46 pkgconfig variables reserved/meson.build new file mode 100644 index 000000000..82ae995d4 --- /dev/null +++ b/meson/test cases/failing/46 pkgconfig variables reserved/meson.build @@ -0,0 +1,16 @@ +project('variables-reserved-test', 'c', version : '1.0') + +pkgg = import('pkgconfig') +lib = shared_library('simple', 'simple.c') +libver = '1.0' +h = install_headers('simple.h') + +pkgg.generate( + libraries : [lib, '-lz'], + subdirs : '.', + version : libver, + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.', + variables : [ 'prefix=/tmp/' ] +) diff --git a/meson/test cases/failing/46 pkgconfig variables reserved/simple.c b/meson/test cases/failing/46 pkgconfig variables reserved/simple.c new file mode 100644 index 000000000..e8a6d8330 --- /dev/null +++ b/meson/test cases/failing/46 pkgconfig variables reserved/simple.c @@ -0,0 +1,5 @@ +#include"simple.h" + +int simple_function() { + return 42; +} diff --git a/meson/test cases/failing/46 pkgconfig variables reserved/simple.h b/meson/test cases/failing/46 pkgconfig variables reserved/simple.h new file mode 100644 index 000000000..bb52e6d72 --- /dev/null +++ b/meson/test cases/failing/46 pkgconfig variables reserved/simple.h @@ -0,0 +1,6 @@ +#ifndef SIMPLE_H_ +#define SIMPLE_H_ + +int simple_function(); + +#endif diff --git a/meson/test cases/failing/46 pkgconfig variables reserved/test.json b/meson/test cases/failing/46 pkgconfig variables reserved/test.json new file mode 100644 index 000000000..b92ee17cd --- /dev/null +++ b/meson/test cases/failing/46 pkgconfig variables reserved/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/46 pkgconfig variables reserved/meson.build:8:5: ERROR: Variable \"prefix\" is reserved" + } + ] +} diff --git a/meson/test cases/failing/47 pkgconfig variables zero length/meson.build b/meson/test cases/failing/47 pkgconfig variables zero length/meson.build new file mode 100644 index 000000000..65d33445c --- /dev/null +++ b/meson/test cases/failing/47 pkgconfig variables zero length/meson.build @@ -0,0 +1,16 @@ +project('variables-zero-length-test', 'c', version : '1.0') + +pkgg = import('pkgconfig') +lib = shared_library('simple', 'simple.c') +libver = '1.0' +h = install_headers('simple.h') + +pkgg.generate( + libraries : [lib, '-lz'], + subdirs : '.', + version : libver, + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.', + variables : [ '=value' ] +) diff --git a/meson/test cases/failing/47 pkgconfig variables zero length/simple.c b/meson/test cases/failing/47 pkgconfig variables zero length/simple.c new file mode 100644 index 
000000000..e8a6d8330 --- /dev/null +++ b/meson/test cases/failing/47 pkgconfig variables zero length/simple.c @@ -0,0 +1,5 @@ +#include"simple.h" + +int simple_function() { + return 42; +} diff --git a/meson/test cases/failing/47 pkgconfig variables zero length/simple.h b/meson/test cases/failing/47 pkgconfig variables zero length/simple.h new file mode 100644 index 000000000..bb52e6d72 --- /dev/null +++ b/meson/test cases/failing/47 pkgconfig variables zero length/simple.h @@ -0,0 +1,6 @@ +#ifndef SIMPLE_H_ +#define SIMPLE_H_ + +int simple_function(); + +#endif diff --git a/meson/test cases/failing/47 pkgconfig variables zero length/test.json b/meson/test cases/failing/47 pkgconfig variables zero length/test.json new file mode 100644 index 000000000..39ffde4c6 --- /dev/null +++ b/meson/test cases/failing/47 pkgconfig variables zero length/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/47 pkgconfig variables zero length/meson.build:8:5: ERROR: Empty variable name or value" + } + ] +} diff --git a/meson/test cases/failing/48 pkgconfig variables zero length value/meson.build b/meson/test cases/failing/48 pkgconfig variables zero length value/meson.build new file mode 100644 index 000000000..33977b273 --- /dev/null +++ b/meson/test cases/failing/48 pkgconfig variables zero length value/meson.build @@ -0,0 +1,16 @@ +project('variables-zero-length-value-test', 'c', version : '1.0') + +pkgg = import('pkgconfig') +lib = shared_library('simple', 'simple.c') +libver = '1.0' +h = install_headers('simple.h') + +pkgg.generate( + libraries : [lib, '-lz'], + subdirs : '.', + version : libver, + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.', + variables : [ 'key=' ] +) diff --git a/meson/test cases/failing/48 pkgconfig variables zero length value/simple.c b/meson/test cases/failing/48 pkgconfig variables zero length value/simple.c new file mode 100644 index 000000000..e8a6d8330 --- /dev/null +++ b/meson/test cases/failing/48 pkgconfig variables zero length value/simple.c @@ -0,0 +1,5 @@ +#include"simple.h" + +int simple_function() { + return 42; +} diff --git a/meson/test cases/failing/48 pkgconfig variables zero length value/simple.h b/meson/test cases/failing/48 pkgconfig variables zero length value/simple.h new file mode 100644 index 000000000..bb52e6d72 --- /dev/null +++ b/meson/test cases/failing/48 pkgconfig variables zero length value/simple.h @@ -0,0 +1,6 @@ +#ifndef SIMPLE_H_ +#define SIMPLE_H_ + +int simple_function(); + +#endif diff --git a/meson/test cases/failing/48 pkgconfig variables zero length value/test.json b/meson/test cases/failing/48 pkgconfig variables zero length value/test.json new file mode 100644 index 000000000..8aa1bc58f --- /dev/null +++ b/meson/test cases/failing/48 pkgconfig variables zero length value/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/48 pkgconfig variables zero length value/meson.build:8:5: ERROR: Empty variable name or value" + } + ] +} diff --git a/meson/test cases/failing/49 pkgconfig variables not key value/meson.build b/meson/test cases/failing/49 pkgconfig variables not key value/meson.build new file mode 100644 index 000000000..02fa7376f --- /dev/null +++ b/meson/test cases/failing/49 pkgconfig variables not key value/meson.build @@ -0,0 +1,16 @@ +project('variables-not-key-value-test', 'c', version : '1.0') + +pkgg = import('pkgconfig') +lib = shared_library('simple', 'simple.c') +libver = '1.0' +h = install_headers('simple.h') + +pkgg.generate( + 
libraries : [lib, '-lz'], + subdirs : '.', + version : libver, + name : 'libsimple', + filebase : 'simple', + description : 'A simple demo library.', + variables : [ 'this_should_be_key_value' ] +) diff --git a/meson/test cases/failing/49 pkgconfig variables not key value/simple.c b/meson/test cases/failing/49 pkgconfig variables not key value/simple.c new file mode 100644 index 000000000..e8a6d8330 --- /dev/null +++ b/meson/test cases/failing/49 pkgconfig variables not key value/simple.c @@ -0,0 +1,5 @@ +#include"simple.h" + +int simple_function() { + return 42; +} diff --git a/meson/test cases/failing/49 pkgconfig variables not key value/simple.h b/meson/test cases/failing/49 pkgconfig variables not key value/simple.h new file mode 100644 index 000000000..bb52e6d72 --- /dev/null +++ b/meson/test cases/failing/49 pkgconfig variables not key value/simple.h @@ -0,0 +1,6 @@ +#ifndef SIMPLE_H_ +#define SIMPLE_H_ + +int simple_function(); + +#endif diff --git a/meson/test cases/failing/49 pkgconfig variables not key value/test.json b/meson/test cases/failing/49 pkgconfig variables not key value/test.json new file mode 100644 index 000000000..082bd7955 --- /dev/null +++ b/meson/test cases/failing/49 pkgconfig variables not key value/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/49 pkgconfig variables not key value/meson.build:8:5: ERROR: Variable 'this_should_be_key_value' must have a value separated by equals sign." + } + ] +} diff --git a/meson/test cases/failing/5 misplaced option/meson.build b/meson/test cases/failing/5 misplaced option/meson.build new file mode 100644 index 000000000..883de0f02 --- /dev/null +++ b/meson/test cases/failing/5 misplaced option/meson.build @@ -0,0 +1,3 @@ +project('misplaced option', 'c') + +option('dummy', type : 'string') diff --git a/meson/test cases/failing/5 misplaced option/test.json b/meson/test cases/failing/5 misplaced option/test.json new file mode 100644 index 000000000..12afdf025 --- /dev/null +++ b/meson/test cases/failing/5 misplaced option/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/5 misplaced option/meson.build:3:0: ERROR: Tried to call option() in build description file. All options must be in the option file." + } + ] +} diff --git a/meson/test cases/failing/50 executable comparison/meson.build b/meson/test cases/failing/50 executable comparison/meson.build new file mode 100644 index 000000000..041bcf3d3 --- /dev/null +++ b/meson/test cases/failing/50 executable comparison/meson.build @@ -0,0 +1,6 @@ +project('executable comparison', 'c') + +exe1 = executable('prog1', sources : 'prog.c') +exe2 = executable('prog2', sources : 'prog.c') + +assert(exe1 < exe2, 'should fail') diff --git a/meson/test cases/failing/50 executable comparison/prog.c b/meson/test cases/failing/50 executable comparison/prog.c new file mode 100644 index 000000000..0314ff17b --- /dev/null +++ b/meson/test cases/failing/50 executable comparison/prog.c @@ -0,0 +1 @@ +int main(int argc, char **argv) { return 0; } diff --git a/meson/test cases/failing/50 executable comparison/test.json b/meson/test cases/failing/50 executable comparison/test.json new file mode 100644 index 000000000..585b38283 --- /dev/null +++ b/meson/test cases/failing/50 executable comparison/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/50 executable comparison/meson.build:6:0: ERROR: exe1 can only be compared for equality." 
+ } + ] +} diff --git a/meson/test cases/failing/51 inconsistent comparison/meson.build b/meson/test cases/failing/51 inconsistent comparison/meson.build new file mode 100644 index 000000000..7694c2cd2 --- /dev/null +++ b/meson/test cases/failing/51 inconsistent comparison/meson.build @@ -0,0 +1,7 @@ +project('kwarg before arg', 'c') + +# All of these should fail, though only the first one will error out if +# everything's working correctly. +assert([] < 'st', 'should fail') +assert([] < 1, 'should fail') +assert(2 < 'st', 'should fail') diff --git a/meson/test cases/failing/51 inconsistent comparison/test.json b/meson/test cases/failing/51 inconsistent comparison/test.json new file mode 100644 index 000000000..5867f0a05 --- /dev/null +++ b/meson/test cases/failing/51 inconsistent comparison/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/51 inconsistent comparison/meson.build:5:0: ERROR: Values of different types (list, str) cannot be compared using <." + } + ] +} diff --git a/meson/test cases/failing/52 slashname/meson.build b/meson/test cases/failing/52 slashname/meson.build new file mode 100644 index 000000000..bba5301bf --- /dev/null +++ b/meson/test cases/failing/52 slashname/meson.build @@ -0,0 +1,12 @@ +project('slashname', 'c') + +# Traverse this subdir so the corresponding dir +# is created inside the build dir. +subdir('sub') + +# Try to create an executable that would go in the "sub" dir +# inside the build dir. This is prohibited. +executable('sub/prog', pf) + +error('Re-enable me once slash in name is finally prohibited.') + diff --git a/meson/test cases/failing/52 slashname/sub/meson.build b/meson/test cases/failing/52 slashname/sub/meson.build new file mode 100644 index 000000000..e10489001 --- /dev/null +++ b/meson/test cases/failing/52 slashname/sub/meson.build @@ -0,0 +1,2 @@ +pf = files('prog.c') + diff --git a/meson/test cases/failing/52 slashname/sub/prog.c b/meson/test cases/failing/52 slashname/sub/prog.c new file mode 100644 index 000000000..722de0abe --- /dev/null +++ b/meson/test cases/failing/52 slashname/sub/prog.c @@ -0,0 +1,6 @@ +#include<stdio.h> + +int main(int argc, char **argv) { + printf("I should not be run ever.\n"); + return 1; +} diff --git a/meson/test cases/failing/52 slashname/test.json b/meson/test cases/failing/52 slashname/test.json new file mode 100644 index 000000000..180400ae7 --- /dev/null +++ b/meson/test cases/failing/52 slashname/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/52 slashname/meson.build:11:0: ERROR: Problem encountered: Re-enable me once slash in name is finally prohibited."
+ } + ] +} diff --git a/meson/test cases/failing/53 reserved meson prefix/meson-foo/meson.build b/meson/test cases/failing/53 reserved meson prefix/meson-foo/meson.build new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/53 reserved meson prefix/meson.build b/meson/test cases/failing/53 reserved meson prefix/meson.build new file mode 100644 index 000000000..1339035ad --- /dev/null +++ b/meson/test cases/failing/53 reserved meson prefix/meson.build @@ -0,0 +1,3 @@ +project('test') + +subdir('meson-foo') diff --git a/meson/test cases/failing/53 reserved meson prefix/test.json b/meson/test cases/failing/53 reserved meson prefix/test.json new file mode 100644 index 000000000..502d96af9 --- /dev/null +++ b/meson/test cases/failing/53 reserved meson prefix/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/53 reserved meson prefix/meson.build:3:0: ERROR: The \"meson-\" prefix is reserved and cannot be used for top-level subdir()." + } + ] +} diff --git a/meson/test cases/failing/54 wrong shared crate type/foo.rs b/meson/test cases/failing/54 wrong shared crate type/foo.rs new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/54 wrong shared crate type/meson.build b/meson/test cases/failing/54 wrong shared crate type/meson.build new file mode 100644 index 000000000..b9fcad4e5 --- /dev/null +++ b/meson/test cases/failing/54 wrong shared crate type/meson.build @@ -0,0 +1,7 @@ +project('test') + +if not add_languages('rust', required: false) + error('MESON_SKIP_TEST test requires rust compiler') +endif + +shared_library('test', 'foo.rs', rust_crate_type : 'staticlib') diff --git a/meson/test cases/failing/54 wrong shared crate type/test.json b/meson/test cases/failing/54 wrong shared crate type/test.json new file mode 100644 index 000000000..5cced6fad --- /dev/null +++ b/meson/test cases/failing/54 wrong shared crate type/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/54 wrong shared crate type/meson.build:7:0: ERROR: Crate type \"staticlib\" invalid for dynamic libraries; must be \"dylib\" or \"cdylib\"" + } + ] +} diff --git a/meson/test cases/failing/55 wrong static crate type/foo.rs b/meson/test cases/failing/55 wrong static crate type/foo.rs new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/55 wrong static crate type/meson.build b/meson/test cases/failing/55 wrong static crate type/meson.build new file mode 100644 index 000000000..109907f96 --- /dev/null +++ b/meson/test cases/failing/55 wrong static crate type/meson.build @@ -0,0 +1,7 @@ +project('test') + +if not add_languages('rust', required: false) + error('MESON_SKIP_TEST test requires rust compiler') +endif + +static_library('test', 'foo.rs', rust_crate_type : 'cdylib') diff --git a/meson/test cases/failing/55 wrong static crate type/test.json b/meson/test cases/failing/55 wrong static crate type/test.json new file mode 100644 index 000000000..7073f7bf0 --- /dev/null +++ b/meson/test cases/failing/55 wrong static crate type/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/55 wrong static crate type/meson.build:7:0: ERROR: Crate type \"cdylib\" invalid for static libraries; must be \"rlib\" or \"staticlib\"" + } + ] +} diff --git a/meson/test cases/failing/56 or on new line/meson.build b/meson/test cases/failing/56 or on new line/meson.build new file mode 100644 index 000000000..12f27058d --- /dev/null +++ b/meson/test cases/failing/56 or on new 
line/meson.build @@ -0,0 +1,7 @@ +project('silent_or', 'c') + +if get_option('foo') == 'true' + or get_option('foo') == 'auto' +else + message('If this message is printed then something is wrong. The or above should give a syntax error.') +endif diff --git a/meson/test cases/failing/56 or on new line/meson_options.txt b/meson/test cases/failing/56 or on new line/meson_options.txt new file mode 100644 index 000000000..3302cf4ec --- /dev/null +++ b/meson/test cases/failing/56 or on new line/meson_options.txt @@ -0,0 +1 @@ +option('foo', type: 'combo', choices: ['true', 'false', 'auto'], value: 'auto') diff --git a/meson/test cases/failing/56 or on new line/test.json b/meson/test cases/failing/56 or on new line/test.json new file mode 100644 index 000000000..c55cee6eb --- /dev/null +++ b/meson/test cases/failing/56 or on new line/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/56 or on new line/meson.build:4:8: ERROR: Invalid or clause." + } + ] +} diff --git a/meson/test cases/failing/57 link with executable/meson.build b/meson/test cases/failing/57 link with executable/meson.build new file mode 100644 index 000000000..186b3e595 --- /dev/null +++ b/meson/test cases/failing/57 link with executable/meson.build @@ -0,0 +1,4 @@ +project('link with exe', 'c') + +e = executable('prog', 'prog.c') +m = shared_module('module', 'module.c', link_with: e) diff --git a/meson/test cases/failing/57 link with executable/module.c b/meson/test cases/failing/57 link with executable/module.c new file mode 100644 index 000000000..dc0124a24 --- /dev/null +++ b/meson/test cases/failing/57 link with executable/module.c @@ -0,0 +1,4 @@ + +int func(void) { + return 42; +} diff --git a/meson/test cases/failing/57 link with executable/prog.c b/meson/test cases/failing/57 link with executable/prog.c new file mode 100644 index 000000000..f3836d7ba --- /dev/null +++ b/meson/test cases/failing/57 link with executable/prog.c @@ -0,0 +1,5 @@ +int +main (int argc, char **argv) +{ + return 0; +} diff --git a/meson/test cases/failing/57 link with executable/test.json b/meson/test cases/failing/57 link with executable/test.json new file mode 100644 index 000000000..c835a9033 --- /dev/null +++ b/meson/test cases/failing/57 link with executable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/57 link with executable/meson.build:4:0: ERROR: Link target 'prog' is not linkable." + } + ] +} diff --git a/meson/test cases/failing/58 assign custom target index/meson.build b/meson/test cases/failing/58 assign custom target index/meson.build new file mode 100644 index 000000000..7f2a820b8 --- /dev/null +++ b/meson/test cases/failing/58 assign custom target index/meson.build @@ -0,0 +1,24 @@ +# Copyright © 2017 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +prog_python = import('python3').find_python() + +target = custom_target( + 'target', + output : ['1', '2'], + command : [prog_python, '-c', + 'with open("1", "w") as f: f.write("foo"); with open("2", "w") as f: f.write("foo")'], +) + +target[0] = 'foo' diff --git a/meson/test cases/failing/58 assign custom target index/test.json b/meson/test cases/failing/58 assign custom target index/test.json new file mode 100644 index 000000000..b5aa3263c --- /dev/null +++ b/meson/test cases/failing/58 assign custom target index/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/58 assign custom target index/meson.build:24:0: ERROR: Assignment target must be an id." + } + ] +} diff --git a/meson/test cases/failing/59 getoption prefix/meson.build b/meson/test cases/failing/59 getoption prefix/meson.build new file mode 100644 index 000000000..8f85cff2a --- /dev/null +++ b/meson/test cases/failing/59 getoption prefix/meson.build @@ -0,0 +1,5 @@ +project('getopt prefix') + +subproject('abc') + +get_option('abc:foo') diff --git a/meson/test cases/failing/59 getoption prefix/subprojects/abc/meson.build b/meson/test cases/failing/59 getoption prefix/subprojects/abc/meson.build new file mode 100644 index 000000000..aa9c3df0f --- /dev/null +++ b/meson/test cases/failing/59 getoption prefix/subprojects/abc/meson.build @@ -0,0 +1 @@ +project('abc', 'c') diff --git a/meson/test cases/failing/59 getoption prefix/subprojects/abc/meson_options.txt b/meson/test cases/failing/59 getoption prefix/subprojects/abc/meson_options.txt new file mode 100644 index 000000000..89e624e24 --- /dev/null +++ b/meson/test cases/failing/59 getoption prefix/subprojects/abc/meson_options.txt @@ -0,0 +1 @@ +option('foo', type : 'boolean') diff --git a/meson/test cases/failing/59 getoption prefix/test.json b/meson/test cases/failing/59 getoption prefix/test.json new file mode 100644 index 000000000..4485e0465 --- /dev/null +++ b/meson/test cases/failing/59 getoption prefix/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/59 getoption prefix/meson.build:5:0: ERROR: Having a colon in option name is forbidden, projects are not allowed to directly access options of other subprojects." + } + ] +} diff --git a/meson/test cases/failing/6 missing incdir/meson.build b/meson/test cases/failing/6 missing incdir/meson.build new file mode 100644 index 000000000..617ee77bb --- /dev/null +++ b/meson/test cases/failing/6 missing incdir/meson.build @@ -0,0 +1,3 @@ +project('missing incdir', 'c') + +inc = include_directories('nosuchdir') diff --git a/meson/test cases/failing/6 missing incdir/test.json b/meson/test cases/failing/6 missing incdir/test.json new file mode 100644 index 000000000..172d8a9e2 --- /dev/null +++ b/meson/test cases/failing/6 missing incdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/6 missing incdir/meson.build:3:0: ERROR: Include dir nosuchdir does not exist." 
+ } + ] +} diff --git a/meson/test cases/failing/60 bad option argument/meson.build b/meson/test cases/failing/60 bad option argument/meson.build new file mode 100644 index 000000000..5219cfb96 --- /dev/null +++ b/meson/test cases/failing/60 bad option argument/meson.build @@ -0,0 +1,3 @@ +project('bad option') + +get_option('name') diff --git a/meson/test cases/failing/60 bad option argument/meson_options.txt b/meson/test cases/failing/60 bad option argument/meson_options.txt new file mode 100644 index 000000000..de1fff6fb --- /dev/null +++ b/meson/test cases/failing/60 bad option argument/meson_options.txt @@ -0,0 +1 @@ +option('name', type : 'string', vaule : 'foo') diff --git a/meson/test cases/failing/60 bad option argument/test.json b/meson/test cases/failing/60 bad option argument/test.json new file mode 100644 index 000000000..a2b27483e --- /dev/null +++ b/meson/test cases/failing/60 bad option argument/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/60 bad option argument/meson_options.txt:1:0: ERROR: Invalid kwargs for option \"name\": \"vaule\"" + } + ] +} diff --git a/meson/test cases/failing/61 subproj filegrab/meson.build b/meson/test cases/failing/61 subproj filegrab/meson.build new file mode 100644 index 000000000..f38d6c74c --- /dev/null +++ b/meson/test cases/failing/61 subproj filegrab/meson.build @@ -0,0 +1,5 @@ +project('mainproj', 'c') + +# Try to grab a file from a parent project. + +subproject('a') diff --git a/meson/test cases/failing/61 subproj filegrab/prog.c b/meson/test cases/failing/61 subproj filegrab/prog.c new file mode 100644 index 000000000..0314ff17b --- /dev/null +++ b/meson/test cases/failing/61 subproj filegrab/prog.c @@ -0,0 +1 @@ +int main(int argc, char **argv) { return 0; } diff --git a/meson/test cases/failing/61 subproj filegrab/subprojects/a/meson.build b/meson/test cases/failing/61 subproj filegrab/subprojects/a/meson.build new file mode 100644 index 000000000..80b988804 --- /dev/null +++ b/meson/test cases/failing/61 subproj filegrab/subprojects/a/meson.build @@ -0,0 +1,3 @@ +project('a', 'c') + +executable('prog', '../../prog.c') diff --git a/meson/test cases/failing/61 subproj filegrab/test.json b/meson/test cases/failing/61 subproj filegrab/test.json new file mode 100644 index 000000000..600e0d515 --- /dev/null +++ b/meson/test cases/failing/61 subproj filegrab/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/61 subproj filegrab/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file prog.c outside current (sub)project." + } + ] +} diff --git a/meson/test cases/failing/62 grab subproj/meson.build b/meson/test cases/failing/62 grab subproj/meson.build new file mode 100644 index 000000000..30fc69093 --- /dev/null +++ b/meson/test cases/failing/62 grab subproj/meson.build @@ -0,0 +1,7 @@ +project('grabber', 'c') + +# Try to grab a file from a child subproject. 
+ +subproject('foo') + +executable('foo', 'subprojects/foo/sub.c') diff --git a/meson/test cases/failing/62 grab subproj/subprojects/foo/meson.build b/meson/test cases/failing/62 grab subproj/subprojects/foo/meson.build new file mode 100644 index 000000000..b346f6d9a --- /dev/null +++ b/meson/test cases/failing/62 grab subproj/subprojects/foo/meson.build @@ -0,0 +1,3 @@ +project('foo', 'c') + +message('I do nothing.') diff --git a/meson/test cases/failing/62 grab subproj/subprojects/foo/sub.c b/meson/test cases/failing/62 grab subproj/subprojects/foo/sub.c new file mode 100644 index 000000000..a94b1f5ad --- /dev/null +++ b/meson/test cases/failing/62 grab subproj/subprojects/foo/sub.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(int argc, char **argv) { + printf("I am a subproject executable file.\n"); + return 0; +} diff --git a/meson/test cases/failing/62 grab subproj/test.json b/meson/test cases/failing/62 grab subproj/test.json new file mode 100644 index 000000000..1503ad7c6 --- /dev/null +++ b/meson/test cases/failing/62 grab subproj/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/62 grab subproj/meson.build:7:0: ERROR: Sandbox violation: Tried to grab file sub.c from a nested subproject." + } + ] +} diff --git a/meson/test cases/failing/63 grab sibling/meson.build b/meson/test cases/failing/63 grab sibling/meson.build new file mode 100644 index 000000000..60b926a95 --- /dev/null +++ b/meson/test cases/failing/63 grab sibling/meson.build @@ -0,0 +1,3 @@ +project('master', 'c') + +subproject('a') diff --git a/meson/test cases/failing/63 grab sibling/subprojects/a/meson.build b/meson/test cases/failing/63 grab sibling/subprojects/a/meson.build new file mode 100644 index 000000000..6dd9f61fc --- /dev/null +++ b/meson/test cases/failing/63 grab sibling/subprojects/a/meson.build @@ -0,0 +1,3 @@ +project('a', 'c') + +executable('sneaky', '../b/sneaky.c') diff --git a/meson/test cases/failing/63 grab sibling/subprojects/b/meson.build b/meson/test cases/failing/63 grab sibling/subprojects/b/meson.build new file mode 100644 index 000000000..7c70fe55b --- /dev/null +++ b/meson/test cases/failing/63 grab sibling/subprojects/b/meson.build @@ -0,0 +1,3 @@ +project('b', 'c') + +message('I do nothing.') diff --git a/meson/test cases/failing/63 grab sibling/subprojects/b/sneaky.c b/meson/test cases/failing/63 grab sibling/subprojects/b/sneaky.c new file mode 100644 index 000000000..46718c6cc --- /dev/null +++ b/meson/test cases/failing/63 grab sibling/subprojects/b/sneaky.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(int argc, char **argv) { + printf("I can only come into existence via trickery.\n"); + return 0; +} diff --git a/meson/test cases/failing/63 grab sibling/test.json b/meson/test cases/failing/63 grab sibling/test.json new file mode 100644 index 000000000..91715c7cf --- /dev/null +++ b/meson/test cases/failing/63 grab sibling/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/63 grab sibling/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file sneaky.c outside current (sub)project."
+ } + ] +} diff --git a/meson/test cases/failing/64 string as link target/meson.build b/meson/test cases/failing/64 string as link target/meson.build new file mode 100644 index 000000000..cb83fff6a --- /dev/null +++ b/meson/test cases/failing/64 string as link target/meson.build @@ -0,0 +1,2 @@ +project('string as link argument', 'c') +executable('myprog', 'prog.c', link_with: [ '' ]) diff --git a/meson/test cases/failing/64 string as link target/prog.c b/meson/test cases/failing/64 string as link target/prog.c new file mode 100644 index 000000000..0314ff17b --- /dev/null +++ b/meson/test cases/failing/64 string as link target/prog.c @@ -0,0 +1 @@ +int main(int argc, char **argv) { return 0; } diff --git a/meson/test cases/failing/64 string as link target/test.json b/meson/test cases/failing/64 string as link target/test.json new file mode 100644 index 000000000..b07a2ea09 --- /dev/null +++ b/meson/test cases/failing/64 string as link target/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/64 string as link target/meson.build:2:0: ERROR: '' is not a target." + } + ] +} diff --git a/meson/test cases/failing/65 dependency not-found and required/meson.build b/meson/test cases/failing/65 dependency not-found and required/meson.build new file mode 100644 index 000000000..1ce574738 --- /dev/null +++ b/meson/test cases/failing/65 dependency not-found and required/meson.build @@ -0,0 +1,2 @@ +project('dep-test') +dep = dependency('', required:true) diff --git a/meson/test cases/failing/65 dependency not-found and required/test.json b/meson/test cases/failing/65 dependency not-found and required/test.json new file mode 100644 index 000000000..ff2096987 --- /dev/null +++ b/meson/test cases/failing/65 dependency not-found and required/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/65 dependency not-found and required/meson.build:2:0: ERROR: Dependency is required but has no candidates." 
+ } ] +} diff --git a/meson/test cases/failing/66 subproj different versions/main.c b/meson/test cases/failing/66 subproj different versions/main.c new file mode 100644 index 000000000..8793c623a --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/main.c @@ -0,0 +1,9 @@ +#include <stdio.h> +#include "a.h" +#include "b.h" + +int main(int argc, char **argv) { + int life = a_fun() + b_fun(); + printf("%d\n", life); + return 0; +} diff --git a/meson/test cases/failing/66 subproj different versions/meson.build b/meson/test cases/failing/66 subproj different versions/meson.build new file mode 100644 index 000000000..e964e423e --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/meson.build @@ -0,0 +1,9 @@ +project('super', 'c') + +# A will use version 1 of C +a_dep = dependency('a', fallback: ['a', 'a_dep']) + +# B will fail because it requests version 2 of C +b_dep = dependency('b', fallback: ['b', 'b_dep']) + +main = executable('main', files('main.c'), dependencies: [a_dep, b_dep]) diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/a/a.c b/meson/test cases/failing/66 subproj different versions/subprojects/a/a.c new file mode 100644 index 000000000..cd41a6588 --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/a/a.c @@ -0,0 +1,5 @@ +#include "c.h" + +int a_fun() { + return c_fun(); +} diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/a/a.h b/meson/test cases/failing/66 subproj different versions/subprojects/a/a.h new file mode 100644 index 000000000..8f1d49eda --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/a/a.h @@ -0,0 +1 @@ +int a_fun(); diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/a/meson.build b/meson/test cases/failing/66 subproj different versions/subprojects/a/meson.build new file mode 100644 index 000000000..e84182a07 --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/a/meson.build @@ -0,0 +1,11 @@ +project('a', 'c') + +c_dep = dependency('c', version:'1', fallback: ['c', 'c_dep']) + +alib = library('a', 'a.c', + dependencies: c_dep) + +a_dep = declare_dependency( + link_with: alib, + include_directories: include_directories('.'), +) diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/b/b.c b/meson/test cases/failing/66 subproj different versions/subprojects/b/b.c new file mode 100644 index 000000000..f85f8c3f8 --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/b/b.c @@ -0,0 +1,5 @@ +#include "c.h" + +int b_fun(){ +return c_fun(); +} diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/b/b.h b/meson/test cases/failing/66 subproj different versions/subprojects/b/b.h new file mode 100644 index 000000000..eced786a0 --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/b/b.h @@ -0,0 +1 @@ +int b_fun(); diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/b/meson.build b/meson/test cases/failing/66 subproj different versions/subprojects/b/meson.build new file mode 100644 index 000000000..0398340e6 --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/b/meson.build @@ -0,0 +1,11 @@ +project('b', 'c') + +c_dep = dependency('c', version:'2', fallback: ['c', 'c_dep']) + +blib = library('b', 'b.c', + dependencies: c_dep) + +b_dep = declare_dependency( + link_with: blib, +
include_directories: include_directories('.'), +) diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/c/c.h b/meson/test cases/failing/66 subproj different versions/subprojects/c/c.h new file mode 100644 index 000000000..2b15f607c --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/c/c.h @@ -0,0 +1,3 @@ +static int c_fun(){ + return 3; +} diff --git a/meson/test cases/failing/66 subproj different versions/subprojects/c/meson.build b/meson/test cases/failing/66 subproj different versions/subprojects/c/meson.build new file mode 100644 index 000000000..7184933b2 --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/subprojects/c/meson.build @@ -0,0 +1,5 @@ +project('c', 'c', version:'1') + +c_dep = declare_dependency( + include_directories: include_directories('.') +) diff --git a/meson/test cases/failing/66 subproj different versions/test.json b/meson/test cases/failing/66 subproj different versions/test.json new file mode 100644 index 000000000..19e7b4a49 --- /dev/null +++ b/meson/test cases/failing/66 subproj different versions/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/66 subproj different versions/subprojects/b/meson.build:3:0: ERROR: Dependency 'c' is required but not found." + } + ] +} diff --git a/meson/test cases/failing/67 wrong boost module/meson.build b/meson/test cases/failing/67 wrong boost module/meson.build new file mode 100644 index 000000000..937e58749 --- /dev/null +++ b/meson/test cases/failing/67 wrong boost module/meson.build @@ -0,0 +1,9 @@ +project('boosttest', 'cpp', + default_options : ['cpp_std=c++11']) + +if not dependency('boost', required: false).found() + error('MESON_SKIP_TEST test requires boost') +endif + +# abc doesn't exist +linkdep = dependency('boost', modules : ['thread', 'system', 'test', 'abc']) diff --git a/meson/test cases/failing/67 wrong boost module/test.json b/meson/test cases/failing/67 wrong boost module/test.json new file mode 100644 index 000000000..5f9b21fc8 --- /dev/null +++ b/meson/test cases/failing/67 wrong boost module/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/67 wrong boost module/meson.build:9:0: ERROR: Dependency \"boost\" not found, tried system" + } + ] +} diff --git a/meson/test cases/failing/68 install_data rename bad size/file1.txt b/meson/test cases/failing/68 install_data rename bad size/file1.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/68 install_data rename bad size/file2.txt b/meson/test cases/failing/68 install_data rename bad size/file2.txt new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/68 install_data rename bad size/meson.build b/meson/test cases/failing/68 install_data rename bad size/meson.build new file mode 100644 index 000000000..c7cde087d --- /dev/null +++ b/meson/test cases/failing/68 install_data rename bad size/meson.build @@ -0,0 +1,3 @@ +project('data install test', 'c') + +install_data(['file1.txt', 'file2.txt'], rename : 'just one name') diff --git a/meson/test cases/failing/68 install_data rename bad size/test.json b/meson/test cases/failing/68 install_data rename bad size/test.json new file mode 100644 index 000000000..a3cbb1bf4 --- /dev/null +++ b/meson/test cases/failing/68 install_data rename bad size/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/68 install_data rename bad size/meson.build:3:0: ERROR: \"rename\" and \"sources\" argument 
lists must be the same length if \"rename\" is given. Rename has 1 elements and sources has 2." + } + ] +} diff --git a/meson/test cases/failing/69 skip only subdir/meson.build b/meson/test cases/failing/69 skip only subdir/meson.build new file mode 100644 index 000000000..4832bd49c --- /dev/null +++ b/meson/test cases/failing/69 skip only subdir/meson.build @@ -0,0 +1,8 @@ +# Check that skip_rest only exits subdir, not the whole script. +# Should create an error because main.cpp does not exists. +project('example exit', 'cpp') + +subdir('subdir') + +message('Good') +executable('main', 'main.cpp') diff --git a/meson/test cases/failing/69 skip only subdir/subdir/meson.build b/meson/test cases/failing/69 skip only subdir/subdir/meson.build new file mode 100644 index 000000000..1ba447b22 --- /dev/null +++ b/meson/test cases/failing/69 skip only subdir/subdir/meson.build @@ -0,0 +1,3 @@ +subdir_done() + +error('Unreachable') diff --git a/meson/test cases/failing/69 skip only subdir/test.json b/meson/test cases/failing/69 skip only subdir/test.json new file mode 100644 index 000000000..134971a39 --- /dev/null +++ b/meson/test cases/failing/69 skip only subdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/69 skip only subdir/meson.build:8:0: ERROR: File main.cpp does not exist." + } + ] +} diff --git a/meson/test cases/failing/7 go to subproject/meson.build b/meson/test cases/failing/7 go to subproject/meson.build new file mode 100644 index 000000000..205cc5ecf --- /dev/null +++ b/meson/test cases/failing/7 go to subproject/meson.build @@ -0,0 +1,3 @@ +project('fff', 'c') + +subdir('subprojects') diff --git a/meson/test cases/failing/7 go to subproject/subprojects/meson.build b/meson/test cases/failing/7 go to subproject/subprojects/meson.build new file mode 100644 index 000000000..120344f79 --- /dev/null +++ b/meson/test cases/failing/7 go to subproject/subprojects/meson.build @@ -0,0 +1 @@ +x = 'x' diff --git a/meson/test cases/failing/7 go to subproject/test.json b/meson/test cases/failing/7 go to subproject/test.json new file mode 100644 index 000000000..c25475715 --- /dev/null +++ b/meson/test cases/failing/7 go to subproject/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/7 go to subproject/meson.build:3:0: ERROR: Must not go into subprojects dir with subdir(), use subproject() instead." 
+ } + ] +} diff --git a/meson/test cases/failing/70 dual override/meson.build b/meson/test cases/failing/70 dual override/meson.build new file mode 100644 index 000000000..e5f86baaf --- /dev/null +++ b/meson/test cases/failing/70 dual override/meson.build @@ -0,0 +1,5 @@ +project('yo dawg', 'c') + +p = find_program('overrides.py') +meson.override_find_program('override', p) +meson.override_find_program('override', p) diff --git a/meson/test cases/failing/70 dual override/overrides.py b/meson/test cases/failing/70 dual override/overrides.py new file mode 100644 index 000000000..49e9b7ad6 --- /dev/null +++ b/meson/test cases/failing/70 dual override/overrides.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python3 + +print('Yo dawg, we put overrides in your overrides,') +print('so now you can override when you override.') diff --git a/meson/test cases/failing/70 dual override/test.json b/meson/test cases/failing/70 dual override/test.json new file mode 100644 index 000000000..e955dc0fb --- /dev/null +++ b/meson/test cases/failing/70 dual override/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/70 dual override/meson.build:5:6: ERROR: Tried to override executable \"override\" which has already been overridden." + } + ] +} diff --git a/meson/test cases/failing/71 override used/meson.build b/meson/test cases/failing/71 override used/meson.build new file mode 100644 index 000000000..61885bba1 --- /dev/null +++ b/meson/test cases/failing/71 override used/meson.build @@ -0,0 +1,5 @@ +project('overridde an already found exe', 'c') + +old = find_program('something.py') +replacement = find_program('other.py') +meson.override_find_program('something.py', replacement) diff --git a/meson/test cases/failing/71 override used/other.py b/meson/test cases/failing/71 override used/other.py new file mode 100755 index 000000000..f62ba960d --- /dev/null +++ b/meson/test cases/failing/71 override used/other.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +print('Doing something else.') diff --git a/meson/test cases/failing/71 override used/something.py b/meson/test cases/failing/71 override used/something.py new file mode 100755 index 000000000..64c9454c3 --- /dev/null +++ b/meson/test cases/failing/71 override used/something.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +print('Doing something.') diff --git a/meson/test cases/failing/71 override used/test.json b/meson/test cases/failing/71 override used/test.json new file mode 100644 index 000000000..6c734e432 --- /dev/null +++ b/meson/test cases/failing/71 override used/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/71 override used/meson.build:5:6: ERROR: Tried to override finding of executable \"something.py\" which has already been found." 
+ } + ] +} diff --git a/meson/test cases/failing/72 run_command unclean exit/meson.build b/meson/test cases/failing/72 run_command unclean exit/meson.build new file mode 100644 index 000000000..4bc02ae7b --- /dev/null +++ b/meson/test cases/failing/72 run_command unclean exit/meson.build @@ -0,0 +1,4 @@ +project('run_command unclean exit', 'c') + +rcprog = find_program('./returncode.py') +run_command(rcprog, '1', check : true) diff --git a/meson/test cases/failing/72 run_command unclean exit/returncode.py b/meson/test cases/failing/72 run_command unclean exit/returncode.py new file mode 100755 index 000000000..84dbc5df6 --- /dev/null +++ b/meson/test cases/failing/72 run_command unclean exit/returncode.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python3 + +import sys +exit(int(sys.argv[1])) diff --git a/meson/test cases/failing/72 run_command unclean exit/test.json b/meson/test cases/failing/72 run_command unclean exit/test.json new file mode 100644 index 000000000..67a80f425 --- /dev/null +++ b/meson/test cases/failing/72 run_command unclean exit/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/72 run_command unclean exit/meson\\.build:4:0: ERROR: Command \".*[\\\\/]test cases[\\\\/]failing[\\\\/]72 run_command unclean exit[\\\\/]\\.[\\\\/]returncode\\.py 1\" failed with status 1\\." + } + ] +} diff --git a/meson/test cases/failing/73 int literal leading zero/meson.build b/meson/test cases/failing/73 int literal leading zero/meson.build new file mode 100644 index 000000000..7ad64ae09 --- /dev/null +++ b/meson/test cases/failing/73 int literal leading zero/meson.build @@ -0,0 +1,6 @@ + +# This should fail. +# Decimal syntax is 123. +# Octal syntax is 0o123. +fail_0123 = 0123 + diff --git a/meson/test cases/failing/73 int literal leading zero/test.json b/meson/test cases/failing/73 int literal leading zero/test.json new file mode 100644 index 000000000..bc41165d7 --- /dev/null +++ b/meson/test cases/failing/73 int literal leading zero/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "comment": "this error message is not very informative", + "line": "test cases/failing/73 int literal leading zero/meson.build:5:13: ERROR: Expecting eof got number." + } + ] +} diff --git a/meson/test cases/failing/74 configuration immutable/input b/meson/test cases/failing/74 configuration immutable/input new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/74 configuration immutable/meson.build b/meson/test cases/failing/74 configuration immutable/meson.build new file mode 100644 index 000000000..b6cac4126 --- /dev/null +++ b/meson/test cases/failing/74 configuration immutable/meson.build @@ -0,0 +1,12 @@ +project('configuration_data is immutable') + +a = configuration_data() + +configure_file( + configuration : a, + input : 'input', + output : 'output', +) + +still_immutable = a +still_immutable.set('hello', 'world') diff --git a/meson/test cases/failing/74 configuration immutable/test.json b/meson/test cases/failing/74 configuration immutable/test.json new file mode 100644 index 000000000..1dd172aec --- /dev/null +++ b/meson/test cases/failing/74 configuration immutable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/74 configuration immutable/meson.build:12:16: ERROR: Can not set values on configuration object that has been used." 
+ } + ] +} diff --git a/meson/test cases/failing/75 link with shared module on osx/meson.build b/meson/test cases/failing/75 link with shared module on osx/meson.build new file mode 100644 index 000000000..bf18b3626 --- /dev/null +++ b/meson/test cases/failing/75 link with shared module on osx/meson.build @@ -0,0 +1,8 @@ +project('link with shared module', 'c') + +if host_machine.system() != 'darwin' + error('MESON_SKIP_TEST test only fails on OSX') +endif + +m = shared_module('mymodule', 'module.c') +e = executable('prog', 'prog.c', link_with : m) diff --git a/meson/test cases/failing/75 link with shared module on osx/module.c b/meson/test cases/failing/75 link with shared module on osx/module.c new file mode 100644 index 000000000..81b0d5af0 --- /dev/null +++ b/meson/test cases/failing/75 link with shared module on osx/module.c @@ -0,0 +1,3 @@ +int func(void) { + return 1496; +} diff --git a/meson/test cases/failing/75 link with shared module on osx/prog.c b/meson/test cases/failing/75 link with shared module on osx/prog.c new file mode 100644 index 000000000..8164d8da1 --- /dev/null +++ b/meson/test cases/failing/75 link with shared module on osx/prog.c @@ -0,0 +1,4 @@ + +int main(int argc, char **argv) { + return func(); +} diff --git a/meson/test cases/failing/75 link with shared module on osx/test.json b/meson/test cases/failing/75 link with shared module on osx/test.json new file mode 100644 index 000000000..7db17d8d7 --- /dev/null +++ b/meson/test cases/failing/75 link with shared module on osx/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/75 link with shared module on osx/meson.build:8:0: ERROR: target links against shared modules. This is not permitted on OSX" + } + ] +} diff --git a/meson/test cases/failing/76 non ascii in ascii encoded configure file/config9.h.in b/meson/test cases/failing/76 non ascii in ascii encoded configure file/config9.h.in new file mode 100644 index 000000000..323bec64a --- /dev/null +++ b/meson/test cases/failing/76 non ascii in ascii encoded configure file/config9.h.in @@ -0,0 +1 @@ +#define MESSAGE "@var@" diff --git a/meson/test cases/failing/76 non ascii in ascii encoded configure file/meson.build b/meson/test cases/failing/76 non ascii in ascii encoded configure file/meson.build new file mode 100644 index 000000000..846daaf89 --- /dev/null +++ b/meson/test cases/failing/76 non ascii in ascii encoded configure file/meson.build @@ -0,0 +1,10 @@ +project('non acsii to ascii encoding', 'c') +# Writing a non ASCII character with a ASCII encoding should fail +conf9 = configuration_data() +conf9.set('var', 'д') +configure_file( + input : 'config9.h.in', + output : '@BASENAME@', + encoding : 'ascii', + configuration : conf9 +) diff --git a/meson/test cases/failing/76 non ascii in ascii encoded configure file/test.json b/meson/test cases/failing/76 non ascii in ascii encoded configure file/test.json new file mode 100644 index 000000000..44e6377a7 --- /dev/null +++ b/meson/test cases/failing/76 non ascii in ascii encoded configure file/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/76 non ascii in ascii encoded configure file/meson\\.build:5:0: ERROR: Could not write output file .*[\\\\/]config9\\.h: 'ascii' codec can't encode character '\\\\u0434' in position 17: ordinal not in range\\(128\\)" + } + ] +} diff --git a/meson/test cases/failing/77 subproj dependency not-found and required/meson.build b/meson/test cases/failing/77 subproj dependency not-found and required/meson.build 
new file mode 100644 index 000000000..c5a296104 --- /dev/null +++ b/meson/test cases/failing/77 subproj dependency not-found and required/meson.build @@ -0,0 +1,2 @@ +project('dep-test') +missing = dependency('', fallback: ['missing', 'missing_dep'], required: true) diff --git a/meson/test cases/failing/77 subproj dependency not-found and required/test.json b/meson/test cases/failing/77 subproj dependency not-found and required/test.json new file mode 100644 index 000000000..5365f34e5 --- /dev/null +++ b/meson/test cases/failing/77 subproj dependency not-found and required/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/77 subproj dependency not-found and required/meson.build:2:0: ERROR: Neither a subproject directory nor a missing.wrap file was found." + } + ] +} diff --git a/meson/test cases/failing/78 unfound run/meson.build b/meson/test cases/failing/78 unfound run/meson.build new file mode 100644 index 000000000..3f37e9a06 --- /dev/null +++ b/meson/test cases/failing/78 unfound run/meson.build @@ -0,0 +1,4 @@ +project('unfound runtarget') + +exe = find_program('nonexisting_prog', required : false) +run_target('invoke_fail', command : [exe]) diff --git a/meson/test cases/failing/78 unfound run/test.json b/meson/test cases/failing/78 unfound run/test.json new file mode 100644 index 000000000..0cdcdf209 --- /dev/null +++ b/meson/test cases/failing/78 unfound run/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/78 unfound run/meson.build:4:0: ERROR: Tried to use non-existing executable 'nonexisting_prog'" + } + ] +} diff --git a/meson/test cases/failing/79 framework dependency with version/meson.build b/meson/test cases/failing/79 framework dependency with version/meson.build new file mode 100644 index 000000000..b7e04bab4 --- /dev/null +++ b/meson/test cases/failing/79 framework dependency with version/meson.build @@ -0,0 +1,8 @@ +project('framework dependency with version', 'c') + +if host_machine.system() != 'darwin' + error('MESON_SKIP_TEST test only applicable on darwin') +endif + +# do individual frameworks have a meaningful version to test? And multiple frameworks might be listed... +dep = dependency('appleframeworks', modules: 'foundation', version: '>0') diff --git a/meson/test cases/failing/79 framework dependency with version/test.json b/meson/test cases/failing/79 framework dependency with version/test.json new file mode 100644 index 000000000..9b0d335d7 --- /dev/null +++ b/meson/test cases/failing/79 framework dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/79 framework dependency with version/meson.build:8:0: ERROR: Unknown version of dependency 'appleframeworks', but need ['>0']." 
+ } + ] +} diff --git a/meson/test cases/failing/8 recursive/meson.build b/meson/test cases/failing/8 recursive/meson.build new file mode 100644 index 000000000..f3152946d --- /dev/null +++ b/meson/test cases/failing/8 recursive/meson.build @@ -0,0 +1,3 @@ +project('recursive', 'c') + +a = subproject('a') diff --git a/meson/test cases/failing/8 recursive/subprojects/a/meson.build b/meson/test cases/failing/8 recursive/subprojects/a/meson.build new file mode 100644 index 000000000..7c6040bc6 --- /dev/null +++ b/meson/test cases/failing/8 recursive/subprojects/a/meson.build @@ -0,0 +1,3 @@ +project('a', 'c') + +b = subproject('b') diff --git a/meson/test cases/failing/8 recursive/subprojects/b/meson.build b/meson/test cases/failing/8 recursive/subprojects/b/meson.build new file mode 100644 index 000000000..d0beeb7d9 --- /dev/null +++ b/meson/test cases/failing/8 recursive/subprojects/b/meson.build @@ -0,0 +1,3 @@ +project('b', 'c') + +a = subproject('a') diff --git a/meson/test cases/failing/8 recursive/test.json b/meson/test cases/failing/8 recursive/test.json new file mode 100644 index 000000000..b4c964c0f --- /dev/null +++ b/meson/test cases/failing/8 recursive/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/8 recursive/subprojects/b/meson.build:3:0: ERROR: Recursive include of subprojects: a => b => a." + } + ] +} diff --git a/meson/test cases/failing/80 override exe config/foo.c b/meson/test cases/failing/80 override exe config/foo.c new file mode 100644 index 000000000..03b2213bb --- /dev/null +++ b/meson/test cases/failing/80 override exe config/foo.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/failing/80 override exe config/meson.build b/meson/test cases/failing/80 override exe config/meson.build new file mode 100644 index 000000000..29a74166b --- /dev/null +++ b/meson/test cases/failing/80 override exe config/meson.build @@ -0,0 +1,6 @@ +project('myexe', 'c') + +foo = executable('foo', 'foo.c') +meson.override_find_program('bar', foo) +bar = find_program('bar') +run_command(bar) diff --git a/meson/test cases/failing/80 override exe config/test.json b/meson/test cases/failing/80 override exe config/test.json new file mode 100644 index 000000000..36c789e9d --- /dev/null +++ b/meson/test cases/failing/80 override exe config/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/80 override exe config/meson.build:6:0: ERROR: Program 'bar' was overridden with the compiled executable 'foo' and therefore cannot be used during configuration" + } + ] +} diff --git a/meson/test cases/failing/81 gl dependency with version/meson.build b/meson/test cases/failing/81 gl dependency with version/meson.build new file mode 100644 index 000000000..012709302 --- /dev/null +++ b/meson/test cases/failing/81 gl dependency with version/meson.build @@ -0,0 +1,9 @@ +project('gl dependency with version', 'c') + +host_system = host_machine.system() +if host_system != 'windows' and host_system != 'darwin' + error('MESON_SKIP_TEST: test only fails on Windows and OSX') +endif + +# gl dependency found via system method doesn't have a meaningful version to check +dep = dependency('gl', method: 'system', version: '>0') diff --git a/meson/test cases/failing/81 gl dependency with version/test.json b/meson/test cases/failing/81 gl dependency with version/test.json new file mode 100644 index 000000000..86da7359f --- /dev/null +++ b/meson/test cases/failing/81 gl dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + 
"line": "test cases/failing/81 gl dependency with version/meson.build:9:0: ERROR: Unknown version of dependency 'gl', but need ['>0']." + } + ] +} diff --git a/meson/test cases/failing/82 threads dependency with version/meson.build b/meson/test cases/failing/82 threads dependency with version/meson.build new file mode 100644 index 000000000..6023faeb6 --- /dev/null +++ b/meson/test cases/failing/82 threads dependency with version/meson.build @@ -0,0 +1,3 @@ +project('threads dependency with version', 'c') +# threads dependency doesn't have a meaningful version to check +dep = dependency('threads', version: '>0') diff --git a/meson/test cases/failing/82 threads dependency with version/test.json b/meson/test cases/failing/82 threads dependency with version/test.json new file mode 100644 index 000000000..9b71468ea --- /dev/null +++ b/meson/test cases/failing/82 threads dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/82 threads dependency with version/meson.build:3:0: ERROR: Unknown version of dependency 'threads', but need ['>0']." + } + ] +} diff --git a/meson/test cases/failing/83 gtest dependency with version/meson.build b/meson/test cases/failing/83 gtest dependency with version/meson.build new file mode 100644 index 000000000..b43a04733 --- /dev/null +++ b/meson/test cases/failing/83 gtest dependency with version/meson.build @@ -0,0 +1,8 @@ +project('gtest dependency with version', ['c', 'cpp']) + +if not dependency('gtest', method: 'system', required: false).found() + error('MESON_SKIP_TEST test requires gtest') +endif + +# discovering gtest version is not yet implemented +dep = dependency('gtest', method: 'system', version: '>0') diff --git a/meson/test cases/failing/83 gtest dependency with version/test.json b/meson/test cases/failing/83 gtest dependency with version/test.json new file mode 100644 index 000000000..4a44077e2 --- /dev/null +++ b/meson/test cases/failing/83 gtest dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/83 gtest dependency with version/meson.build:8:0: ERROR: Dependency 'gtest' is required but not found." 
+ } + ] +} diff --git a/meson/test cases/failing/84 dub libray/meson.build b/meson/test cases/failing/84 dub libray/meson.build new file mode 100644 index 000000000..306d5b3e5 --- /dev/null +++ b/meson/test cases/failing/84 dub libray/meson.build @@ -0,0 +1,11 @@ +project('dub') + +if not add_languages('d', required: false) + error('MESON_SKIP_TEST test requires D compiler') +endif + +if not find_program('dub', required: false).found() + error('MESON_SKIP_TEST test requires dub') +endif + +dependency('dubtestproject', method: 'dub') # Not library (none) diff --git a/meson/test cases/failing/84 dub libray/test.json b/meson/test cases/failing/84 dub libray/test.json new file mode 100644 index 000000000..23fe6e5f6 --- /dev/null +++ b/meson/test cases/failing/84 dub libray/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/84 dub libray/meson.build:11:0: ERROR: Dependency \"dubtestproject\" not found" + } + ] +} diff --git a/meson/test cases/failing/85 dub executable/meson.build b/meson/test cases/failing/85 dub executable/meson.build new file mode 100644 index 000000000..9a134ea21 --- /dev/null +++ b/meson/test cases/failing/85 dub executable/meson.build @@ -0,0 +1,11 @@ +project('dub') + +if not add_languages('d', required: false) + error('MESON_SKIP_TEST test requires D compiler') +endif + +if not find_program('dub', required: false).found() + error('MESON_SKIP_TEST test requires dub') +endif + +dependency('dubtestproject:test1', method: 'dub') # Not library (executable) diff --git a/meson/test cases/failing/85 dub executable/test.json b/meson/test cases/failing/85 dub executable/test.json new file mode 100644 index 000000000..4a8674c34 --- /dev/null +++ b/meson/test cases/failing/85 dub executable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/85 dub executable/meson.build:11:0: ERROR: Dependency \"dubtestproject:test1\" not found" + } + ] +} diff --git a/meson/test cases/failing/86 dub compiler/meson.build b/meson/test cases/failing/86 dub compiler/meson.build new file mode 100644 index 000000000..36f1849e5 --- /dev/null +++ b/meson/test cases/failing/86 dub compiler/meson.build @@ -0,0 +1,17 @@ +project('dub') + +if not add_languages('d', required: false) + error('MESON_SKIP_TEST test requires D compiler') +endif + +if meson.get_compiler('d').get_id() == 'dmd' + if host_machine.system() == 'windows' or host_machine.system() == 'cygwin' + error('MESON_SKIP_TEST Windows test environment lacks multiple D compilers.') + endif +endif + +if not find_program('dub', required: false).found() + error('MESON_SKIP_TEST test requires dub') +endif + +dependency('dubtestproject:test2', method: 'dub') # Compiler mismatch diff --git a/meson/test cases/failing/86 dub compiler/test.json b/meson/test cases/failing/86 dub compiler/test.json new file mode 100644 index 000000000..ab6caff40 --- /dev/null +++ b/meson/test cases/failing/86 dub compiler/test.json @@ -0,0 +1,19 @@ +{ + "matrix": { + "options": { + "warning_level": [ + { + "val": "1", + "skip_on_env": [ + "SINGLE_DUB_COMPILER" + ] + } + ] + } + }, + "stdout": [ + { + "line": "test cases/failing/86 dub compiler/meson.build:17:0: ERROR: Dependency \"dubtestproject:test2\" not found" + } + ] +} diff --git a/meson/test cases/failing/87 subproj not-found dep/meson.build b/meson/test cases/failing/87 subproj not-found dep/meson.build new file mode 100644 index 000000000..2b17df17e --- /dev/null +++ b/meson/test cases/failing/87 subproj not-found dep/meson.build @@ -0,0 +1,2 @@ +project('dep-test') 
+missing = dependency('', fallback: ['somesubproj', 'notfound_dep'], required: true) diff --git a/meson/test cases/failing/87 subproj not-found dep/subprojects/somesubproj/meson.build b/meson/test cases/failing/87 subproj not-found dep/subprojects/somesubproj/meson.build new file mode 100644 index 000000000..5f451f401 --- /dev/null +++ b/meson/test cases/failing/87 subproj not-found dep/subprojects/somesubproj/meson.build @@ -0,0 +1,3 @@ +project('dep', 'c') + +notfound_dep = dependency('', required : false) diff --git a/meson/test cases/failing/87 subproj not-found dep/test.json b/meson/test cases/failing/87 subproj not-found dep/test.json new file mode 100644 index 000000000..160e9d351 --- /dev/null +++ b/meson/test cases/failing/87 subproj not-found dep/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/87 subproj not-found dep/meson.build:2:0: ERROR: Dependency '(anonymous)' is required but not found." + } + ] +} diff --git a/meson/test cases/failing/88 invalid configure file/input b/meson/test cases/failing/88 invalid configure file/input new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/88 invalid configure file/meson.build b/meson/test cases/failing/88 invalid configure file/meson.build new file mode 100644 index 000000000..08eca2bd0 --- /dev/null +++ b/meson/test cases/failing/88 invalid configure file/meson.build @@ -0,0 +1,9 @@ +project('invalid configura file') + +configure_file( + configuration : configuration_data(), + input : 'input', + output : 'output', + install_dir : '', + install : true, +) diff --git a/meson/test cases/failing/88 invalid configure file/test.json b/meson/test cases/failing/88 invalid configure file/test.json new file mode 100644 index 000000000..b551f61ae --- /dev/null +++ b/meson/test cases/failing/88 invalid configure file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/88 invalid configure file/meson.build:3:0: ERROR: \"install_dir\" must be specified when \"install\" in a configure_file is true" + } + ] +} diff --git a/meson/test cases/failing/89 kwarg dupe/meson.build b/meson/test cases/failing/89 kwarg dupe/meson.build new file mode 100644 index 000000000..06821a278 --- /dev/null +++ b/meson/test cases/failing/89 kwarg dupe/meson.build @@ -0,0 +1,6 @@ +project('dupe kwarg', 'c') + +dupedict = {'install': true} + +executable('prog', 'prog.c', install: true, + kwargs: dupedict) diff --git a/meson/test cases/failing/89 kwarg dupe/prog.c b/meson/test cases/failing/89 kwarg dupe/prog.c new file mode 100644 index 000000000..5f3fbe6a2 --- /dev/null +++ b/meson/test cases/failing/89 kwarg dupe/prog.c @@ -0,0 +1,6 @@ +#include <stdio.h> + +int main(int argc, char **argv) { + printf("I don't get built. It makes me saaaaaad. :(\n"); + return 0; +} diff --git a/meson/test cases/failing/89 kwarg dupe/test.json b/meson/test cases/failing/89 kwarg dupe/test.json new file mode 100644 index 000000000..ec4660043 --- /dev/null +++ b/meson/test cases/failing/89 kwarg dupe/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/89 kwarg dupe/meson.build:5:0: ERROR: Entry \"install\" defined both as a keyword argument and in a \"kwarg\" entry."
+ } + ] +} diff --git a/meson/test cases/failing/9 missing extra file/meson.build b/meson/test cases/failing/9 missing extra file/meson.build new file mode 100644 index 000000000..725bec8f7 --- /dev/null +++ b/meson/test cases/failing/9 missing extra file/meson.build @@ -0,0 +1,3 @@ +project('missing extra file', 'c') + +executable('myprog', 'prog.c', extra_files : 'missing.txt') diff --git a/meson/test cases/failing/9 missing extra file/prog.c b/meson/test cases/failing/9 missing extra file/prog.c new file mode 100644 index 000000000..11b7fad8e --- /dev/null +++ b/meson/test cases/failing/9 missing extra file/prog.c @@ -0,0 +1,3 @@ +int main(int argc, char **argv) { + return 0; +} diff --git a/meson/test cases/failing/9 missing extra file/test.json b/meson/test cases/failing/9 missing extra file/test.json new file mode 100644 index 000000000..188b6a605 --- /dev/null +++ b/meson/test cases/failing/9 missing extra file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/9 missing extra file/meson.build:3:0: ERROR: File missing.txt does not exist." + } + ] +} diff --git a/meson/test cases/failing/90 missing pch file/meson.build b/meson/test cases/failing/90 missing pch file/meson.build new file mode 100644 index 000000000..a67b79877 --- /dev/null +++ b/meson/test cases/failing/90 missing pch file/meson.build @@ -0,0 +1,3 @@ +project('pch test', 'c') +exe = executable('prog', 'prog.c', +c_pch : ['pch/prog_pch.c', 'pch/prog.h']) diff --git a/meson/test cases/failing/90 missing pch file/prog.c b/meson/test cases/failing/90 missing pch file/prog.c new file mode 100644 index 000000000..11b7fad8e --- /dev/null +++ b/meson/test cases/failing/90 missing pch file/prog.c @@ -0,0 +1,3 @@ +int main(int argc, char **argv) { + return 0; +} diff --git a/meson/test cases/failing/90 missing pch file/test.json b/meson/test cases/failing/90 missing pch file/test.json new file mode 100644 index 000000000..f55eb4785 --- /dev/null +++ b/meson/test cases/failing/90 missing pch file/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "comment": "literal 'pch/prog.h' from meson.build appears in output, irrespective of os.path.sep", + "line": "test cases/failing/90 missing pch file/meson.build:2:0: ERROR: File pch/prog.h does not exist." + } + ] +} diff --git a/meson/test cases/failing/91 pch source different folder/include/pch.h b/meson/test cases/failing/91 pch source different folder/include/pch.h new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/91 pch source different folder/meson.build b/meson/test cases/failing/91 pch source different folder/meson.build new file mode 100644 index 000000000..d32071772 --- /dev/null +++ b/meson/test cases/failing/91 pch source different folder/meson.build @@ -0,0 +1,5 @@ +project('pch', 'c') +# It is not allowed to have the PCH implementation in a different +# folder than the header. 
+exe = executable('prog', 'prog.c', + c_pch : ['include/pch.h', 'src/pch.c']) diff --git a/meson/test cases/failing/91 pch source different folder/prog.c b/meson/test cases/failing/91 pch source different folder/prog.c new file mode 100644 index 000000000..3fb1295e4 --- /dev/null +++ b/meson/test cases/failing/91 pch source different folder/prog.c @@ -0,0 +1 @@ +int main(void) {} \ No newline at end of file diff --git a/meson/test cases/failing/91 pch source different folder/src/pch.c b/meson/test cases/failing/91 pch source different folder/src/pch.c new file mode 100644 index 000000000..e69de29bb diff --git a/meson/test cases/failing/91 pch source different folder/test.json b/meson/test cases/failing/91 pch source different folder/test.json new file mode 100644 index 000000000..6309f6c66 --- /dev/null +++ b/meson/test cases/failing/91 pch source different folder/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/91 pch source different folder/meson.build:4:0: ERROR: PCH files must be stored in the same folder." + } + ] +} diff --git a/meson/test cases/failing/92 unknown config tool/meson.build b/meson/test cases/failing/92 unknown config tool/meson.build new file mode 100644 index 000000000..536976e35 --- /dev/null +++ b/meson/test cases/failing/92 unknown config tool/meson.build @@ -0,0 +1,2 @@ +project('no-such-config-tool') +dependency('no-such-config-tool', method:'config-tool') diff --git a/meson/test cases/failing/92 unknown config tool/test.json b/meson/test cases/failing/92 unknown config tool/test.json new file mode 100644 index 000000000..c4484f322 --- /dev/null +++ b/meson/test cases/failing/92 unknown config tool/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/92 unknown config tool/meson.build:2:0: ERROR: Dependency \"no-such-config-tool\" not found" + } + ] +} diff --git a/meson/test cases/failing/93 custom target install data/Info.plist.cpp b/meson/test cases/failing/93 custom target install data/Info.plist.cpp new file mode 100644 index 000000000..9ca2fcbaf --- /dev/null +++ b/meson/test cases/failing/93 custom target install data/Info.plist.cpp @@ -0,0 +1 @@ +Some data which gets processed before installation diff --git a/meson/test cases/failing/93 custom target install data/meson.build b/meson/test cases/failing/93 custom target install data/meson.build new file mode 100644 index 000000000..00d348cc0 --- /dev/null +++ b/meson/test cases/failing/93 custom target install data/meson.build @@ -0,0 +1,11 @@ +project('custom target install data') + +preproc = find_program('preproc.py') + +t = custom_target('Info.plist', + command: [preproc, '@INPUT@', '@OUTPUT@'], + input: 'Info.plist.cpp', + output: 'Info.plist', +) + +install_data(t) diff --git a/meson/test cases/failing/93 custom target install data/preproc.py b/meson/test cases/failing/93 custom target install data/preproc.py new file mode 100644 index 000000000..e6eba4c6a --- /dev/null +++ b/meson/test cases/failing/93 custom target install data/preproc.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +import sys + +if len(sys.argv) != 3: + print(sys.argv[0], '<input>', '<output>') + +inf = sys.argv[1] +outf = sys.argv[2] + +with open(outf, 'wb') as o: + with open(inf, 'rb') as i: + o.write(i.read()) diff --git a/meson/test cases/failing/93 custom target install data/test.json b/meson/test cases/failing/93 custom target install data/test.json new file mode 100644 index 000000000..caeafb159 --- /dev/null +++ b/meson/test cases/failing/93 custom target install data/test.json @@ -0,0 
+1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/93 custom target install data/meson.build:11:0: ERROR: install_data argument 1 was of type \"CustomTarget\" but should have been one of: \"str\", \"File\"" + } + ] +} diff --git a/meson/test cases/failing/94 add dict non string key/meson.build b/meson/test cases/failing/94 add dict non string key/meson.build new file mode 100644 index 000000000..c81a3f764 --- /dev/null +++ b/meson/test cases/failing/94 add dict non string key/meson.build @@ -0,0 +1,9 @@ +project('add dictionary entry using non-string key') + +dict = {} + +# An integer variable to be used as a key +key = 1 + +# Add new entry using integer variable as key should fail +dict += {key : 'myValue'} \ No newline at end of file diff --git a/meson/test cases/failing/94 add dict non string key/test.json b/meson/test cases/failing/94 add dict non string key/test.json new file mode 100644 index 000000000..09459dd0c --- /dev/null +++ b/meson/test cases/failing/94 add dict non string key/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/94 add dict non string key/meson.build:9:0: ERROR: Key must be a string" + } + ] +} diff --git a/meson/test cases/failing/95 add dict duplicate keys/meson.build b/meson/test cases/failing/95 add dict duplicate keys/meson.build new file mode 100644 index 000000000..7a9b523b0 --- /dev/null +++ b/meson/test cases/failing/95 add dict duplicate keys/meson.build @@ -0,0 +1,9 @@ +project('add dictionary entries with duplicate keys') + +dict = {} + +# A variable to be used as a key +key = 'myKey' + +# Add two entries with duplicate keys should fail +dict += {key : 'myValue1', key : 'myValue2'} \ No newline at end of file diff --git a/meson/test cases/failing/95 add dict duplicate keys/test.json b/meson/test cases/failing/95 add dict duplicate keys/test.json new file mode 100644 index 000000000..ae9a81f34 --- /dev/null +++ b/meson/test cases/failing/95 add dict duplicate keys/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/95 add dict duplicate keys/meson.build:9:0: ERROR: Duplicate dictionary key: myKey" + } + ] +} diff --git a/meson/test cases/failing/96 no host get_external_property/meson.build b/meson/test cases/failing/96 no host get_external_property/meson.build new file mode 100644 index 000000000..c9567549f --- /dev/null +++ b/meson/test cases/failing/96 no host get_external_property/meson.build @@ -0,0 +1,3 @@ +project('missing property') + +message(meson.get_external_property('nonexisting')) diff --git a/meson/test cases/failing/96 no host get_external_property/test.json b/meson/test cases/failing/96 no host get_external_property/test.json new file mode 100644 index 000000000..3376a0b63 --- /dev/null +++ b/meson/test cases/failing/96 no host get_external_property/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/96 no host get_external_property/meson.build:3:0: ERROR: Unknown property for host machine: nonexisting" + } + ] +} diff --git a/meson/test cases/failing/97 no native compiler/main.c b/meson/test cases/failing/97 no native compiler/main.c new file mode 100644 index 000000000..9b6bdc2ec --- /dev/null +++ b/meson/test cases/failing/97 no native compiler/main.c @@ -0,0 +1,3 @@ +int main(void) { + return 0; +} diff --git a/meson/test cases/failing/97 no native compiler/meson.build b/meson/test cases/failing/97 no native compiler/meson.build new file mode 100644 index 000000000..f0126ac15 --- /dev/null +++ b/meson/test cases/failing/97 no native 
compiler/meson.build @@ -0,0 +1,12 @@ +project('no native compiler') + +if not meson.is_cross_build() + error('MESON_SKIP_TEST test only applicable when cross building.') +endif + +if add_languages('c', required: false, native: true) + error('MESON_SKIP_TEST test only applicable when native compiler not available.') +endif + +add_languages('c') +executable('main', 'main.c', native: true) diff --git a/meson/test cases/failing/97 no native compiler/test.json b/meson/test cases/failing/97 no native compiler/test.json new file mode 100644 index 000000000..b79bd3b8e --- /dev/null +++ b/meson/test cases/failing/97 no native compiler/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/97 no native compiler/meson.build:12:0: ERROR: No host machine compiler for \"main.c\"" + } + ] +} diff --git a/meson/test cases/failing/98 subdir parse error/meson.build b/meson/test cases/failing/98 subdir parse error/meson.build new file mode 100644 index 000000000..ee5bb0cb7 --- /dev/null +++ b/meson/test cases/failing/98 subdir parse error/meson.build @@ -0,0 +1,2 @@ +project('subdir false plusassign', 'c') +subdir('subdir') diff --git a/meson/test cases/failing/98 subdir parse error/subdir/meson.build b/meson/test cases/failing/98 subdir parse error/subdir/meson.build new file mode 100644 index 000000000..3ac5ef938 --- /dev/null +++ b/meson/test cases/failing/98 subdir parse error/subdir/meson.build @@ -0,0 +1 @@ +3 += 4 diff --git a/meson/test cases/failing/98 subdir parse error/test.json b/meson/test cases/failing/98 subdir parse error/test.json new file mode 100644 index 000000000..8e0479990 --- /dev/null +++ b/meson/test cases/failing/98 subdir parse error/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/98 subdir parse error/subdir/meson.build:1:0: ERROR: Plusassignment target must be an id." 
+ } + ] +} diff --git a/meson/test cases/failing/99 invalid option file/meson.build b/meson/test cases/failing/99 invalid option file/meson.build new file mode 100644 index 000000000..b0347c33f --- /dev/null +++ b/meson/test cases/failing/99 invalid option file/meson.build @@ -0,0 +1 @@ +project('invalid option file') diff --git a/meson/test cases/failing/99 invalid option file/meson_options.txt b/meson/test cases/failing/99 invalid option file/meson_options.txt new file mode 100644 index 000000000..eef843b53 --- /dev/null +++ b/meson/test cases/failing/99 invalid option file/meson_options.txt @@ -0,0 +1 @@ +' diff --git a/meson/test cases/failing/99 invalid option file/test.json b/meson/test cases/failing/99 invalid option file/test.json new file mode 100644 index 000000000..ebfddd894 --- /dev/null +++ b/meson/test cases/failing/99 invalid option file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/99 invalid option file/meson_options.txt:1:0: ERROR: lexer" + } + ] +} diff --git a/meson/test cases/fortran/1 basic/meson.build b/meson/test cases/fortran/1 basic/meson.build new file mode 100644 index 000000000..52e2d6f05 --- /dev/null +++ b/meson/test cases/fortran/1 basic/meson.build @@ -0,0 +1,13 @@ +project('simple fortran', 'fortran') + +fc = meson.get_compiler('fortran') +if fc.get_id() == 'gcc' + add_global_arguments('-fbounds-check', language : 'fortran') +endif + +args = fc.first_supported_argument(['-ffree-form', '-free', '/free']) +assert(args != [], 'No arguments found?') + +e = executable('simple', 'simple.f90', + fortran_args : args) +test('Simple Fortran', e) diff --git a/meson/test cases/fortran/1 basic/simple.f90 b/meson/test cases/fortran/1 basic/simple.f90 new file mode 100644 index 000000000..2160d561f --- /dev/null +++ b/meson/test cases/fortran/1 basic/simple.f90 @@ -0,0 +1,3 @@ +program main +print *, "Fortran compilation is working." 
+end program diff --git a/meson/test cases/fortran/10 find library/gzip.f90 b/meson/test cases/fortran/10 find library/gzip.f90 new file mode 100644 index 000000000..32f21d731 --- /dev/null +++ b/meson/test cases/fortran/10 find library/gzip.f90 @@ -0,0 +1,32 @@ +module gzip + +use iso_c_binding, only: c_char, c_ptr, c_int +implicit none + +interface +type(c_ptr) function gzopen(path, mode) bind(C) +import c_char, c_ptr + +character(kind=c_char), intent(in) :: path(*), mode(*) +end function gzopen +end interface + +interface +integer(c_int) function gzwrite(file, buf, len) bind(C) +import c_int, c_ptr, c_char + +type(c_ptr), value, intent(in) :: file +character(kind=c_char), intent(in) :: buf +integer(c_int), value, intent(in) :: len +end function gzwrite +end interface + +interface +integer(c_int) function gzclose(file) bind(C) +import c_int, c_ptr + +type(c_ptr), value, intent(in) :: file +end function gzclose +end interface + +end module gzip diff --git a/meson/test cases/fortran/10 find library/main.f90 b/meson/test cases/fortran/10 find library/main.f90 new file mode 100644 index 000000000..e885d30f5 --- /dev/null +++ b/meson/test cases/fortran/10 find library/main.f90 @@ -0,0 +1,38 @@ +program main +use iso_fortran_env, only: stderr=>error_unit +use iso_c_binding, only: c_int, c_char, c_null_char, c_ptr +use gzip, only: gzopen, gzwrite, gzclose + +implicit none + +character(kind=c_char,len=*), parameter :: path = c_char_"test.gz"//c_null_char +character(kind=c_char,len=*), parameter :: mode = c_char_"wb9"//c_null_char +integer(c_int), parameter :: buffer_size = 512 + +type(c_ptr) :: file +character(kind=c_char, len=buffer_size) :: buffer +integer(c_int) :: ret +integer :: i + +! open file +file = gzopen(path, mode) + +! fill buffer with data +do i=1,buffer_size/4 + write(buffer(4*(i-1)+1:4*i), '(i3.3, a)') i, new_line('') +end do +ret = gzwrite(file, buffer, buffer_size) +if (ret /= buffer_size) then + write(stderr,'(a, i3, a, i3, a)') 'Error: ', ret, ' / ', buffer_size, & + ' bytes written.' + stop 1 +end if + +! 
close file +ret = gzclose(file) +if (ret /= 0) then + write(stderr,*) 'Error: failure to close file with error code ', ret + stop 1 +end if + +end program diff --git a/meson/test cases/fortran/10 find library/meson.build b/meson/test cases/fortran/10 find library/meson.build new file mode 100644 index 000000000..2a2ef3111 --- /dev/null +++ b/meson/test cases/fortran/10 find library/meson.build @@ -0,0 +1,13 @@ +project('find fortran library', 'fortran') + +fc = meson.get_compiler('fortran') + +sources = ['main.f90', 'gzip.f90'] +zlib = fc.find_library('z', required: false) + +if not zlib.found() + error('MESON_SKIP_TEST: Z library not available.') +endif + +exe = executable('zlibtest', sources, dependencies : zlib) +test('testzlib', exe) diff --git a/meson/test cases/fortran/11 compiles links runs/meson.build b/meson/test cases/fortran/11 compiles links runs/meson.build new file mode 100644 index 000000000..81eb90791 --- /dev/null +++ b/meson/test cases/fortran/11 compiles links runs/meson.build @@ -0,0 +1,20 @@ +project('compiles_links_runs', 'fortran') + +fc = meson.get_compiler('fortran') + +code = '''error stop 123; end''' + +if not fc.compiles(code) + error('Fortran 2008 code failed to compile') +endif + +if not fc.links(code) + error('Fortran 2008 code failed to link') +endif + +if fc.run(code).returncode() != 123 + error('Fortran 2008 code failed to run') +endif + + + diff --git a/meson/test cases/fortran/12 submodule/a1.f90 b/meson/test cases/fortran/12 submodule/a1.f90 new file mode 100644 index 000000000..c4b4555c9 --- /dev/null +++ b/meson/test cases/fortran/12 submodule/a1.f90 @@ -0,0 +1,26 @@ +module a1 +implicit none + +interface +module elemental real function pi2tau(pi) + real, intent(in) :: pi +end function pi2tau + +module real function get_pi() +end function get_pi +end interface + +end module a1 + +program hierN + +use a1 +real :: tau, pi + +pi = get_pi() + +tau = pi2tau(pi) + +print *,'pi=',pi,'tau=',tau + +end program diff --git a/meson/test cases/fortran/12 submodule/a2.f90 b/meson/test cases/fortran/12 submodule/a2.f90 new file mode 100644 index 000000000..ba8a0dd2a --- /dev/null +++ b/meson/test cases/fortran/12 submodule/a2.f90 @@ -0,0 +1,10 @@ +! testing no space between submodule() +submodule(a1) a2 + +contains + +module procedure pi2tau + pi2tau = 2*pi +end procedure pi2tau + +end submodule a2 diff --git a/meson/test cases/fortran/12 submodule/a3.f90 b/meson/test cases/fortran/12 submodule/a3.f90 new file mode 100644 index 000000000..388167562 --- /dev/null +++ b/meson/test cases/fortran/12 submodule/a3.f90 @@ -0,0 +1,13 @@ +! submodule (bogus) foo +! testing don't detect commented submodule + +submodule (a1:a2) a3 ! testing inline comment + +contains + +module procedure get_pi + get_pi = 4.*atan(1.) +end procedure get_pi + + +end submodule a3 diff --git a/meson/test cases/fortran/12 submodule/child.f90 b/meson/test cases/fortran/12 submodule/child.f90 new file mode 100644 index 000000000..dc076ec47 --- /dev/null +++ b/meson/test cases/fortran/12 submodule/child.f90 @@ -0,0 +1,14 @@ +submodule (parent) parent + +contains + +module procedure pi2tau + pi2tau = 2*pi +end procedure pi2tau + +module procedure good +print *, 'Good!' 
+end procedure good + +end submodule parent + diff --git a/meson/test cases/fortran/12 submodule/meson.build b/meson/test cases/fortran/12 submodule/meson.build new file mode 100644 index 000000000..204a36b44 --- /dev/null +++ b/meson/test cases/fortran/12 submodule/meson.build @@ -0,0 +1,13 @@ +project('submodule single level', 'fortran', + meson_version: '>= 0.50.0') + +fortc = meson.get_compiler('fortran') +if fortc.get_id() == 'gcc' and fortc.version().version_compare('<6.0') + error('MESON_SKIP_TEST need gfortran >= 6.0 for submodule support') +endif + +hier2 = executable('single', 'parent.f90', 'child.f90') +test('single-level hierarchy', hier2) + +hierN = executable('multi', 'a1.f90', 'a2.f90', 'a3.f90') +test('multi-level hierarchy', hierN) diff --git a/meson/test cases/fortran/12 submodule/parent.f90 b/meson/test cases/fortran/12 submodule/parent.f90 new file mode 100644 index 000000000..efc7cf6fc --- /dev/null +++ b/meson/test cases/fortran/12 submodule/parent.f90 @@ -0,0 +1,26 @@ +module parent +real, parameter :: pi = 4.*atan(1.) +real :: tau + +interface +module elemental real function pi2tau(pi) + real, intent(in) :: pi +end function pi2tau + +module subroutine good() +end subroutine good +end interface + +end module parent + +program main + +use parent + +tau = pi2tau(pi) + +print *,'pi=',pi, 'tau=', tau + +call good() + +end program diff --git a/meson/test cases/fortran/13 coarray/main.f90 b/meson/test cases/fortran/13 coarray/main.f90 new file mode 100644 index 000000000..eee03ea03 --- /dev/null +++ b/meson/test cases/fortran/13 coarray/main.f90 @@ -0,0 +1,10 @@ +program main +implicit none + +if (this_image() == 1) print *, 'number of Fortran coarray images:', num_images() + +sync all ! semaphore, ensures message above is printed at top. 
+ +print *, 'Process ', this_image() + +end program diff --git a/meson/test cases/fortran/13 coarray/meson.build b/meson/test cases/fortran/13 coarray/meson.build new file mode 100644 index 000000000..893cec941 --- /dev/null +++ b/meson/test cases/fortran/13 coarray/meson.build @@ -0,0 +1,24 @@ +project('Fortran coarray', 'fortran', + meson_version: '>=0.50') + +fc = meson.get_compiler('fortran') + +if ['pgi', 'flang'].contains(fc.get_id()) + error('MESON_SKIP_TEST: At least through PGI 19.10 and Flang 7.1 do not support Fortran Coarrays.') +endif + +# coarray is required because single-image fallback is an intrinsic feature +coarray = dependency('coarray') + +# check coarray, because user might not have all the library stack installed correctly +# for example, conflicting library/compiler versions on PATH +# this has to invoke a run of "sync all" to verify the MPI stack is functioning, +# particularly for dynamic linking +if fc.run('sync all; end', dependencies: coarray, name: 'Coarray link & run').returncode() != 0 + error('MESON_SKIP_TEST: coarray stack (including MPI) did not link correctly so that a simple test could run.') +endif + +exe = executable('hello', 'main.f90', + dependencies : coarray) + +test('Coarray hello world', exe, timeout: 10) diff --git a/meson/test cases/fortran/14 fortran links c/clib.c b/meson/test cases/fortran/14 fortran links c/clib.c new file mode 100644 index 000000000..81b2e0c17 --- /dev/null +++ b/meson/test cases/fortran/14 fortran links c/clib.c @@ -0,0 +1,7 @@ +#include <stdio.h> + +void hello(void){ + + printf("hello from C\n"); + +} diff --git a/meson/test cases/fortran/14 fortran links c/clib.def b/meson/test cases/fortran/14 fortran links c/clib.def new file mode 100644 index 000000000..4caeb24a9 --- /dev/null +++ b/meson/test cases/fortran/14 fortran links c/clib.def @@ -0,0 +1,2 @@ +EXPORTS + hello diff --git a/meson/test cases/fortran/14 fortran links c/f_call_c.f90 b/meson/test cases/fortran/14 fortran links c/f_call_c.f90 new file mode 100644 index 000000000..b3f70a7ca --- /dev/null +++ b/meson/test cases/fortran/14 fortran links c/f_call_c.f90 @@ -0,0 +1,11 @@ +program main +implicit none + +interface +subroutine hello() bind (c) +end subroutine hello +end interface + +call hello() + +end program diff --git a/meson/test cases/fortran/14 fortran links c/meson.build b/meson/test cases/fortran/14 fortran links c/meson.build new file mode 100644 index 000000000..a45f06fd3 --- /dev/null +++ b/meson/test cases/fortran/14 fortran links c/meson.build @@ -0,0 +1,15 @@ +project('Fortran calling C', 'fortran', 'c', + meson_version: '>= 0.51.0', + default_options : ['default_library=static']) + +ccid = meson.get_compiler('c').get_id() +if ccid == 'msvc' or ccid == 'clang-cl' + error('MESON_SKIP_TEST: MSVC and GCC do not interoperate like this.') +endif + +c_lib = library('clib', 'clib.c', vs_module_defs : 'clib.def') + +f_call_c = executable('f_call_c', 'f_call_c.f90', + link_with: c_lib, + link_language: 'fortran') +test('Fortran calling C', f_call_c) diff --git a/meson/test cases/fortran/15 include/inc1.f90 b/meson/test cases/fortran/15 include/inc1.f90 new file mode 100644 index 000000000..163f5864b --- /dev/null +++ b/meson/test cases/fortran/15 include/inc1.f90 @@ -0,0 +1,5 @@ + +real :: pi = 4.*atan(1.) +real :: tau + +include "inc2.f90" !
testing inline comment diff --git a/meson/test cases/fortran/15 include/inc2.f90 b/meson/test cases/fortran/15 include/inc2.f90 new file mode 100644 index 000000000..065b9903d --- /dev/null +++ b/meson/test cases/fortran/15 include/inc2.f90 @@ -0,0 +1,2 @@ + +tau = 2*pi diff --git a/meson/test cases/fortran/15 include/include_hierarchy.f90 b/meson/test cases/fortran/15 include/include_hierarchy.f90 new file mode 100644 index 000000000..0598d874f --- /dev/null +++ b/meson/test cases/fortran/15 include/include_hierarchy.f90 @@ -0,0 +1,9 @@ +program test_include_hier + +implicit none + +include "inc1.f90" + +print *, '2*pi:', tau + +end program diff --git a/meson/test cases/fortran/15 include/include_syntax.f90 b/meson/test cases/fortran/15 include/include_syntax.f90 new file mode 100644 index 000000000..5f7eb9f2e --- /dev/null +++ b/meson/test cases/fortran/15 include/include_syntax.f90 @@ -0,0 +1,25 @@ +program test_include_syntax + +implicit none + +integer :: x, y + +x = 1 +y = 0 + +! include "timestwo.f90" + +include "timestwo.f90" ! inline comment check +if (x/=2) error stop 'failed on first include' + +! leading space check + include 'timestwo.f90' +if (x/=4) error stop 'failed on second include' + +! Most Fortran compilers can't handle the non-standard #include, +! including (ha!) Flang, Gfortran, Ifort and PGI. +! #include "timestwo.f90" + +print *, 'OK: Fortran include tests: x=',x + +end program diff --git a/meson/test cases/fortran/15 include/meson.build b/meson/test cases/fortran/15 include/meson.build new file mode 100644 index 000000000..6ba0afa0a --- /dev/null +++ b/meson/test cases/fortran/15 include/meson.build @@ -0,0 +1,19 @@ +project('Inclusive', 'fortran', + meson_version: '>= 0.51.1') + +cm = import('cmake') + +hier_exe = executable('include_hierarchy', 'include_hierarchy.f90') +test('Fortran include file hierarchy', hier_exe) + +syntax_exe = executable('include_syntax', 'include_syntax.f90') +test('Fortran include file syntax', syntax_exe) + +# older CI runs into problems with too-old Ninja and CMake and Fortran +ninja_version = run_command('ninja', '--version').stdout().strip() +cmake_version = run_command('cmake', '--version').stdout().split()[2] +if ninja_version.version_compare('>=1.10.0') and cmake_version.version_compare('>=3.17.0') + cm.subproject('cmake_inc') +else + message('SKIP: CMake Fortran subproject with include. Ninja >= 1.10 and CMake >= 3.17 needed. 
You have Ninja ' + ninja_version + ' and CMake ' + cmake_version) +endif diff --git a/meson/test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt b/meson/test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt new file mode 100644 index 000000000..1ffe88267 --- /dev/null +++ b/meson/test cases/fortran/15 include/subprojects/cmake_inc/CMakeLists.txt @@ -0,0 +1,4 @@ +cmake_minimum_required(VERSION 3.17) +project(cmake_inc LANGUAGES Fortran) + +add_executable(main main.f90) diff --git a/meson/test cases/fortran/15 include/subprojects/cmake_inc/main.f90 b/meson/test cases/fortran/15 include/subprojects/cmake_inc/main.f90 new file mode 100644 index 000000000..dd2991d03 --- /dev/null +++ b/meson/test cases/fortran/15 include/subprojects/cmake_inc/main.f90 @@ -0,0 +1,9 @@ +program test_subproject_inc + +implicit none + +include 'thousand.f90' + +if (thousand /= 1000) error stop 'did not include properly' + +end program diff --git a/meson/test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90 b/meson/test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90 new file mode 100644 index 000000000..08a4048bc --- /dev/null +++ b/meson/test cases/fortran/15 include/subprojects/cmake_inc/thousand.f90 @@ -0,0 +1 @@ +integer, parameter :: thousand = 1000 diff --git a/meson/test cases/fortran/15 include/timestwo.f90 b/meson/test cases/fortran/15 include/timestwo.f90 new file mode 100644 index 000000000..0e2d5ac39 --- /dev/null +++ b/meson/test cases/fortran/15 include/timestwo.f90 @@ -0,0 +1,2 @@ +x = 2*x +y = y+1 \ No newline at end of file diff --git a/meson/test cases/fortran/16 openmp/main.f90 b/meson/test cases/fortran/16 openmp/main.f90 new file mode 100644 index 000000000..26b792fe8 --- /dev/null +++ b/meson/test cases/fortran/16 openmp/main.f90 @@ -0,0 +1,18 @@ +program main +use, intrinsic :: iso_fortran_env, only: stderr=>error_unit +use omp_lib, only: omp_get_max_threads +implicit none + +integer :: N, ierr +character(80) :: buf ! can't be allocatable in this use case. Just set arbitrarily large. + +call get_environment_variable('OMP_NUM_THREADS', buf, status=ierr) +if (ierr/=0) error stop 'environment variable OMP_NUM_THREADS could not be read' +read(buf,*) N + +if (omp_get_max_threads() /= N) then + write(stderr, *) 'Max Fortran threads: ', omp_get_max_threads(), '!=', N + error stop +endif + +end program diff --git a/meson/test cases/fortran/16 openmp/meson.build b/meson/test cases/fortran/16 openmp/meson.build new file mode 100644 index 000000000..f021ce2d8 --- /dev/null +++ b/meson/test cases/fortran/16 openmp/meson.build @@ -0,0 +1,34 @@ +# This test is complementary to and extends "common/190 openmp" so that +# we can examine more compilers and options than would be warranted in +# the common test where C/C++ must also be handled. 
+project('openmp', 'fortran', + meson_version: '>= 0.46') + + +fc = meson.get_compiler('fortran') +if fc.get_id() == 'gcc' and fc.version().version_compare('<4.2.0') + error('MESON_SKIP_TEST gcc is too old to support OpenMP.') +endif +if host_machine.system() == 'darwin' + error('MESON_SKIP_TEST macOS does not support OpenMP.') +endif + +openmp = dependency('openmp') + +env = environment() +env.set('OMP_NUM_THREADS', '2') + +exef = executable('exef', + 'main.f90', + dependencies : [openmp]) +test('OpenMP Fortran', exef, env : env) + +openmp_f = dependency('openmp', language : 'fortran') +exe_f = executable('exe_f', + 'main.f90', + dependencies : [openmp_f]) +test('OpenMP Fortran-specific', exe_f, env : env) + + +# Check we can apply a version constraint +dependency('openmp', version: '>=@0@'.format(openmp.version())) diff --git a/meson/test cases/fortran/17 add_languages/meson.build b/meson/test cases/fortran/17 add_languages/meson.build new file mode 100644 index 000000000..e7de1808b --- /dev/null +++ b/meson/test cases/fortran/17 add_languages/meson.build @@ -0,0 +1,5 @@ +project('add_lang_fortran') + +# catch bug where Fortran compiler is found with project('foo', 'fortran') but +# not by add_languages('fortran') +assert(add_languages('fortran'), 'these tests assume Fortran compiler can be found') \ No newline at end of file diff --git a/meson/test cases/fortran/18 first_arg/main.f90 b/meson/test cases/fortran/18 first_arg/main.f90 new file mode 100644 index 000000000..6ea28b1b5 --- /dev/null +++ b/meson/test cases/fortran/18 first_arg/main.f90 @@ -0,0 +1,3 @@ +program main +i = 3 +end program diff --git a/meson/test cases/fortran/18 first_arg/meson.build b/meson/test cases/fortran/18 first_arg/meson.build new file mode 100644 index 000000000..63021f2a3 --- /dev/null +++ b/meson/test cases/fortran/18 first_arg/meson.build @@ -0,0 +1,46 @@ +project('fortran_args', 'fortran') + +fc = meson.get_compiler('fortran') + +if fc.get_id() == 'intel-cl' + is_arg = '/O2' + useless = '/DFOO' +else + is_arg = '-O2' + useless = '-DFOO' +endif + +isnt_arg = '-fiambroken' + +assert(fc.has_argument(is_arg), 'Arg that should have worked does not work.') +assert(not fc.has_argument(isnt_arg), 'Arg that should be broken is not.') + +assert(fc.get_supported_arguments([is_arg, isnt_arg, useless]) == [is_arg, useless], 'Arg filtering returned different result.') + +# Have useless at the end to ensure that the search goes from front to back. 
+l1 = fc.first_supported_argument([isnt_arg, is_arg, isnt_arg, useless]) +l2 = fc.first_supported_argument(isnt_arg, isnt_arg, isnt_arg) + +assert(l1.length() == 1, 'First supported returned wrong result.') +assert(l1.get(0) == is_arg, 'First supported returned wrong argument.') +assert(l2.length() == 0, 'First supported did not return empty array.') + +# --- test with an actual program, here for implicit none + +in0 = fc.first_supported_argument('-fimplicit-none', '-Mdclchk', '/warn:declarations', '-warn').get(0, '') +impnone = { +'intel-cl': '/warn:declarations', +'intel': '-warn', +'gcc': '-fimplicit-none', +'pgi': '-Mdclchk', +} + +arg = impnone.get(fc.get_id(), '') +if arg != '' + assert(in0 == arg, 'implicit none argument ' + arg + ' not matching ' + in0) +endif + +in1 = fc.get_supported_arguments('-fimplicit-none', '/warn:declarations', '/warn:errors', '-Mdclchk') +if in1.length() > 0 + assert(not fc.compiles(files('main.f90'), args: in1, name:'will fail implicit none'), 'implicit none should have failed') +endif diff --git a/meson/test cases/fortran/19 fortran_std/legacy.f b/meson/test cases/fortran/19 fortran_std/legacy.f new file mode 100644 index 000000000..014bcc1d0 --- /dev/null +++ b/meson/test cases/fortran/19 fortran_std/legacy.f @@ -0,0 +1,8 @@ + program main + ! non-integer loop indices are deleted in Fortran 95 standard + real a + + do 10 a=0,0.5,0.1 +10 continue + + end program diff --git a/meson/test cases/fortran/19 fortran_std/meson.build b/meson/test cases/fortran/19 fortran_std/meson.build new file mode 100644 index 000000000..f46f8ffe4 --- /dev/null +++ b/meson/test cases/fortran/19 fortran_std/meson.build @@ -0,0 +1,27 @@ +project('FortranStd', 'fortran', + default_options: ['warning_level=0']) +# As with C and C++, each Fortran compiler + version has a subset of supported Fortran standards +# Additionally, a necessary option for non-standard Fortran projects is the "legacy" +# option, which allows non-standard syntax and behavior quirks. +# Thus "legacy" is a necessity for some old but important Fortran projects. +# By default, popular Fortran compilers disallow these quirks without "legacy" option. + +fc = meson.get_compiler('fortran') + +executable('stdnone', 'std95.f90') + +executable('std_legacy', 'legacy.f', override_options : ['fortran_std=legacy']) + +executable('std_95', 'std95.f90', override_options : ['fortran_std=f95']) + +executable('std_f2003', 'std2003.f90', override_options : ['fortran_std=f2003']) + +executable('std_f2008', 'std2008.f90', override_options : ['fortran_std=f2008']) + +if fc.get_id() == 'gcc' + if fc.version().version_compare('>=8.0') + executable('std_f2018', 'std2018.f90', override_options : ['fortran_std=f2018']) + endif +else + executable('std_f2018', 'std2018.f90', override_options : ['fortran_std=f2018']) +endif \ No newline at end of file diff --git a/meson/test cases/fortran/19 fortran_std/std2003.f90 b/meson/test cases/fortran/19 fortran_std/std2003.f90 new file mode 100644 index 000000000..0382192d8 --- /dev/null +++ b/meson/test cases/fortran/19 fortran_std/std2003.f90 @@ -0,0 +1,37 @@ +program main +use, intrinsic :: iso_fortran_env, only : error_unit +implicit none + +! 
http://fortranwiki.org/fortran/show/Real+precision +integer, parameter :: sp = selected_real_kind(6, 37) +integer, parameter :: dp = selected_real_kind(15, 307) + +real(sp) :: a32 +real(dp) :: a64 + +real(sp), parameter :: pi32 = 4*atan(1._sp) +real(dp), parameter :: pi64 = 4*atan(1._dp) + +if (pi32 == pi64) stop 1 + +call timestwo(a32) +call timestwo(a64) + +contains + +elemental subroutine timestwo(a) + +class(*), intent(inout) :: a + +select type (a) + type is (real(sp)) + a = 2*a + type is (real(dp)) + a = 2*a + type is (integer) + a = 2*a +end select + +end subroutine timestwo + +end program diff --git a/meson/test cases/fortran/19 fortran_std/std2008.f90 b/meson/test cases/fortran/19 fortran_std/std2008.f90 new file mode 100644 index 000000000..750173e1b --- /dev/null +++ b/meson/test cases/fortran/19 fortran_std/std2008.f90 @@ -0,0 +1,33 @@ +program main +use, intrinsic :: iso_fortran_env, only : error_unit, sp=>real32, dp=>real64 +implicit none + +real(sp) :: a32 +real(dp) :: a64 + +real(sp), parameter :: pi32 = 4*atan(1._sp) +real(dp), parameter :: pi64 = 4*atan(1._dp) + +if (pi32 == pi64) error stop 'real32 values generally do not exactly equal real64 values' + +call timestwo(a32) +call timestwo(a64) + +contains + +elemental subroutine timestwo(a) + +class(*), intent(inout) :: a + +select type (a) + type is (real(sp)) + a = 2*a + type is (real(dp)) + a = 2*a + type is (integer) + a = 2*a +end select + +end subroutine timestwo + +end program diff --git a/meson/test cases/fortran/19 fortran_std/std2018.f90 b/meson/test cases/fortran/19 fortran_std/std2018.f90 new file mode 100644 index 000000000..34fad50bf --- /dev/null +++ b/meson/test cases/fortran/19 fortran_std/std2018.f90 @@ -0,0 +1,35 @@ +program main +use, intrinsic :: iso_fortran_env, only : error_unit, sp=>real32, dp=>real64 +implicit none + +real(sp) :: a32 +real(dp) :: a64 + +real(sp), parameter :: pi32 = 4*atan(1._sp) +real(dp), parameter :: pi64 = 4*atan(1._dp) + +if (pi32 == pi64) error stop 'real32 values generally do not exactly equal real64 values' + +call timestwo(a32) +call timestwo(a64) + +contains + +elemental subroutine timestwo(a) + +class(*), intent(inout) :: a + +select type (a) + type is (real(sp)) + a = 2*a + type is (real(dp)) + a = 2*a + type is (integer) + a = 2*a + class default + error stop 'requires real32, real64 or integer' +end select + +end subroutine timestwo + +end program diff --git a/meson/test cases/fortran/19 fortran_std/std95.f90 b/meson/test cases/fortran/19 fortran_std/std95.f90 new file mode 100644 index 000000000..2837da86a --- /dev/null +++ b/meson/test cases/fortran/19 fortran_std/std95.f90 @@ -0,0 +1,14 @@ +program main +implicit none + +integer :: i, j +integer, parameter :: N=3 +real :: A(N,N) + +A = 0 + +forall (i=1:N, j=1:N) + A(i,j) = 1 +end forall + +end program diff --git a/meson/test cases/fortran/2 modules/comment_mod.f90 b/meson/test cases/fortran/2 modules/comment_mod.f90 new file mode 100644 index 000000000..917f6be9a --- /dev/null +++ b/meson/test cases/fortran/2 modules/comment_mod.f90 @@ -0,0 +1,6 @@ +module line ! 
inline comment +implicit none + +real :: length + +end module line diff --git a/meson/test cases/fortran/2 modules/meson.build b/meson/test cases/fortran/2 modules/meson.build new file mode 100644 index 000000000..c9bfd8dae --- /dev/null +++ b/meson/test cases/fortran/2 modules/meson.build @@ -0,0 +1,9 @@ +project('modules', 'fortran', + default_options : ['default_library=static']) + +commented = library('commented', 'comment_mod.f90') + +# Have one file with an upper case file extension. +e = executable('modprog', 'mymod.F90', 'prog.f90', + link_with: commented) +test('moduletest', e) diff --git a/meson/test cases/fortran/2 modules/mymod.F90 b/meson/test cases/fortran/2 modules/mymod.F90 new file mode 100644 index 000000000..a45f5c982 --- /dev/null +++ b/meson/test cases/fortran/2 modules/mymod.F90 @@ -0,0 +1,8 @@ +! module circle to be sure module regex doesn't allow commented modules + +module circle +implicit none + +real, parameter :: pi = 4.*atan(1.) +real :: radius +end module circle diff --git a/meson/test cases/fortran/2 modules/prog.f90 b/meson/test cases/fortran/2 modules/prog.f90 new file mode 100644 index 000000000..ffdff0506 --- /dev/null +++ b/meson/test cases/fortran/2 modules/prog.f90 @@ -0,0 +1,12 @@ +program main +use circle, only: pi +use line, only: length +implicit none + +print *,'pi=',pi + +length = pi +print *, length + +end program + diff --git a/meson/test cases/fortran/20 buildtype/main.f90 b/meson/test cases/fortran/20 buildtype/main.f90 new file mode 100644 index 000000000..ecc7d618c --- /dev/null +++ b/meson/test cases/fortran/20 buildtype/main.f90 @@ -0,0 +1,2 @@ +program main +end program diff --git a/meson/test cases/fortran/20 buildtype/meson.build b/meson/test cases/fortran/20 buildtype/meson.build new file mode 100644 index 000000000..2be633758 --- /dev/null +++ b/meson/test cases/fortran/20 buildtype/meson.build @@ -0,0 +1,5 @@ +# checks for unexpected behavior on non-default buildtype and warning_level +project('build type Fortran', 'fortran', + default_options: ['buildtype=release', 'warning_level=3']) + +executable('main', 'main.f90') diff --git a/meson/test cases/fortran/21 install static/main.f90 b/meson/test cases/fortran/21 install static/main.f90 new file mode 100644 index 000000000..d0c67fe8e --- /dev/null +++ b/meson/test cases/fortran/21 install static/main.f90 @@ -0,0 +1,5 @@ +program main +use main_lib +implicit none +call main_hello() +end program diff --git a/meson/test cases/fortran/21 install static/main_lib.f90 b/meson/test cases/fortran/21 install static/main_lib.f90 new file mode 100644 index 000000000..5f3cb451a --- /dev/null +++ b/meson/test cases/fortran/21 install static/main_lib.f90 @@ -0,0 +1,16 @@ +module main_lib + + use static_hello + implicit none + + private + public :: main_hello + + contains + + subroutine main_hello + call static_say_hello() + print *, "Main hello routine finished." 
+ end subroutine main_hello + +end module main_lib diff --git a/meson/test cases/fortran/21 install static/meson.build b/meson/test cases/fortran/21 install static/meson.build new file mode 100644 index 000000000..b4d3e40b0 --- /dev/null +++ b/meson/test cases/fortran/21 install static/meson.build @@ -0,0 +1,20 @@ +# Based on 'fortran/5 static', but: +# - Uses a subproject dependency +# - Is an install:true static library to trigger certain codepath (promotion to link_whole) +# - Does fortran code 'generation' with configure_file +# - Uses .F90 ext (capital F typically denotes a dependence on preprocessor treatment, which however is not used) +project('try-static-subproject-dependency', 'fortran') + +static_dep = dependency('static_hello', fallback: ['static_hello', 'static_hello_dep']) + +mainsrc = 'main_lib.f90' +mainsrc = configure_file( + copy: true, + input: mainsrc, + output: 'main_lib_output.F90' +) +main_lib = library('mainstatic', mainsrc, dependencies: static_dep, install: true) +main_dep = declare_dependency(link_with: main_lib) + +main_exe = executable('main_exe', 'main.f90', dependencies: main_dep) +test('static_subproject_test', main_exe) diff --git a/meson/test cases/fortran/21 install static/subprojects/static_hello/meson.build b/meson/test cases/fortran/21 install static/subprojects/static_hello/meson.build new file mode 100644 index 000000000..5e13bae55 --- /dev/null +++ b/meson/test cases/fortran/21 install static/subprojects/static_hello/meson.build @@ -0,0 +1,12 @@ +project('static-hello', 'fortran') + +# staticlibsource = 'static_hello.f90' +staticlibsource = configure_file( + copy: true, + input: 'static_hello.f90', + output: 'static_hello_output.F90' +) + +static_hello_lib = static_library('static_hello', staticlibsource, install: false) + +static_hello_dep = declare_dependency(link_with: static_hello_lib) diff --git a/meson/test cases/fortran/21 install static/subprojects/static_hello/static_hello.f90 b/meson/test cases/fortran/21 install static/subprojects/static_hello/static_hello.f90 new file mode 100644 index 000000000..5407560d7 --- /dev/null +++ b/meson/test cases/fortran/21 install static/subprojects/static_hello/static_hello.f90 @@ -0,0 +1,17 @@ +module static_hello +implicit none + +private +public :: static_say_hello + +interface static_say_hello + module procedure say_hello +end interface static_say_hello + +contains + +subroutine say_hello + print *, "Static library called." 
+end subroutine say_hello + +end module static_hello diff --git a/meson/test cases/fortran/21 install static/test.json b/meson/test cases/fortran/21 install static/test.json new file mode 100644 index 000000000..aff714730 --- /dev/null +++ b/meson/test cases/fortran/21 install static/test.json @@ -0,0 +1,10 @@ +{ + "installed": [ + {"file": "usr/lib/libmainstatic.a", "type": "file"} + ], + "matrix": { + "options": { + "default_library": [ { "val": "static" } ] + } + } +} diff --git a/meson/test cases/fortran/3 module procedure/meson.build b/meson/test cases/fortran/3 module procedure/meson.build new file mode 100644 index 000000000..a59001570 --- /dev/null +++ b/meson/test cases/fortran/3 module procedure/meson.build @@ -0,0 +1,5 @@ +project('Fortran 2003 use statement, in same file', 'fortran', + meson_version: '>= 0.50.0') + +e = executable('use_syntax', 'use_syntax.f90') +test('Fortran 2003 use syntax', e) diff --git a/meson/test cases/fortran/3 module procedure/use_syntax.f90 b/meson/test cases/fortran/3 module procedure/use_syntax.f90 new file mode 100644 index 000000000..2f3a9e652 --- /dev/null +++ b/meson/test cases/fortran/3 module procedure/use_syntax.f90 @@ -0,0 +1,31 @@ +module circle +implicit none + +integer :: x +real :: radius + +interface default + module procedure timestwo +end interface + +contains + +elemental integer function timestwo(x) result(y) + integer, intent(in) :: x + y = 2*x +end function +end module circle + +program prog + +use, non_intrinsic :: circle, only: timestwo, x + +implicit none + +x = 3 + +if (timestwo(x) /= 6) error stop 'fortran module procedure problem' + +print *,'OK: Fortran module procedure' + +end program prog diff --git a/meson/test cases/fortran/4 self dependency/meson.build b/meson/test cases/fortran/4 self dependency/meson.build new file mode 100644 index 000000000..e791284ff --- /dev/null +++ b/meson/test cases/fortran/4 self dependency/meson.build @@ -0,0 +1,8 @@ +project('selfdep', 'fortran') + +e = executable('selfdep', 'selfdep.f90') +test('selfdep', e) + +library('selfmod', 'src/selfdep_mod.f90') + +subproject('sub1') diff --git a/meson/test cases/fortran/4 self dependency/selfdep.f90 b/meson/test cases/fortran/4 self dependency/selfdep.f90 new file mode 100644 index 000000000..1a7135300 --- /dev/null +++ b/meson/test cases/fortran/4 self dependency/selfdep.f90 @@ -0,0 +1,18 @@ +MODULE geom + +type :: circle + REAL :: Pi = 4.*atan(1.) + REAL :: radius +end type circle +END MODULE geom + +PROGRAM prog + +use geom, only : circle +IMPLICIT NONE + +type(circle) :: ell + +ell%radius = 3. 
+ +END PROGRAM prog diff --git a/meson/test cases/fortran/4 self dependency/src/selfdep_mod.f90 b/meson/test cases/fortran/4 self dependency/src/selfdep_mod.f90 new file mode 100644 index 000000000..4aa00576a --- /dev/null +++ b/meson/test cases/fortran/4 self dependency/src/selfdep_mod.f90 @@ -0,0 +1,6 @@ +module a +end module a + +module b +use a +end module b diff --git a/meson/test cases/fortran/4 self dependency/subprojects/sub1/main.f90 b/meson/test cases/fortran/4 self dependency/subprojects/sub1/main.f90 new file mode 100644 index 000000000..873427db6 --- /dev/null +++ b/meson/test cases/fortran/4 self dependency/subprojects/sub1/main.f90 @@ -0,0 +1,6 @@ +module a +end + +program b + use a +end diff --git a/meson/test cases/fortran/4 self dependency/subprojects/sub1/meson.build b/meson/test cases/fortran/4 self dependency/subprojects/sub1/meson.build new file mode 100644 index 000000000..606f338c8 --- /dev/null +++ b/meson/test cases/fortran/4 self dependency/subprojects/sub1/meson.build @@ -0,0 +1,3 @@ +project('subproject self-def', 'fortran') + +library('subself', 'main.f90') diff --git a/meson/test cases/fortran/5 static/main.f90 b/meson/test cases/fortran/5 static/main.f90 new file mode 100644 index 000000000..4db2861ee --- /dev/null +++ b/meson/test cases/fortran/5 static/main.f90 @@ -0,0 +1,6 @@ +program main +use static_hello +implicit none + +call static_say_hello() +end program diff --git a/meson/test cases/fortran/5 static/meson.build b/meson/test cases/fortran/5 static/meson.build new file mode 100644 index 000000000..ab9d3c4b4 --- /dev/null +++ b/meson/test cases/fortran/5 static/meson.build @@ -0,0 +1,6 @@ +project('try-static-library', 'fortran') + +static_hello = static_library('static_hello', 'static_hello.f90') + +exe = executable('test_exe', 'main.f90', link_with : static_hello) +test('static-fortran', exe) diff --git a/meson/test cases/fortran/5 static/static_hello.f90 b/meson/test cases/fortran/5 static/static_hello.f90 new file mode 100644 index 000000000..5407560d7 --- /dev/null +++ b/meson/test cases/fortran/5 static/static_hello.f90 @@ -0,0 +1,17 @@ +module static_hello +implicit none + +private +public :: static_say_hello + +interface static_say_hello + module procedure say_hello +end interface static_say_hello + +contains + +subroutine say_hello + print *, "Static library called." +end subroutine say_hello + +end module static_hello diff --git a/meson/test cases/fortran/6 dynamic/dynamic.f90 b/meson/test cases/fortran/6 dynamic/dynamic.f90 new file mode 100644 index 000000000..6a1f3590f --- /dev/null +++ b/meson/test cases/fortran/6 dynamic/dynamic.f90 @@ -0,0 +1,17 @@ +module dynamic +implicit none + +private +public :: hello + +interface hello + module procedure say +end interface hello + +contains + +subroutine say + print *, "Hello from shared library." 
+end subroutine say + +end module dynamic diff --git a/meson/test cases/fortran/6 dynamic/main.f90 b/meson/test cases/fortran/6 dynamic/main.f90 new file mode 100644 index 000000000..ba2e2d2fc --- /dev/null +++ b/meson/test cases/fortran/6 dynamic/main.f90 @@ -0,0 +1,6 @@ +program main +use dynamic, only: hello +implicit none + +call hello() +end program diff --git a/meson/test cases/fortran/6 dynamic/meson.build b/meson/test cases/fortran/6 dynamic/meson.build new file mode 100644 index 000000000..413223b0a --- /dev/null +++ b/meson/test cases/fortran/6 dynamic/meson.build @@ -0,0 +1,13 @@ +project('dynamic_fortran', 'fortran') + +fcid = meson.get_compiler('fortran').get_id() +if fcid == 'intel-cl' or (host_machine.system() == 'windows' and fcid == 'pgi') + error('MESON_SKIP_TEST: non-Gfortran Windows Fortran compilers do not do shared libraries in a Fortran standard way') + # !DEC$ ATTRIBUTES DLLEXPORT must be used! + # https://software.intel.com/en-us/node/535306 + # https://www.pgroup.com/resources/docs/19.4/x86/pgi-user-guide/index.htm#lib-dynlnk-bld-dll-fort +endif + +dynamic = shared_library('dynamic', 'dynamic.f90') +exe = executable('test_exe', 'main.f90', link_with : dynamic) +test('dynamic-fortran', exe) diff --git a/meson/test cases/fortran/7 generated/meson.build b/meson/test cases/fortran/7 generated/meson.build new file mode 100644 index 000000000..b555b1744 --- /dev/null +++ b/meson/test cases/fortran/7 generated/meson.build @@ -0,0 +1,29 @@ +# Tests whether fortran sources files created during configuration are properly +# scanned for dependency information + +project('generated', 'fortran', + default_options : ['default_library=static']) + +conf_data = configuration_data() +conf_data.set('ONE', 1) +conf_data.set('TWO', 2) +conf_data.set('THREE', 3) + +configure_file(input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data) +# Manually build absolute path to source file to test +# https://github.com/mesonbuild/meson/issues/7265 +three = library('mod3', meson.current_build_dir() / 'mod3.f90') + +templates_basenames = ['mod2', 'mod1'] +generated_sources = [] +foreach template_basename : templates_basenames + infilename = '@0@.fpp'.format(template_basename) + outfilename = '@0@.f90'.format(template_basename) + outfile = configure_file( + input : infilename, output : outfilename, configuration : conf_data) + generated_sources += [outfile] +endforeach + +sources = ['prog.f90'] + generated_sources +exe = executable('generated', sources, link_with: three) +test('generated', exe) diff --git a/meson/test cases/fortran/7 generated/mod1.fpp b/meson/test cases/fortran/7 generated/mod1.fpp new file mode 100644 index 000000000..c4decf68f --- /dev/null +++ b/meson/test cases/fortran/7 generated/mod1.fpp @@ -0,0 +1,6 @@ +module mod1 +implicit none + +integer, parameter :: modval1 = @ONE@ + +end module mod1 diff --git a/meson/test cases/fortran/7 generated/mod2.fpp b/meson/test cases/fortran/7 generated/mod2.fpp new file mode 100644 index 000000000..78ceae45d --- /dev/null +++ b/meson/test cases/fortran/7 generated/mod2.fpp @@ -0,0 +1,7 @@ +module mod2 +use mod1, only : modval1 +implicit none + +integer, parameter :: modval2 = @TWO@ + +end module mod2 diff --git a/meson/test cases/fortran/7 generated/mod3.fpp b/meson/test cases/fortran/7 generated/mod3.fpp new file mode 100644 index 000000000..ab3db6555 --- /dev/null +++ b/meson/test cases/fortran/7 generated/mod3.fpp @@ -0,0 +1,6 @@ +module mod3 +implicit none + +integer, parameter :: modval3 = @THREE@ + +end module mod3 diff 
--git a/meson/test cases/fortran/7 generated/prog.f90 b/meson/test cases/fortran/7 generated/prog.f90 new file mode 100644 index 000000000..6ee0bca15 --- /dev/null +++ b/meson/test cases/fortran/7 generated/prog.f90 @@ -0,0 +1,8 @@ +program generated +use mod2, only : modval1, modval2 +use mod3, only : modval3 +implicit none + +if (modval1 + modval2 + modval3 /= 6) error stop + +end program generated diff --git a/meson/test cases/fortran/8 module names/meson.build b/meson/test cases/fortran/8 module names/meson.build new file mode 100644 index 000000000..632c59788 --- /dev/null +++ b/meson/test cases/fortran/8 module names/meson.build @@ -0,0 +1,6 @@ +project('mod_name_case', 'fortran') + +sources = ['test.f90', 'mod1.f90', 'mod2.f90'] + +exe = executable('mod_name_case', sources) +test('mod_name_case', exe) diff --git a/meson/test cases/fortran/8 module names/mod1.f90 b/meson/test cases/fortran/8 module names/mod1.f90 new file mode 100644 index 000000000..29cd9f443 --- /dev/null +++ b/meson/test cases/fortran/8 module names/mod1.f90 @@ -0,0 +1,6 @@ +module MyMod1 +implicit none + +integer, parameter :: myModVal1 = 1 + +end module MyMod1 diff --git a/meson/test cases/fortran/8 module names/mod2.f90 b/meson/test cases/fortran/8 module names/mod2.f90 new file mode 100644 index 000000000..2087750de --- /dev/null +++ b/meson/test cases/fortran/8 module names/mod2.f90 @@ -0,0 +1,6 @@ +module mymod2 +implicit none + +integer, parameter :: myModVal2 = 2 + +end module mymod2 diff --git a/meson/test cases/fortran/8 module names/test.f90 b/meson/test cases/fortran/8 module names/test.f90 new file mode 100644 index 000000000..60ff16e90 --- /dev/null +++ b/meson/test cases/fortran/8 module names/test.f90 @@ -0,0 +1,9 @@ +program main +use mymod1 +use MyMod2 ! test inline comment + +implicit none + +integer, parameter :: testVar = myModVal1 + myModVal2 + +end program diff --git a/meson/test cases/fortran/9 cpp/fortran.f b/meson/test cases/fortran/9 cpp/fortran.f new file mode 100644 index 000000000..255872c91 --- /dev/null +++ b/meson/test cases/fortran/9 cpp/fortran.f @@ -0,0 +1,11 @@ + function fortran() bind(C) + use, intrinsic :: iso_c_binding, only: dp=>c_double + implicit none + + real(dp) :: r, fortran + + call random_number(r) + + fortran = 2._dp**r + + end function fortran diff --git a/meson/test cases/fortran/9 cpp/main.c b/meson/test cases/fortran/9 cpp/main.c new file mode 100644 index 000000000..c1750ad69 --- /dev/null +++ b/meson/test cases/fortran/9 cpp/main.c @@ -0,0 +1,8 @@ +#include <stdio.h> + +double fortran(void); + +int main(void) { + printf("FORTRAN gave us this number: %lf.\n", fortran()); + return 0; +} diff --git a/meson/test cases/fortran/9 cpp/main.cpp b/meson/test cases/fortran/9 cpp/main.cpp new file mode 100644 index 000000000..534a23aa1 --- /dev/null +++ b/meson/test cases/fortran/9 cpp/main.cpp @@ -0,0 +1,8 @@ +#include <iostream> + +extern "C" double fortran(); + +int main(void) { + std::cout << "FORTRAN gave us this number: " << fortran() << '\n'; + return 0; +} diff --git a/meson/test cases/fortran/9 cpp/meson.build b/meson/test cases/fortran/9 cpp/meson.build new file mode 100644 index 000000000..f96944b79 --- /dev/null +++ b/meson/test cases/fortran/9 cpp/meson.build @@ -0,0 +1,33 @@ +project('C, C++ and Fortran', 'c', 'cpp', 'fortran') + +cpp = meson.get_compiler('cpp') +fc = meson.get_compiler('fortran') + +if cpp.get_id() == 'clang' + error('MESON_SKIP_TEST Clang C++ does not find -lgfortran for some reason.') +endif + +if build_machine.system() == 'windows' and cpp.get_id() !=
fc.get_id() + error('MESON_SKIP_TEST mixing gfortran with non-GNU C++ does not work.') +endif + +link_with = [] +if fc.get_id() == 'intel' + link_with += fc.find_library('ifport') +endif + +e = executable( + 'cfort', + ['main.c', 'fortran.f'], + dependencies : link_with, +) + +test('C and Fortran', e) + +e2 = executable( + 'cppfort', + ['main.cpp', 'fortran.f'], + dependencies : link_with, +) + +test('C++ and Fortran', e2) diff --git a/meson/test cases/fpga/1 simple/meson.build b/meson/test cases/fpga/1 simple/meson.build new file mode 100644 index 000000000..eff8088ba --- /dev/null +++ b/meson/test cases/fpga/1 simple/meson.build @@ -0,0 +1,9 @@ +project('lattice', 'c') + +is = import('unstable_icestorm') + +is.project('spin', + 'spin.v', + constraint_file : 'spin.pcf', +) + diff --git a/meson/test cases/fpga/1 simple/spin.pcf b/meson/test cases/fpga/1 simple/spin.pcf new file mode 100644 index 000000000..de06f5ddb --- /dev/null +++ b/meson/test cases/fpga/1 simple/spin.pcf @@ -0,0 +1,6 @@ +set_io LED1 99 +set_io LED2 98 +set_io LED3 97 +set_io LED4 96 +set_io LED5 95 +set_io clk 21 diff --git a/meson/test cases/fpga/1 simple/spin.v b/meson/test cases/fpga/1 simple/spin.v new file mode 100644 index 000000000..edc40c426 --- /dev/null +++ b/meson/test cases/fpga/1 simple/spin.v @@ -0,0 +1,32 @@ + +module top(input clk, output LED1, output LED2, output LED3, output LED4, output LED5); + + reg ready = 0; + reg [23:0] divider; + reg [3:0] spin; + + always @(posedge clk) begin + if (ready) + begin + if (divider == 6000000) + begin + divider <= 0; + spin <= {spin[2], spin[3], spin[0], spin[1]}; + end + else + divider <= divider + 1; + end + else + begin + ready <= 1; + spin <= 4'b1010; + divider <= 0; + end + end + + assign LED1 = spin[0]; + assign LED2 = spin[1]; + assign LED3 = spin[2]; + assign LED4 = spin[3]; + assign LED5 = 1; +endmodule diff --git a/meson/test cases/frameworks/1 boost/extralib.cpp b/meson/test cases/frameworks/1 boost/extralib.cpp new file mode 100644 index 000000000..e5ab1b000 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/extralib.cpp @@ -0,0 +1,27 @@ +#define _XOPEN_SOURCE 500 + +#include +#include +#include +#include +#include + +using namespace std; +namespace logging = boost::log; + +void InitLogger() { + logging::add_common_attributes(); + logging::register_simple_formatter_factory("Severity"); + string log_format = "%TimeStamp% [%Severity%] - %Message%"; + + logging::add_console_log( + cout, + logging::keywords::format = log_format + ); +} + +int main(int argc, char **argv) { + InitLogger(); + BOOST_LOG_TRIVIAL(trace) << "SOMETHING"; + return 0; +} diff --git a/meson/test cases/frameworks/1 boost/linkexe.cc b/meson/test cases/frameworks/1 boost/linkexe.cc new file mode 100644 index 000000000..e00edee19 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/linkexe.cc @@ -0,0 +1,18 @@ +#define _XOPEN_SOURCE 500 + +#include + +boost::recursive_mutex m; + +struct callable { + void operator()() { + boost::recursive_mutex::scoped_lock l(m); + }; +}; + +int main(int argc, char **argv) { + callable x; + boost::thread thr(x); + thr.join(); + return 0; +} diff --git a/meson/test cases/frameworks/1 boost/meson.build b/meson/test cases/frameworks/1 boost/meson.build new file mode 100644 index 000000000..83570f09f --- /dev/null +++ b/meson/test cases/frameworks/1 boost/meson.build @@ -0,0 +1,72 @@ +# this test requires the following on Ubuntu: libboost-{system,python,log,thread,test}-dev +project('boosttest', 'cpp', + default_options : ['cpp_std=c++14']) + +s = 
get_option('static') + +dep = dependency('boost', static: s, required: false) +if not dep.found() + error('MESON_SKIP_TEST boost not found.') +endif + +# We want to have multiple separate configurations of Boost +# within one project. The need to be independent of each other. +# Use one without a library dependency and one with it. + +linkdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time']) +testdep = dependency('boost', static: s, modules : ['unit_test_framework']) +nomoddep = dependency('boost', static: s) +extralibdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time', 'log_setup', 'log', 'filesystem', 'regex']) +notfound = dependency('boost', static: s, modules : ['this_should_not_exist_on_any_systen'], required: false) + +assert(not notfound.found()) + +pymod = import('python') +python2 = pymod.find_installation('python2', required: false , disabler: true) +python3 = pymod.find_installation('python3', required: host_machine.system() == 'linux', disabler: true) +python2dep = python2.dependency(required: false , embed: true, disabler: true) +python3dep = python3.dependency(required: host_machine.system() == 'linux', embed: true, disabler: true) + +# compile python 2/3 modules only if we found a corresponding python version +if(python2dep.found() and host_machine.system() == 'linux' and not s) + bpython2dep = dependency('boost', static: s, modules : ['python'], required: false, disabler: true) +else + python2dep = disabler() + bpython2dep = disabler() +endif + +if(python3dep.found() and host_machine.system() == 'linux' and not s) + bpython3dep = dependency('boost', static: s, modules : ['python3']) +else + python3dep = disabler() + bpython3dep = disabler() +endif + +linkexe = executable('linkedexe', 'linkexe.cc', dependencies : linkdep) +unitexe = executable('utf', 'unit_test.cpp', dependencies: testdep) +nomodexe = executable('nomod', 'nomod.cpp', dependencies : nomoddep) +extralibexe = executable('extralibexe', 'extralib.cpp', dependencies : extralibdep) + +# python modules are shared libraries +python2module = shared_library('python2_module', ['python_module.cpp'], dependencies: [python2dep, bpython2dep], name_prefix: '', cpp_args: ['-DMOD_NAME=python2_module']) +python3module = shared_library('python3_module', ['python_module.cpp'], dependencies: [python3dep, bpython3dep], name_prefix: '', cpp_args: ['-DMOD_NAME=python3_module']) + +test('Boost linktest', linkexe, timeout: 60) +test('Boost UTF test', unitexe, timeout: 60) +test('Boost nomod', nomodexe) +if host_machine.system() != 'darwin' or s + # Segfaults on macOS with dynamic linking since Boost 1.73 + # https://github.com/mesonbuild/meson/issues/7535 + test('Boost extralib test', extralibexe) +endif + +# explicitly use the correct python interpreter so that we don't have to provide two different python scripts that have different shebang lines +python2interpreter = find_program(python2.path(), required: false, disabler: true) +test('Boost Python2', python2interpreter, args: ['./test_python_module.py', meson.current_build_dir()], workdir: meson.current_source_dir(), depends: python2module) +python3interpreter = find_program(python3.path(), required: false, disabler: true) +test('Boost Python3', python3interpreter, args: ['./test_python_module.py', meson.current_build_dir()], workdir: meson.current_source_dir(), depends: python3module) + +subdir('partial_dep') + +# check we can apply a version constraint +dependency('boost', static: s, version: '>=@0@'.format(dep.version())) 
diff --git a/meson/test cases/frameworks/1 boost/meson_options.txt b/meson/test cases/frameworks/1 boost/meson_options.txt new file mode 100644 index 000000000..019feaf38 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/meson_options.txt @@ -0,0 +1 @@ +option('static', type: 'boolean', value: false) diff --git a/meson/test cases/frameworks/1 boost/nomod.cpp b/meson/test cases/frameworks/1 boost/nomod.cpp new file mode 100644 index 000000000..55c95b25c --- /dev/null +++ b/meson/test cases/frameworks/1 boost/nomod.cpp @@ -0,0 +1,18 @@ +#include <boost/any.hpp> +#include <iostream> + +boost::any get_any() { + boost::any foobar = 3; + return foobar; +} + +int main(int argc, char **argv) { + boost::any result = get_any(); + if(boost::any_cast<int>(result) == 3) { + std::cout << "Everything is fine in the world.\n"; + return 0; + } else { + std::cout << "Mathematics stopped working.\n"; + return 1; + } +} diff --git a/meson/test cases/frameworks/1 boost/partial_dep/foo.cpp b/meson/test cases/frameworks/1 boost/partial_dep/foo.cpp new file mode 100644 index 000000000..da5870386 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/partial_dep/foo.cpp @@ -0,0 +1,20 @@ +/* Copyright © 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "foo.hpp" + +vec Foo::vector() { + return myvec; +} diff --git a/meson/test cases/frameworks/1 boost/partial_dep/foo.hpp b/meson/test cases/frameworks/1 boost/partial_dep/foo.hpp new file mode 100644 index 000000000..393d3f69e --- /dev/null +++ b/meson/test cases/frameworks/1 boost/partial_dep/foo.hpp @@ -0,0 +1,27 @@ +/* Copyright © 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +typedef boost::fusion::vector vec; + + +class Foo { + public: + Foo() {}; + vec vector(); + private: + const vec myvec = vec(4); +}; diff --git a/meson/test cases/frameworks/1 boost/partial_dep/main.cpp b/meson/test cases/frameworks/1 boost/partial_dep/main.cpp new file mode 100644 index 000000000..f31c5ec75 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/partial_dep/main.cpp @@ -0,0 +1,28 @@ +/* Copyright © 2018 Intel Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include "foo.hpp" + + +int main(void) { + auto foo = Foo(); + vec v = foo.vector(); + std::cout << boost::fusion::at_c<0>(v) << std::endl; + + return 0; +} + diff --git a/meson/test cases/frameworks/1 boost/partial_dep/meson.build b/meson/test cases/frameworks/1 boost/partial_dep/meson.build new file mode 100644 index 000000000..9d481bbfe --- /dev/null +++ b/meson/test cases/frameworks/1 boost/partial_dep/meson.build @@ -0,0 +1,31 @@ +# Copyright © 2018 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +dep_boost = dependency('boost') +dep_boost_headers = dep_boost.partial_dependency(compile_args : true) + +libfoo = static_library( + 'foo', + 'foo.cpp', + dependencies : dep_boost_headers, +) + +exe_external_dep = executable( + 'external_dep', + 'main.cpp', + dependencies : dep_boost, + link_with : libfoo +) + +test('External Dependency', exe_external_dep) diff --git a/meson/test cases/frameworks/1 boost/python_module.cpp b/meson/test cases/frameworks/1 boost/python_module.cpp new file mode 100644 index 000000000..a0f010b51 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/python_module.cpp @@ -0,0 +1,22 @@ +#define PY_SSIZE_T_CLEAN +#include +#include + +struct World +{ + void set(std::string msg) { this->msg = msg; } + std::string greet() { return msg; } + std::string version() { return std::to_string(PY_MAJOR_VERSION) + "." 
+ std::to_string(PY_MINOR_VERSION); } + std::string msg; +}; + + +BOOST_PYTHON_MODULE(MOD_NAME) +{ + using namespace boost::python; + class_<World>("World") + .def("greet", &World::greet) + .def("set", &World::set) + .def("version", &World::version) + ; +} diff --git a/meson/test cases/frameworks/1 boost/test.json b/meson/test cases/frameworks/1 boost/test.json new file mode 100644 index 000000000..3145c80d2 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/test.json @@ -0,0 +1,21 @@ +{ + "matrix": { + "options": { + "static": [ + { "val": "true", "skip_on_env": [ "SKIP_STATIC_BOOST" ] }, + { "val": "false" } + ], + "b_vscrt": [ + { "val": null }, + { "val": "md", "compilers": { "cpp": "msvc" } }, + { "val": "mdd", "compilers": { "cpp": "msvc" } }, + { "val": "mt", "compilers": { "cpp": "msvc" } }, + { "val": "mtd", "compilers": { "cpp": "msvc" } } + ] + }, + "exclude": [ + { "static": "false", "b_vscrt": "mt" }, + { "static": "false", "b_vscrt": "mtd" } + ] + } +} diff --git a/meson/test cases/frameworks/1 boost/test_python_module.py b/meson/test cases/frameworks/1 boost/test_python_module.py new file mode 100644 index 000000000..acf6e42d6 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/test_python_module.py @@ -0,0 +1,27 @@ +import sys +sys.path.append(sys.argv[1]) + +# import compiled python module depending on version of python we are running with +if sys.version_info[0] == 2: + import python2_module + +if sys.version_info[0] == 3: + import python3_module + + +def run(): + msg = 'howdy' + if sys.version_info[0] == 2: + w = python2_module.World() + + if sys.version_info[0] == 3: + w = python3_module.World() + + w.set(msg) + + assert(msg == w.greet()) + version_string = str(sys.version_info[0]) + "." + str(sys.version_info[1]) + assert(version_string == w.version()) + +if __name__ == '__main__': + run() diff --git a/meson/test cases/frameworks/1 boost/unit_test.cpp b/meson/test cases/frameworks/1 boost/unit_test.cpp new file mode 100644 index 000000000..fa1fbaa58 --- /dev/null +++ b/meson/test cases/frameworks/1 boost/unit_test.cpp @@ -0,0 +1,9 @@ +#define BOOST_TEST_MODULE "MesonTest" +#define BOOST_TEST_MAIN +#include <boost/test/unit_test.hpp> + +BOOST_AUTO_TEST_CASE(m_test) { + int x = 2+2; + BOOST_CHECK(true); + BOOST_CHECK_EQUAL(x, 4); +} diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar-docs.sgml b/meson/test cases/frameworks/10 gtk-doc/doc/foobar-docs.sgml new file mode 100644 index 000000000..389431751 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar-docs.sgml @@ -0,0 +1,41 @@ + + +]> + + + Foolib Reference Manual + + for Foobar &version; + + + + Jonny + Example + +
+ unknown@example.com +
+
+
+
+ + 2015 + Foobar corporation holdings ltd + +
+ + + Foobar library + + + This part documents Foobar libs. + + + + + + + +
diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar-docs.sgml b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar-docs.sgml new file mode 100644 index 000000000..6ccd087dc --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar-docs.sgml @@ -0,0 +1,41 @@ + + +]> + + + Foolib Reference Manual + + for Foobar &version; + + + + Jonny + Example + +
+ unknown@example.com +
+
+
+
+ + 2015 + Foobar corporation holdings ltd + +
+ + + Foobar library + + + This part documents Foobar libs. + + + + + + + +
diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar-sections.txt b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar-sections.txt new file mode 100644 index 000000000..d14c8dab0 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar-sections.txt @@ -0,0 +1,16 @@ +
+foo +FooObj +FooObj +FooObjClass +foo_do_something +
+ +
+version +version +FOO_MAJOR_VERSION +FOO_MINOR_VERSION +FOO_MICRO_VERSION +
+ diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar.types b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar.types new file mode 100644 index 000000000..0a9c046f3 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/foobar.types @@ -0,0 +1,4 @@ +% This include is useless it's a regression test for https://github.com/mesonbuild/meson/issues/8744 +#include + +foo_obj_get_type diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/meson.build b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/meson.build new file mode 100644 index 000000000..f4b3724db --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar1/meson.build @@ -0,0 +1,9 @@ +gnome.gtkdoc('foobar', + src_dir : [inc, '.'], + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + dependencies: foo_dep, + # Manually written types file for regression test: + # https://github.com/mesonbuild/meson/issues/8744 + gobject_typesfile: 'foobar.types', + install : true) diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar2/foobar-docs.sgml b/meson/test cases/frameworks/10 gtk-doc/doc/foobar2/foobar-docs.sgml new file mode 100644 index 000000000..95f73efdf --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar2/foobar-docs.sgml @@ -0,0 +1,41 @@ + + +]> + + + Foolib Reference Manual + + for Foobar &version; + + + + Jonny + Example + +
+        <email>unknown@example.com</email>
+
+
+
+  <copyright>
+    <year>2015</year>
+    <holder>Foobar corporation holdings ltd</holder>
+  </copyright>
+ + + Foobar library + + + This part documents Foobar libs. + + + + + + + +
diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar2/meson.build b/meson/test cases/frameworks/10 gtk-doc/doc/foobar2/meson.build new file mode 100644 index 000000000..0b2faa023 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar2/meson.build @@ -0,0 +1,6 @@ +gnome.gtkdoc('foobar2', + src_dir : inc, + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + install : true, + install_dir : 'foobar2') diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar3/foobar-docs.sgml b/meson/test cases/frameworks/10 gtk-doc/doc/foobar3/foobar-docs.sgml new file mode 100644 index 000000000..95f73efdf --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar3/foobar-docs.sgml @@ -0,0 +1,41 @@ + + +]> + + + Foolib Reference Manual + + for Foobar &version; + + + + Jonny + Example + +
+        <email>unknown@example.com</email>
+
+
+
+  <copyright>
+    <year>2015</year>
+    <holder>Foobar corporation holdings ltd</holder>
+  </copyright>
+ + + Foobar library + + + This part documents Foobar libs. + + + + + + + +
diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar3/meson.build b/meson/test cases/frameworks/10 gtk-doc/doc/foobar3/meson.build new file mode 100644 index 000000000..0dce2f837 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar3/meson.build @@ -0,0 +1,6 @@ +gnome.gtkdoc('foobar', + module_version : '3.0', + src_dir : inc, + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + install : true) diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar4/foobar-docs.sgml b/meson/test cases/frameworks/10 gtk-doc/doc/foobar4/foobar-docs.sgml new file mode 100644 index 000000000..95f73efdf --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar4/foobar-docs.sgml @@ -0,0 +1,41 @@ + + +]> + + + Foolib Reference Manual + + for Foobar &version; + + + + Jonny + Example + +
+        <email>unknown@example.com</email>
+
+
+
+  <copyright>
+    <year>2015</year>
+    <holder>Foobar corporation holdings ltd</holder>
+  </copyright>
+ + + Foobar library + + + This part documents Foobar libs. + + + + + + + +
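The four foobarN build files in this test exercise how gnome.gtkdoc() names its installed html directory: by default the module name is used, module_version appends a "-<version>" suffix, and an explicit install_dir overrides both (see foobar4/meson.build just below and the expected paths in the test.json further down). A small sketch of that mapping, assuming exactly those rules:

    # (module, module_version, install_dir) -> directory under share/gtk-doc/html/
    cases = [
        ('foobar',  None,  None),       # foobar1: default            -> foobar
        ('foobar2', None,  'foobar2'),  # foobar2: explicit install_dir -> foobar2
        ('foobar',  '3.0', None),       # foobar3: versioned default  -> foobar-3.0
        ('foobar2', '3.0', 'foobar3'),  # foobar4: install_dir wins   -> foobar3
    ]
    for module, version, install_dir in cases:
        html_dir = install_dir or (module + ('-' + version if version else ''))
        print((module, version, install_dir), '->', html_dir)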
diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/foobar4/meson.build b/meson/test cases/frameworks/10 gtk-doc/doc/foobar4/meson.build new file mode 100644 index 000000000..959e50730 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/foobar4/meson.build @@ -0,0 +1,7 @@ +gnome.gtkdoc('foobar2', + module_version : '3.0', + src_dir : inc, + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + install : true, + install_dir : 'foobar3') diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/meson.build b/meson/test cases/frameworks/10 gtk-doc/doc/meson.build new file mode 100644 index 000000000..c001f8964 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/meson.build @@ -0,0 +1,10 @@ +cdata = configuration_data() +cdata.set('VERSION', '1.0') +version_xml = configure_file(input : 'version.xml.in', + output : 'version.xml', + configuration : cdata) + +subdir('foobar1') +subdir('foobar2') +subdir('foobar3') +subdir('foobar4') diff --git a/meson/test cases/frameworks/10 gtk-doc/doc/version.xml.in b/meson/test cases/frameworks/10 gtk-doc/doc/version.xml.in new file mode 100644 index 000000000..d78bda934 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/doc/version.xml.in @@ -0,0 +1 @@ +@VERSION@ diff --git a/meson/test cases/frameworks/10 gtk-doc/foo.c b/meson/test cases/frameworks/10 gtk-doc/foo.c new file mode 100644 index 000000000..36c0639ec --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/foo.c @@ -0,0 +1,30 @@ +#include + + +struct _FooObj { + GObject parent; + int dummy; +}; + +G_DEFINE_TYPE(FooObj, foo_obj, G_TYPE_OBJECT) + +static void foo_obj_init (FooObj *self) +{ +} + +static void foo_obj_class_init (FooObjClass *klass) +{ +} + +/** + * foo_do_something: + * @self: self + * + * Useless function. + * + * Returns: 0. + */ +int foo_do_something(FooObj *self) +{ + return 0; +} diff --git a/meson/test cases/frameworks/10 gtk-doc/include/foo-version.h.in b/meson/test cases/frameworks/10 gtk-doc/include/foo-version.h.in new file mode 100644 index 000000000..30751cd75 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/include/foo-version.h.in @@ -0,0 +1,29 @@ +#pragma once + +/** + * SECTION:version + * @section_id: foo-version + * @short_description: foo-version.h + * @title: Foo Versioning + */ + +/** + * FOO_MAJOR_VERSION: + * + * The major version of foo. + */ +#define FOO_MAJOR_VERSION (@FOO_MAJOR_VERSION@) + +/** + * FOO_MINOR_VERSION: + * + * The minor version of foo. + */ +#define FOO_MINOR_VERSION (@FOO_MINOR_VERSION@) + +/** + * FOO_MICRO_VERSION: + * + * The micro version of foo. + */ +#define FOO_MICRO_VERSION (@FOO_MICRO_VERSION@) diff --git a/meson/test cases/frameworks/10 gtk-doc/include/foo.h b/meson/test cases/frameworks/10 gtk-doc/include/foo.h new file mode 100644 index 000000000..510f3d1ec --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/include/foo.h @@ -0,0 +1,33 @@ +#pragma once + +#include + +/** + * FooIndecision: + * @FOO_MAYBE: Something maybe + * @FOO_POSSIBLY: Something possible + * + * The indecision type. 
+ **/ + +typedef enum { + FOO_MAYBE, + FOO_POSSIBLY, +} FooIndecision; + +/** + * FooObjClass: + * + * The class + */ + +/** + * FooObj: + * + * The instance + */ + +#define FOO_TYPE_OBJ foo_obj_get_type() +G_DECLARE_FINAL_TYPE(FooObj, foo_obj, FOO, OBJ, GObject) + +int foo_do_something(FooObj *self); diff --git a/meson/test cases/frameworks/10 gtk-doc/include/generate-enums-docbook.py b/meson/test cases/frameworks/10 gtk-doc/include/generate-enums-docbook.py new file mode 100644 index 000000000..41c6121b2 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/include/generate-enums-docbook.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +import sys + +DOC_HEADER = ''' + + + + + + {0} + {0} + + + {0} + + + + + enum {1} + + {1} + + {1} + + Values + + + + + + +''' + +DOC_ENUM = ''' + {0} + = {1} + + ''' + +DOC_FOOTER = ''' + + + + + + +''' + +if __name__ == '__main__': + if len(sys.argv) >= 4: + with open(sys.argv[1], 'w') as doc_out: + enum_name = sys.argv[2] + enum_type = sys.argv[3] + + doc_out.write(DOC_HEADER.format(enum_name, enum_type)) + for i, enum in enumerate(sys.argv[4:]): + doc_out.write(DOC_ENUM.format(enum, i)) + doc_out.write(DOC_FOOTER) + else: + print('Use: ' + sys.argv[0] + ' out name type [enums]') + + sys.exit(0) diff --git a/meson/test cases/frameworks/10 gtk-doc/include/meson.build b/meson/test cases/frameworks/10 gtk-doc/include/meson.build new file mode 100644 index 000000000..aa328850e --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/include/meson.build @@ -0,0 +1,17 @@ +cdata = configuration_data() +parts = meson.project_version().split('.') +cdata.set('FOO_MAJOR_VERSION', parts[0]) +cdata.set('FOO_MINOR_VERSION', parts[1]) +cdata.set('FOO_MICRO_VERSION', parts[2]) +configure_file(input : 'foo-version.h.in', + output : 'foo-version.h', + configuration : cdata, + install : true, + install_dir : get_option('includedir')) + +generate_enums_docbook = find_program('generate-enums-docbook.py') + +docbook = custom_target('enum-docbook', + output : 'bar.xml', + command : [generate_enums_docbook, '@OUTPUT@', 'BAR', 'BAR_TYPE', 'BAR_FOO'], + build_by_default : true) diff --git a/meson/test cases/frameworks/10 gtk-doc/meson.build b/meson/test cases/frameworks/10 gtk-doc/meson.build new file mode 100644 index 000000000..43ee929a1 --- /dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/meson.build @@ -0,0 +1,39 @@ +project('gtkdoctest', 'c', version : '1.0.0') + +gtkdoc = find_program('gtkdoc-scan', required: false) +if not gtkdoc.found() + error('MESON_SKIP_TEST gtkdoc not found.') +endif + +gnome = import('gnome') + +assert(gnome.gtkdoc_html_dir('foobar') == 'share/gtk-doc/html/foobar', 'Gtkdoc install dir is incorrect.') + +inc = include_directories('include') + +subdir('include') + +# disable this test unless a bug fix for spaces in pathnames is present +# https://bugzilla.gnome.org/show_bug.cgi?id=753145 +result = run_command(gtkdoc, ['--version']) +gtkdoc_ver = result.stdout().strip() +if gtkdoc_ver == '' + gtkdoc_ver = result.stderr().strip() +endif +if gtkdoc_ver.version_compare('<1.26') + error('MESON_SKIP_TEST gtk-doc test requires gtkdoc >= 1.26.') +endif + +gobject = dependency('gobject-2.0') + +libfoo = shared_library('foo', 'foo.c', + include_directories: inc, + dependencies: gobject, +) + +foo_dep = declare_dependency( + link_with: libfoo, + include_directories: inc, +) + +subdir('doc') diff --git a/meson/test cases/frameworks/10 gtk-doc/test.json b/meson/test cases/frameworks/10 gtk-doc/test.json new file mode 100644 index 000000000..03ad05958 --- 
/dev/null +++ b/meson/test cases/frameworks/10 gtk-doc/test.json @@ -0,0 +1,61 @@ +{ + "installed": [ + {"type": "file", "file": "usr/include/foo-version.h"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/BAR.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/foobar.devhelp2"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/foobar.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/FooObj.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/foo-version.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/home.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/index.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/left.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/left-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/right.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/right-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/style.css"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/up.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar/up-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/BAR.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar2.devhelp2"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar2-foo.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/foobar2-foo-version.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/home.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/index.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/left.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/left-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/right.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/right-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/style.css"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/up.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar2/up-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/BAR.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar-3.0.devhelp2"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar-foo.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/foobar-foo-version.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/home.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/index.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/left.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/left-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/right.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/right-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/style.css"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/up.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar-3.0/up-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/BAR.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/foobar2-3.0.devhelp2"}, + {"type": "file", "file": 
"usr/share/gtk-doc/html/foobar3/foobar.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/foobar2-foo.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/foobar2-foo-version.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/home.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/index.html"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/left.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/left-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/right.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/right-insensitive.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/style.css"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/up.png"}, + {"type": "file", "file": "usr/share/gtk-doc/html/foobar3/up-insensitive.png"} + ] +} diff --git a/meson/test cases/frameworks/11 gir subproject/gir/meson-subsample.c b/meson/test cases/frameworks/11 gir subproject/gir/meson-subsample.c new file mode 100644 index 000000000..2d58a1039 --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/gir/meson-subsample.c @@ -0,0 +1,124 @@ +#include "meson-subsample.h" + +struct _MesonSubSample +{ + MesonSample parent_instance; + + gchar *msg; +}; + +G_DEFINE_TYPE (MesonSubSample, meson_sub_sample, MESON_TYPE_SAMPLE) + +enum { + PROP_0, + PROP_MSG, + LAST_PROP +}; + +static GParamSpec *gParamSpecs [LAST_PROP]; + +/** + * meson_sub_sample_new: + * @msg: The message to set. + * + * Allocates a new #MesonSubSample. + * + * Returns: (transfer full): a #MesonSubSample. + */ +MesonSubSample * +meson_sub_sample_new (const gchar *msg) +{ + g_return_val_if_fail (msg != NULL, NULL); + + return g_object_new (MESON_TYPE_SUB_SAMPLE, + "message", msg, + NULL); +} + +static void +meson_sub_sample_finalize (GObject *object) +{ + MesonSubSample *self = (MesonSubSample *)object; + + g_clear_pointer (&self->msg, g_free); + + G_OBJECT_CLASS (meson_sub_sample_parent_class)->finalize (object); +} + +static void +meson_sub_sample_get_property (GObject *object, + guint prop_id, + GValue *value, + GParamSpec *pspec) +{ + MesonSubSample *self = MESON_SUB_SAMPLE (object); + + switch (prop_id) + { + case PROP_MSG: + g_value_set_string (value, self->msg); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sub_sample_set_property (GObject *object, + guint prop_id, + const GValue *value, + GParamSpec *pspec) +{ + MesonSubSample *self = MESON_SUB_SAMPLE (object); + + switch (prop_id) + { + case PROP_MSG: + self->msg = g_value_dup_string (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sub_sample_class_init (MesonSubSampleClass *klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + + object_class->finalize = meson_sub_sample_finalize; + object_class->get_property = meson_sub_sample_get_property; + object_class->set_property = meson_sub_sample_set_property; + + gParamSpecs [PROP_MSG] = + g_param_spec_string ("message", + "Message", + "The message to print.", + NULL, + (G_PARAM_READWRITE | + G_PARAM_CONSTRUCT_ONLY | + G_PARAM_STATIC_STRINGS)); + + g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs); +} + +static void +meson_sub_sample_init (MesonSubSample *self) +{ +} + +/** + * meson_sub_sample_print_message: + * @self: a #MesonSubSample. + * + * Prints the message. + * + * Returns: Nothing. 
+ */ +void +meson_sub_sample_print_message (MesonSubSample *self) +{ + g_return_if_fail (MESON_IS_SUB_SAMPLE (self)); + + g_print ("Message: %s\n", self->msg); +} diff --git a/meson/test cases/frameworks/11 gir subproject/gir/meson-subsample.h b/meson/test cases/frameworks/11 gir subproject/gir/meson-subsample.h new file mode 100644 index 000000000..666d59ffa --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/gir/meson-subsample.h @@ -0,0 +1,21 @@ +#ifndef MESON_SUB_SAMPLE_H +#define MESON_SUB_SAMPLE_H + +#if !defined (MESON_TEST) +#error "MESON_TEST not defined." +#endif + +#include +#include + +G_BEGIN_DECLS + +#define MESON_TYPE_SUB_SAMPLE (meson_sub_sample_get_type()) + +G_DECLARE_FINAL_TYPE (MesonSubSample, meson_sub_sample, MESON, SUB_SAMPLE, MesonSample) + +MesonSubSample *meson_sub_sample_new (const gchar *msg); + +G_END_DECLS + +#endif /* MESON_SUB_SAMPLE_H */ diff --git a/meson/test cases/frameworks/11 gir subproject/gir/meson.build b/meson/test cases/frameworks/11 gir subproject/gir/meson.build new file mode 100644 index 000000000..fe40dc6a7 --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/gir/meson.build @@ -0,0 +1,40 @@ +libsources = ['meson-subsample.c', 'meson-subsample.h'] + +girsubproject = shared_library( + 'girsubproject', + sources : libsources, + dependencies : [gobj, meson_gir], + install : true +) + +girexe = executable( + 'girprog', + sources : 'prog.c', + dependencies : [gobj, meson_gir], + link_with : girsubproject +) + +gnome.generate_gir( + girsubproject, + sources : libsources, + dependencies : [gobj, meson_gir], + nsversion : '1.0', + namespace : 'MesonSub', + symbol_prefix : 'meson_sub_', + identifier_prefix : 'MesonSub', + includes : ['GObject-2.0', 'Meson-1.0'], + install : true +) + +message('TEST: ' + girsubproject.outdir()) + +envdata = environment() +envdata.append('GI_TYPELIB_PATH', girsubproject.outdir(), 'subprojects/mesongir', separator : ':') +envdata.append('LD_LIBRARY_PATH', girsubproject.outdir(), 'subprojects/mesongir') +if ['windows', 'cygwin'].contains(host_machine.system()) + envdata.append('PATH', girsubproject.outdir(), 'subprojects/mesongir') +endif + +test('gobject introspection/subproject/c', girexe) +test('gobject introspection/subproject/py', find_program('prog.py'), + env : envdata) diff --git a/meson/test cases/frameworks/11 gir subproject/gir/prog.c b/meson/test cases/frameworks/11 gir subproject/gir/prog.c new file mode 100644 index 000000000..f25c9d85c --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/gir/prog.c @@ -0,0 +1,12 @@ +#include "meson-subsample.h" + +gint +main (gint argc, + gchar *argv[]) +{ + MesonSample * i = (MesonSample*) meson_sub_sample_new ("Hello, sub/meson/c!"); + meson_sample_print_message (i); + g_object_unref (i); + + return 0; +} diff --git a/meson/test cases/frameworks/11 gir subproject/gir/prog.py b/meson/test cases/frameworks/11 gir subproject/gir/prog.py new file mode 100755 index 000000000..ea4da5b5e --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/gir/prog.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 +from gi.repository import MesonSub + +if __name__ == "__main__": + s = MesonSub.Sample.new("Hello, sub/meson/py!") + s.print_message() diff --git a/meson/test cases/frameworks/11 gir subproject/meson.build b/meson/test cases/frameworks/11 gir subproject/meson.build new file mode 100644 index 000000000..a599ae9eb --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/meson.build @@ -0,0 +1,20 @@ 
+project('gobject-introspection-with-subproject', 'c') + +gir = find_program('g-ir-scanner', required: false) +if not gir.found() + error('MESON_SKIP_TEST g-ir-scanner not found.') +endif + +python3 = import('python3') +py3 = python3.find_python() +if run_command(py3, '-c', 'import gi;').returncode() != 0 + error('MESON_SKIP_TEST python3-gi not found') +endif + +gnome = import('gnome') +gobj = dependency('gobject-2.0') + +add_global_arguments('-DMESON_TEST', language : 'c') +meson_gir = dependency('meson-gir', fallback : ['mesongir', 'meson_gir']) + +subdir('gir') diff --git a/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.c b/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.c new file mode 100644 index 000000000..2e78b076f --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.c @@ -0,0 +1,127 @@ +#include "meson-sample.h" + +typedef struct _MesonSamplePrivate +{ + gchar *msg; +} MesonSamplePrivate; + + +G_DEFINE_TYPE_WITH_PRIVATE (MesonSample, meson_sample, G_TYPE_OBJECT) + +enum { + PROP_0, + PROP_MSG, + LAST_PROP +}; + +static GParamSpec *gParamSpecs [LAST_PROP]; + +/** + * meson_sample_new: + * @msg: The message to set. + * + * Allocates a new #MesonSample. + * + * Returns: (transfer full): a #MesonSample. + */ +MesonSample * +meson_sample_new (const gchar *msg) +{ + g_return_val_if_fail (msg != NULL, NULL); + + return g_object_new (MESON_TYPE_SAMPLE, + "message", msg, + NULL); +} + +static void +meson_sample_finalize (GObject *object) +{ + MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object); + + g_clear_pointer (&priv->msg, g_free); + + G_OBJECT_CLASS (meson_sample_parent_class)->finalize (object); +} + +static void +meson_sample_get_property (GObject *object, + guint prop_id, + GValue *value, + GParamSpec *pspec) +{ + MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object); + + switch (prop_id) + { + case PROP_MSG: + g_value_set_string (value, priv->msg); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sample_set_property (GObject *object, + guint prop_id, + const GValue *value, + GParamSpec *pspec) +{ + MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object); + + switch (prop_id) + { + case PROP_MSG: + priv->msg = g_value_dup_string (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sample_class_init (MesonSampleClass *klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + + object_class->finalize = meson_sample_finalize; + object_class->get_property = meson_sample_get_property; + object_class->set_property = meson_sample_set_property; + + gParamSpecs [PROP_MSG] = + g_param_spec_string ("message", + "Message", + "The message to print.", + NULL, + (G_PARAM_READWRITE | + G_PARAM_CONSTRUCT_ONLY | + G_PARAM_STATIC_STRINGS)); + + g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs); +} + +static void +meson_sample_init (MesonSample *self) +{ +} + +/** + * meson_sample_print_message: + * @self: a #MesonSample. + * + * Prints the message. + * + * Returns: Nothing. 
+ */ +void +meson_sample_print_message (MesonSample *self) +{ + MesonSamplePrivate *priv; + + g_return_if_fail (MESON_IS_SAMPLE (self)); + + priv = meson_sample_get_instance_private (self); + + g_print ("Message: %s\n", priv->msg); +} diff --git a/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.h b/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.h new file mode 100644 index 000000000..e4c07a8cf --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson-sample.h @@ -0,0 +1,26 @@ +#ifndef MESON_SAMPLE_H +#define MESON_SAMPLE_H + +#if !defined (MESON_TEST) +#error "MESON_TEST not defined." +#endif + +#include + +G_BEGIN_DECLS + +#define MESON_TYPE_SAMPLE (meson_sample_get_type()) + +G_DECLARE_DERIVABLE_TYPE (MesonSample, meson_sample, MESON, SAMPLE, GObject) + +struct _MesonSampleClass { + GObjectClass parent_class; +}; + + +MesonSample *meson_sample_new (const gchar *msg); +void meson_sample_print_message (MesonSample *self); + +G_END_DECLS + +#endif /* MESON_SAMPLE_H */ diff --git a/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson.build b/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson.build new file mode 100644 index 000000000..027b4ee80 --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/subprojects/mesongir/meson.build @@ -0,0 +1,31 @@ +project('gobject-introspection-subproject', 'c') + +gnome = import('gnome') +gobj = dependency('gobject-2.0') + +libsources = ['meson-sample.c', 'meson-sample.h'] + +girlib = shared_library( + 'girlib', + sources : libsources, + dependencies : gobj, + install : true +) + +girtarget = gnome.generate_gir( + girlib, + sources : libsources, + nsversion : '1.0', + namespace : 'Meson', + symbol_prefix : 'meson_', + identifier_prefix : 'Meson', + includes : ['GObject-2.0'], + install : true +) + +meson_gir = declare_dependency(link_with : girlib, + include_directories : [include_directories('.')], + dependencies : [gobj], + # Everything that uses libgst needs this built to compile + sources : girtarget, +) diff --git a/meson/test cases/frameworks/11 gir subproject/test.json b/meson/test cases/frameworks/11 gir subproject/test.json new file mode 100644 index 000000000..e94152efa --- /dev/null +++ b/meson/test cases/frameworks/11 gir subproject/test.json @@ -0,0 +1,12 @@ +{ + "installed": [ + {"type": "file", "file": "usr/lib/girepository-1.0/Meson-1.0.typelib"}, + {"type": "file", "file": "usr/lib/girepository-1.0/MesonSub-1.0.typelib"}, + {"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"}, + {"type": "file", "file": "usr/share/gir-1.0/MesonSub-1.0.gir"}, + {"type": "expr", "file": "usr/lib/?libgirsubproject.so"}, + {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirlib.dll.a"}, + {"type": "expr", "file": "usr/lib/?libgirlib.so"}, + {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirsubproject.dll.a"} + ] +} diff --git a/meson/test cases/frameworks/12 multiple gir/gir/meson-subsample.c b/meson/test cases/frameworks/12 multiple gir/gir/meson-subsample.c new file mode 100644 index 000000000..2d58a1039 --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/gir/meson-subsample.c @@ -0,0 +1,124 @@ +#include "meson-subsample.h" + +struct _MesonSubSample +{ + MesonSample parent_instance; + + gchar *msg; +}; + +G_DEFINE_TYPE (MesonSubSample, meson_sub_sample, MESON_TYPE_SAMPLE) + +enum { + PROP_0, + PROP_MSG, + LAST_PROP +}; + +static GParamSpec *gParamSpecs [LAST_PROP]; + 
+/** + * meson_sub_sample_new: + * @msg: The message to set. + * + * Allocates a new #MesonSubSample. + * + * Returns: (transfer full): a #MesonSubSample. + */ +MesonSubSample * +meson_sub_sample_new (const gchar *msg) +{ + g_return_val_if_fail (msg != NULL, NULL); + + return g_object_new (MESON_TYPE_SUB_SAMPLE, + "message", msg, + NULL); +} + +static void +meson_sub_sample_finalize (GObject *object) +{ + MesonSubSample *self = (MesonSubSample *)object; + + g_clear_pointer (&self->msg, g_free); + + G_OBJECT_CLASS (meson_sub_sample_parent_class)->finalize (object); +} + +static void +meson_sub_sample_get_property (GObject *object, + guint prop_id, + GValue *value, + GParamSpec *pspec) +{ + MesonSubSample *self = MESON_SUB_SAMPLE (object); + + switch (prop_id) + { + case PROP_MSG: + g_value_set_string (value, self->msg); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sub_sample_set_property (GObject *object, + guint prop_id, + const GValue *value, + GParamSpec *pspec) +{ + MesonSubSample *self = MESON_SUB_SAMPLE (object); + + switch (prop_id) + { + case PROP_MSG: + self->msg = g_value_dup_string (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sub_sample_class_init (MesonSubSampleClass *klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + + object_class->finalize = meson_sub_sample_finalize; + object_class->get_property = meson_sub_sample_get_property; + object_class->set_property = meson_sub_sample_set_property; + + gParamSpecs [PROP_MSG] = + g_param_spec_string ("message", + "Message", + "The message to print.", + NULL, + (G_PARAM_READWRITE | + G_PARAM_CONSTRUCT_ONLY | + G_PARAM_STATIC_STRINGS)); + + g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs); +} + +static void +meson_sub_sample_init (MesonSubSample *self) +{ +} + +/** + * meson_sub_sample_print_message: + * @self: a #MesonSubSample. + * + * Prints the message. + * + * Returns: Nothing. 
+ */ +void +meson_sub_sample_print_message (MesonSubSample *self) +{ + g_return_if_fail (MESON_IS_SUB_SAMPLE (self)); + + g_print ("Message: %s\n", self->msg); +} diff --git a/meson/test cases/frameworks/12 multiple gir/gir/meson-subsample.h b/meson/test cases/frameworks/12 multiple gir/gir/meson-subsample.h new file mode 100644 index 000000000..9d34a0824 --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/gir/meson-subsample.h @@ -0,0 +1,17 @@ +#ifndef MESON_SUB_SAMPLE_H +#define MESON_SUB_SAMPLE_H + +#include +#include + +G_BEGIN_DECLS + +#define MESON_TYPE_SUB_SAMPLE (meson_sub_sample_get_type()) + +G_DECLARE_FINAL_TYPE (MesonSubSample, meson_sub_sample, MESON, SUB_SAMPLE, MesonSample) + +MesonSubSample *meson_sub_sample_new (const gchar *msg); + +G_END_DECLS + +#endif /* MESON_SUB_SAMPLE_H */ diff --git a/meson/test cases/frameworks/12 multiple gir/gir/meson.build b/meson/test cases/frameworks/12 multiple gir/gir/meson.build new file mode 100644 index 000000000..6001a099f --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/gir/meson.build @@ -0,0 +1,30 @@ +libsources = ['meson-subsample.c', 'meson-subsample.h'] + +girsubproject = shared_library( + 'girsubproject', + sources : libsources, + dependencies : [gobj, girlib_dep], + install : true +) + +girexe = executable( + 'girprog', + sources : 'prog.c', + dependencies : [gobj, girlib_dep], + link_with : girsubproject +) + +gnome.generate_gir( + girsubproject, + sources : libsources, + nsversion : '1.0', + namespace : 'MesonSub', + symbol_prefix : 'meson_sub_', + identifier_prefix : 'MesonSub', + includes : ['GObject-2.0', meson_gir], + install : true +) + +message('TEST: ' + girsubproject.outdir()) + +test('gobject introspection/subproject/c', girexe) diff --git a/meson/test cases/frameworks/12 multiple gir/gir/prog.c b/meson/test cases/frameworks/12 multiple gir/gir/prog.c new file mode 100644 index 000000000..f25c9d85c --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/gir/prog.c @@ -0,0 +1,12 @@ +#include "meson-subsample.h" + +gint +main (gint argc, + gchar *argv[]) +{ + MesonSample * i = (MesonSample*) meson_sub_sample_new ("Hello, sub/meson/c!"); + meson_sample_print_message (i); + g_object_unref (i); + + return 0; +} diff --git a/meson/test cases/frameworks/12 multiple gir/meson.build b/meson/test cases/frameworks/12 multiple gir/meson.build new file mode 100644 index 000000000..ddc9830d5 --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/meson.build @@ -0,0 +1,12 @@ +project('multiple-gobject-introspection', 'c') + +gir = find_program('g-ir-scanner', required: false) +if not gir.found() + error('MESON_SKIP_TEST g-ir-scanner not found.') +endif + +gnome = import('gnome') +gobj = dependency('gobject-2.0') + +subdir('mesongir') +subdir('gir') diff --git a/meson/test cases/frameworks/12 multiple gir/mesongir/meson-sample.c b/meson/test cases/frameworks/12 multiple gir/mesongir/meson-sample.c new file mode 100644 index 000000000..2ed9cdf72 --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/mesongir/meson-sample.c @@ -0,0 +1,126 @@ +#include "meson-sample.h" + +typedef struct _MesonSamplePrivate +{ + gchar *msg; +} MesonSamplePrivate; + + +G_DEFINE_TYPE_WITH_PRIVATE (MesonSample, meson_sample, G_TYPE_OBJECT) + +enum { + PROP_0, + PROP_MSG, + LAST_PROP +}; + +static GParamSpec *gParamSpecs [LAST_PROP]; + +/** + * meson_sample_new: + * @msg: The message to set. + * + * Allocates a new #MesonSample. + * + * Returns: (transfer full): a #MesonSample. 
+ */ +MesonSample * +meson_sample_new (const gchar *msg) +{ + g_return_val_if_fail (msg != NULL, NULL); + + return g_object_new (MESON_TYPE_SAMPLE, + "message", msg, + NULL); +} + +static void +meson_sample_finalize (GObject *object) +{ + MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object); + + g_clear_pointer (&priv->msg, g_free); + + G_OBJECT_CLASS (meson_sample_parent_class)->finalize (object); +} + +static void +meson_sample_get_property (GObject *object, + guint prop_id, + GValue *value, + GParamSpec *pspec) +{ + MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object); + + switch (prop_id) + { + case PROP_MSG: + g_value_set_string (value, priv->msg); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sample_set_property (GObject *object, + guint prop_id, + const GValue *value, + GParamSpec *pspec) +{ + MesonSamplePrivate *priv = meson_sample_get_instance_private ((MesonSample *) object); + + switch (prop_id) + { + case PROP_MSG: + priv->msg = g_value_dup_string (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_sample_class_init (MesonSampleClass *klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + + object_class->finalize = meson_sample_finalize; + object_class->get_property = meson_sample_get_property; + object_class->set_property = meson_sample_set_property; + + gParamSpecs [PROP_MSG] = + g_param_spec_string ("message", + "Message", + "The message to print.", + NULL, + (G_PARAM_READWRITE | + G_PARAM_CONSTRUCT_ONLY | + G_PARAM_STATIC_STRINGS)); + + g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs); +} + +static void +meson_sample_init (MesonSample *self) +{ +} + +/** + * meson_sample_print_message: + * @self: a #MesonSample. + * + * Prints the message. 
+ * + */ +void +meson_sample_print_message (MesonSample *self) +{ + MesonSamplePrivate *priv; + + g_return_if_fail (MESON_IS_SAMPLE (self)); + + priv = meson_sample_get_instance_private (self); + + g_print ("Message: %s\n", priv->msg); +} diff --git a/meson/test cases/frameworks/12 multiple gir/mesongir/meson-sample.h.in b/meson/test cases/frameworks/12 multiple gir/mesongir/meson-sample.h.in new file mode 100644 index 000000000..d0ab29e6f --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/mesongir/meson-sample.h.in @@ -0,0 +1,22 @@ +#ifndef MESON_SAMPLE_H +#define MESON_SAMPLE_H + +#include <@HEADER@> + +G_BEGIN_DECLS + +#define MESON_TYPE_SAMPLE (meson_sample_get_type()) + +G_DECLARE_DERIVABLE_TYPE (MesonSample, meson_sample, MESON, SAMPLE, GObject) + +struct _MesonSampleClass { + GObjectClass parent_class; +}; + + +MesonSample *meson_sample_new (const gchar *msg); +void meson_sample_print_message (MesonSample *self); + +G_END_DECLS + +#endif /* MESON_SAMPLE_H */ diff --git a/meson/test cases/frameworks/12 multiple gir/mesongir/meson.build b/meson/test cases/frameworks/12 multiple gir/mesongir/meson.build new file mode 100644 index 000000000..3ca4333d2 --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/mesongir/meson.build @@ -0,0 +1,38 @@ +conf = configuration_data() +conf.set('HEADER', 'glib-object.h') + +meson_sample_header = configure_file( + input : 'meson-sample.h.in', + output : 'meson-sample.h', + configuration : conf) + +libsources = ['meson-sample.c', meson_sample_header] + +girlib = shared_library( + 'girlib', + sources : libsources, + dependencies : gobj, + install : true +) + +girtarget = gnome.generate_gir( + girlib, + sources : libsources, + nsversion : '1.0', + namespace : 'Meson', + symbol_prefix : 'meson_', + identifier_prefix : 'Meson', + includes : ['GObject-2.0'], + export_packages : 'meson', + install : true +) +meson_gir = girtarget[0] +meson_typelib = girtarget[1] + +girlib_inc = include_directories('.') +girlib_dep = declare_dependency(link_with : girlib, + include_directories : [girlib_inc], + dependencies : [gobj], + # Everything that uses libgst needs this built to compile + sources : girtarget, +) diff --git a/meson/test cases/frameworks/12 multiple gir/test.json b/meson/test cases/frameworks/12 multiple gir/test.json new file mode 100644 index 000000000..4e3624c19 --- /dev/null +++ b/meson/test cases/frameworks/12 multiple gir/test.json @@ -0,0 +1,12 @@ +{ + "installed": [ + {"type": "file", "file": "usr/lib/girepository-1.0/Meson-1.0.typelib"}, + {"type": "file", "file": "usr/lib/girepository-1.0/MesonSub-1.0.typelib"}, + {"type": "expr", "file": "usr/lib/?libgirlib.so"}, + {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirlib.dll.a"}, + {"type": "expr", "file": "usr/lib/?libgirsubproject.so"}, + {"type": "file", "platform": "cygwin", "file": "usr/lib/libgirsubproject.dll.a"}, + {"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"}, + {"type": "file", "file": "usr/share/gir-1.0/MesonSub-1.0.gir"} + ] +} diff --git a/meson/test cases/frameworks/13 yelp/help/C/index.page b/meson/test cases/frameworks/13 yelp/help/C/index.page new file mode 100644 index 000000000..1b367e678 --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/help/C/index.page @@ -0,0 +1,8 @@ + + + Hello! 
+ + diff --git a/meson/test cases/frameworks/13 yelp/help/C/media/test.txt b/meson/test cases/frameworks/13 yelp/help/C/media/test.txt new file mode 100644 index 000000000..ce0136250 --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/help/C/media/test.txt @@ -0,0 +1 @@ +hello diff --git a/meson/test cases/frameworks/13 yelp/help/LINGUAS b/meson/test cases/frameworks/13 yelp/help/LINGUAS new file mode 100644 index 000000000..173f97890 --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/help/LINGUAS @@ -0,0 +1,2 @@ +de +es diff --git a/meson/test cases/frameworks/13 yelp/help/de/de.po b/meson/test cases/frameworks/13 yelp/help/de/de.po new file mode 100644 index 000000000..a54ce7fda --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/help/de/de.po @@ -0,0 +1,13 @@ +msgid "" +msgstr "" +"Project-Id-Version: meson master\n" +"Language: de\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#. (itstool) path: page/title +#: C/index.page:5 +msgid "Hello!" +msgstr "Hallo!" diff --git a/meson/test cases/frameworks/13 yelp/help/es/es.po b/meson/test cases/frameworks/13 yelp/help/es/es.po new file mode 100644 index 000000000..b69ce7fc3 --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/help/es/es.po @@ -0,0 +1,13 @@ +msgid "" +msgstr "" +"Project-Id-Version: meson master\n" +"Language: es\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#. (itstool) path: page/title +#: C/index.page:5 +msgid "Hello!" +msgstr "¡Hola!" diff --git a/meson/test cases/frameworks/13 yelp/help/es/media/test.txt b/meson/test cases/frameworks/13 yelp/help/es/media/test.txt new file mode 100644 index 000000000..3453b0015 --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/help/es/media/test.txt @@ -0,0 +1 @@ +Hola. 
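The help/meson.build that follows builds three yelp targets from these sources; the meson-linguas one omits the languages: keyword, in which case the languages are taken from the LINGUAS file above (one code per line). A rough sketch of that lookup, with the blank-line and comment handling as an assumption:

    # Hypothetical helper mirroring how a help/LINGUAS file is consumed:
    # one language code per line, blank lines and '#' comments ignored.
    def read_linguas(path='LINGUAS'):
        langs = []
        with open(path) as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith('#'):
                    langs.append(line)
        return langs

    print(read_linguas())  # for the LINGUAS above this prints ['de', 'es']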
diff --git a/meson/test cases/frameworks/13 yelp/help/meson.build b/meson/test cases/frameworks/13 yelp/help/meson.build new file mode 100644 index 000000000..c8edd61ba --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/help/meson.build @@ -0,0 +1,21 @@ +gnome = import('gnome') + +gnome.yelp('meson', + sources: 'index.page', + media: 'media/test.txt', + symlink_media: false, + languages: ['de', 'es'], +) + +gnome.yelp('meson-symlink', + sources: 'index.page', + media: 'media/test.txt', + symlink_media: true, + languages: ['de', 'es'], +) + +gnome.yelp('meson-linguas', + sources: 'index.page', + media: 'media/test.txt', + symlink_media: false, +) diff --git a/meson/test cases/frameworks/13 yelp/meson.build b/meson/test cases/frameworks/13 yelp/meson.build new file mode 100644 index 000000000..9fdde25ae --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/meson.build @@ -0,0 +1,8 @@ +project('yelp', 'c') + +itstool = find_program('itstool', required: false) +if not itstool.found() + error('MESON_SKIP_TEST itstool not found.') +endif + +subdir('help') diff --git a/meson/test cases/frameworks/13 yelp/test.json b/meson/test cases/frameworks/13 yelp/test.json new file mode 100644 index 000000000..070fb321b --- /dev/null +++ b/meson/test cases/frameworks/13 yelp/test.json @@ -0,0 +1,22 @@ +{ + "installed": [ + {"type": "file", "file": "usr/share/help/C/meson/index.page"}, + {"type": "file", "file": "usr/share/help/C/meson/media/test.txt"}, + {"type": "file", "file": "usr/share/help/es/meson/index.page"}, + {"type": "file", "file": "usr/share/help/es/meson/media/test.txt"}, + {"type": "file", "file": "usr/share/help/de/meson/index.page"}, + {"type": "file", "file": "usr/share/help/de/meson/media/test.txt"}, + {"type": "file", "file": "usr/share/help/C/meson-symlink/index.page"}, + {"type": "file", "file": "usr/share/help/C/meson-symlink/media/test.txt"}, + {"type": "file", "file": "usr/share/help/es/meson-symlink/media/test.txt"}, + {"type": "file", "file": "usr/share/help/es/meson-symlink/index.page"}, + {"type": "file", "file": "usr/share/help/de/meson-symlink/index.page"}, + {"type": "file", "file": "usr/share/help/de/meson-symlink/media/test.txt"}, + {"type": "file", "file": "usr/share/help/C/meson-linguas/index.page"}, + {"type": "file", "file": "usr/share/help/C/meson-linguas/media/test.txt"}, + {"type": "file", "file": "usr/share/help/es/meson-linguas/media/test.txt"}, + {"type": "file", "file": "usr/share/help/es/meson-linguas/index.page"}, + {"type": "file", "file": "usr/share/help/de/meson-linguas/index.page"}, + {"type": "file", "file": "usr/share/help/de/meson-linguas/media/test.txt"} + ] +} diff --git a/meson/test cases/frameworks/14 doxygen/doc/Doxyfile.in b/meson/test cases/frameworks/14 doxygen/doc/Doxyfile.in new file mode 100644 index 000000000..69fb4aa6a --- /dev/null +++ b/meson/test cases/frameworks/14 doxygen/doc/Doxyfile.in @@ -0,0 +1,2473 @@ +# Doxyfile 1.8.13 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). 
+ +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all text +# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv +# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv +# for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "The Vast Comedian Project" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = @VERSION@ + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = Comedy generator + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = doc + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = YES + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. 
+ +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines. + +ALIASES = + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +TCL_SUBST = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. 
Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, Javascript, +# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: +# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: +# Fortran. In the later case the parser tries to guess whether the code is fixed +# or free formatted code, this is the default for Fortran type files), VHDL. For +# instance to make doxygen treat .inc files as Fortran files (default is PHP), +# and .f files as C (default is Fortran), use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See http://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 0. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +TOC_INCLUDE_HEADINGS = 0 + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) 
but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. 
+ +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. 
By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# (class|struct|union) declarations. If set to NO, these declarations will be +# included in the documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES, upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. + +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. 
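+# As a hedged illustration (the section name below is hypothetical, not defined
+# anywhere in this project): a comment block written as
+#   /** \if internal_docs This text is for maintainers only. \endif */
+# is only included in the generated output when this tag lists that name, e.g.
+#   ENABLED_SECTIONS = internal_docs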
+ +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. 
If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong or incomplete +# parameter documentation, but not about the absence of documentation. +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = "@TOP_SRCDIR@/include" "@TOP_SRCDIR@/src" + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. 
+# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, +# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, +# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf. + +FILE_PATTERNS = *.c \ + *.cc \ + *.cxx \ + *.cpp \ + *.c++ \ + *.java \ + *.ii \ + *.ixx \ + *.ipp \ + *.i++ \ + *.inl \ + *.idl \ + *.ddl \ + *.odl \ + *.h \ + *.hh \ + *.hxx \ + *.hpp \ + *.h++ \ + *.cs \ + *.d \ + *.php \ + *.php4 \ + *.php5 \ + *.phtml \ + *.inc \ + *.m \ + *.markdown \ + *.md \ + *.mm \ + *.dox \ + *.py \ + *.pyw \ + *.f90 \ + *.f95 \ + *.f03 \ + *.f08 \ + *.f \ + *.for \ + *.tcl \ + *.vhd \ + *.vhdl \ + *.ucf \ + *.qsf + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = NO + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories use the pattern */test/* + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or directories +# that contain images that are to be included in the documentation (see the +# \image command). 
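+# A minimal, hypothetical example (the directory and file name are placeholders,
+# not part of this project): setting
+#   IMAGE_PATH = docs/images
+# lets a comment such as
+#   /** \image html overview.png "Module overview" */
+# pull docs/images/overview.png into the generated HTML pages.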
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+#   <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on for instance GitHub
+# and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE =
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+ +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# function all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = NO + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = NO + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see http://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. 
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. 
+# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = NO + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). 
So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: http://developer.apple.com/tools/xcode/), introduced with +# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. 
You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the master .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- +# folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. 
For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location of Qt's +# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the +# generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. 
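+# As an illustrative combination only (not the configuration used here):
+# projects that prefer the side panel over the tab index typically set
+#   GENERATE_TREEVIEW = YES
+#   DISABLE_INDEX = YES
+# since, as noted above, both present the same information.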
+
+GENERATE_TREEVIEW = NO
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH = 250
+
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory for the changes to take effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# http://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output.
+# When enabled you may also need to install MathJax separately and configure the
+# path to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from http://www.mathjax.org before deployment.
+# The default value is: http://cdn.mathjax.org/mathjax/latest. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use + S +# (what the is depends on the OS and browser, but it is typically +# , /